removed venv
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,2 +1,3 @@
|
|||||||
./venv
|
/venv/
|
||||||
|
.venv/
|
||||||
.env
|
.env
|
||||||
@@ -1,247 +0,0 @@
|
|||||||
<#
|
|
||||||
.Synopsis
|
|
||||||
Activate a Python virtual environment for the current PowerShell session.
|
|
||||||
|
|
||||||
.Description
|
|
||||||
Pushes the python executable for a virtual environment to the front of the
|
|
||||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
|
||||||
in a Python virtual environment. Makes use of the command line switches as
|
|
||||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
|
||||||
|
|
||||||
.Parameter VenvDir
|
|
||||||
Path to the directory that contains the virtual environment to activate. The
|
|
||||||
default value for this is the parent of the directory that the Activate.ps1
|
|
||||||
script is located within.
|
|
||||||
|
|
||||||
.Parameter Prompt
|
|
||||||
The prompt prefix to display when this virtual environment is activated. By
|
|
||||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
|
||||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -Verbose
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
|
||||||
and shows extra information about the activation as it executes.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
|
||||||
Activates the Python virtual environment located in the specified location.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -Prompt "MyPython"
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
|
||||||
and prefixes the current prompt with the specified string (surrounded in
|
|
||||||
parentheses) while the virtual environment is active.
|
|
||||||
|
|
||||||
.Notes
|
|
||||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
|
||||||
execution policy for the user. You can do this by issuing the following PowerShell
|
|
||||||
command:
|
|
||||||
|
|
||||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
|
||||||
|
|
||||||
For more information on Execution Policies:
|
|
||||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
|
||||||
|
|
||||||
#>
|
|
||||||
Param(
|
|
||||||
[Parameter(Mandatory = $false)]
|
|
||||||
[String]
|
|
||||||
$VenvDir,
|
|
||||||
[Parameter(Mandatory = $false)]
|
|
||||||
[String]
|
|
||||||
$Prompt
|
|
||||||
)
|
|
||||||
|
|
||||||
<# Function declarations --------------------------------------------------- #>
|
|
||||||
|
|
||||||
<#
|
|
||||||
.Synopsis
|
|
||||||
Remove all shell session elements added by the Activate script, including the
|
|
||||||
addition of the virtual environment's Python executable from the beginning of
|
|
||||||
the PATH variable.
|
|
||||||
|
|
||||||
.Parameter NonDestructive
|
|
||||||
If present, do not remove this function from the global namespace for the
|
|
||||||
session.
|
|
||||||
|
|
||||||
#>
|
|
||||||
function global:deactivate ([switch]$NonDestructive) {
|
|
||||||
# Revert to original values
|
|
||||||
|
|
||||||
# The prior prompt:
|
|
||||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
|
||||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
|
||||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
|
||||||
}
|
|
||||||
|
|
||||||
# The prior PYTHONHOME:
|
|
||||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
|
||||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
|
||||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
}
|
|
||||||
|
|
||||||
# The prior PATH:
|
|
||||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
|
||||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
|
||||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove the VIRTUAL_ENV altogether:
|
|
||||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
|
||||||
Remove-Item -Path env:VIRTUAL_ENV
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
|
||||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
|
||||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
|
||||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
|
||||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
|
||||||
}
|
|
||||||
|
|
||||||
# Leave deactivate function in the global namespace if requested:
|
|
||||||
if (-not $NonDestructive) {
|
|
||||||
Remove-Item -Path function:deactivate
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
<#
|
|
||||||
.Description
|
|
||||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
|
||||||
given folder, and returns them in a map.
|
|
||||||
|
|
||||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
|
||||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
|
||||||
then it is considered a `key = value` line. The left hand string is the key,
|
|
||||||
the right hand is the value.
|
|
||||||
|
|
||||||
If the value starts with a `'` or a `"` then the first and last character is
|
|
||||||
stripped from the value before being captured.
|
|
||||||
|
|
||||||
.Parameter ConfigDir
|
|
||||||
Path to the directory that contains the `pyvenv.cfg` file.
|
|
||||||
#>
|
|
||||||
function Get-PyVenvConfig(
|
|
||||||
[String]
|
|
||||||
$ConfigDir
|
|
||||||
) {
|
|
||||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
|
||||||
|
|
||||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
|
||||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
|
||||||
|
|
||||||
# An empty map will be returned if no config file is found.
|
|
||||||
$pyvenvConfig = @{ }
|
|
||||||
|
|
||||||
if ($pyvenvConfigPath) {
|
|
||||||
|
|
||||||
Write-Verbose "File exists, parse `key = value` lines"
|
|
||||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
|
||||||
|
|
||||||
$pyvenvConfigContent | ForEach-Object {
|
|
||||||
$keyval = $PSItem -split "\s*=\s*", 2
|
|
||||||
if ($keyval[0] -and $keyval[1]) {
|
|
||||||
$val = $keyval[1]
|
|
||||||
|
|
||||||
# Remove extraneous quotations around a string value.
|
|
||||||
if ("'""".Contains($val.Substring(0, 1))) {
|
|
||||||
$val = $val.Substring(1, $val.Length - 2)
|
|
||||||
}
|
|
||||||
|
|
||||||
$pyvenvConfig[$keyval[0]] = $val
|
|
||||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return $pyvenvConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
<# Begin Activate script --------------------------------------------------- #>
|
|
||||||
|
|
||||||
# Determine the containing directory of this script
|
|
||||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
|
||||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
|
||||||
|
|
||||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
|
||||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
|
||||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
|
||||||
|
|
||||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
|
||||||
# First, get the location of the virtual environment, it might not be
|
|
||||||
# VenvExecDir if specified on the command line.
|
|
||||||
if ($VenvDir) {
|
|
||||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
|
||||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
|
||||||
Write-Verbose "VenvDir=$VenvDir"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
|
||||||
# as `prompt`.
|
|
||||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
|
||||||
|
|
||||||
# Next, set the prompt from the command line, or the config file, or
|
|
||||||
# just use the name of the virtual environment folder.
|
|
||||||
if ($Prompt) {
|
|
||||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
|
||||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
|
||||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
|
||||||
$Prompt = $pyvenvCfg['prompt'];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
|
||||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
|
||||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Write-Verbose "Prompt = '$Prompt'"
|
|
||||||
Write-Verbose "VenvDir='$VenvDir'"
|
|
||||||
|
|
||||||
# Deactivate any currently active virtual environment, but leave the
|
|
||||||
# deactivate function in place.
|
|
||||||
deactivate -nondestructive
|
|
||||||
|
|
||||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
|
||||||
# that there is an activated venv.
|
|
||||||
$env:VIRTUAL_ENV = $VenvDir
|
|
||||||
|
|
||||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
|
||||||
|
|
||||||
Write-Verbose "Setting prompt to '$Prompt'"
|
|
||||||
|
|
||||||
# Set the prompt to include the env name
|
|
||||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
|
||||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
|
||||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
|
||||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
|
||||||
|
|
||||||
function global:prompt {
|
|
||||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
|
||||||
_OLD_VIRTUAL_PROMPT
|
|
||||||
}
|
|
||||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
|
||||||
}
|
|
||||||
|
|
||||||
# Clear PYTHONHOME
|
|
||||||
if (Test-Path -Path Env:PYTHONHOME) {
|
|
||||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
Remove-Item -Path Env:PYTHONHOME
|
|
||||||
}
|
|
||||||
|
|
||||||
# Add the venv to the PATH
|
|
||||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
|
||||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
# This file must be used with "source bin/activate" *from bash*
|
|
||||||
# You cannot run it directly
|
|
||||||
|
|
||||||
deactivate () {
|
|
||||||
# reset old environment variables
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
|
||||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
|
||||||
export PATH
|
|
||||||
unset _OLD_VIRTUAL_PATH
|
|
||||||
fi
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
|
||||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
|
||||||
export PYTHONHOME
|
|
||||||
unset _OLD_VIRTUAL_PYTHONHOME
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Call hash to forget past commands. Without forgetting
|
|
||||||
# past commands the $PATH changes we made may not be respected
|
|
||||||
hash -r 2> /dev/null
|
|
||||||
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
|
||||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
|
||||||
export PS1
|
|
||||||
unset _OLD_VIRTUAL_PS1
|
|
||||||
fi
|
|
||||||
|
|
||||||
unset VIRTUAL_ENV
|
|
||||||
unset VIRTUAL_ENV_PROMPT
|
|
||||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
|
||||||
# Self destruct!
|
|
||||||
unset -f deactivate
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# unset irrelevant variables
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
# on Windows, a path can contain colons and backslashes and has to be converted:
|
|
||||||
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
|
|
||||||
# transform D:\path\to\venv to /d/path/to/venv on MSYS
|
|
||||||
# and to /cygdrive/d/path/to/venv on Cygwin
|
|
||||||
export VIRTUAL_ENV=$(cygpath /home/matsv/Documents/telegram-sticker-downloader/venv)
|
|
||||||
else
|
|
||||||
# use the path as-is
|
|
||||||
export VIRTUAL_ENV=/home/matsv/Documents/telegram-sticker-downloader/venv
|
|
||||||
fi
|
|
||||||
|
|
||||||
_OLD_VIRTUAL_PATH="$PATH"
|
|
||||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
|
||||||
export PATH
|
|
||||||
|
|
||||||
# unset PYTHONHOME if set
|
|
||||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
|
||||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
|
||||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
|
||||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
|
||||||
unset PYTHONHOME
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
|
||||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
|
||||||
PS1='(venv) '"${PS1:-}"
|
|
||||||
export PS1
|
|
||||||
VIRTUAL_ENV_PROMPT='(venv) '
|
|
||||||
export VIRTUAL_ENV_PROMPT
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Call hash to forget past commands. Without forgetting
|
|
||||||
# past commands the $PATH changes we made may not be respected
|
|
||||||
hash -r 2> /dev/null
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
|
||||||
# You cannot run it directly.
|
|
||||||
|
|
||||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
|
||||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
|
||||||
|
|
||||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
|
||||||
|
|
||||||
# Unset irrelevant variables.
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
setenv VIRTUAL_ENV /home/matsv/Documents/telegram-sticker-downloader/venv
|
|
||||||
|
|
||||||
set _OLD_VIRTUAL_PATH="$PATH"
|
|
||||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
|
||||||
|
|
||||||
|
|
||||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
|
||||||
|
|
||||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
|
||||||
set prompt = '(venv) '"$prompt"
|
|
||||||
setenv VIRTUAL_ENV_PROMPT '(venv) '
|
|
||||||
endif
|
|
||||||
|
|
||||||
alias pydoc python -m pydoc
|
|
||||||
|
|
||||||
rehash
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
|
||||||
# (https://fishshell.com/). You cannot run it directly.
|
|
||||||
|
|
||||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
|
||||||
# reset old environment variables
|
|
||||||
if test -n "$_OLD_VIRTUAL_PATH"
|
|
||||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
|
||||||
set -e _OLD_VIRTUAL_PATH
|
|
||||||
end
|
|
||||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
|
||||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
|
||||||
end
|
|
||||||
|
|
||||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
|
||||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
|
||||||
# prevents error when using nested fish instances (Issue #93858)
|
|
||||||
if functions -q _old_fish_prompt
|
|
||||||
functions -e fish_prompt
|
|
||||||
functions -c _old_fish_prompt fish_prompt
|
|
||||||
functions -e _old_fish_prompt
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
set -e VIRTUAL_ENV
|
|
||||||
set -e VIRTUAL_ENV_PROMPT
|
|
||||||
if test "$argv[1]" != "nondestructive"
|
|
||||||
# Self-destruct!
|
|
||||||
functions -e deactivate
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# Unset irrelevant variables.
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
set -gx VIRTUAL_ENV /home/matsv/Documents/telegram-sticker-downloader/venv
|
|
||||||
|
|
||||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
|
||||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
|
||||||
|
|
||||||
# Unset PYTHONHOME if set.
|
|
||||||
if set -q PYTHONHOME
|
|
||||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
|
||||||
set -e PYTHONHOME
|
|
||||||
end
|
|
||||||
|
|
||||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
|
||||||
# fish uses a function instead of an env var to generate the prompt.
|
|
||||||
|
|
||||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
|
||||||
functions -c fish_prompt _old_fish_prompt
|
|
||||||
|
|
||||||
# With the original prompt function renamed, we can override with our own.
|
|
||||||
function fish_prompt
|
|
||||||
# Save the return status of the last command.
|
|
||||||
set -l old_status $status
|
|
||||||
|
|
||||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
|
||||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
|
||||||
|
|
||||||
# Restore the return status of the previous command.
|
|
||||||
echo "exit $old_status" | .
|
|
||||||
# Output the original/"old" prompt.
|
|
||||||
_old_fish_prompt
|
|
||||||
end
|
|
||||||
|
|
||||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
|
||||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
|
||||||
end
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/home/matsv/Documents/telegram-sticker-downloader/venv/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from dotenv.__main__ import cli
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(cli())
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/home/matsv/Documents/telegram-sticker-downloader/venv/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from httpx import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/home/matsv/Documents/telegram-sticker-downloader/venv/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/home/matsv/Documents/telegram-sticker-downloader/venv/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/home/matsv/Documents/telegram-sticker-downloader/venv/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
python3
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
/usr/bin/python3
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
python3
|
|
||||||
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
pip
|
|
||||||
@@ -1,279 +0,0 @@
|
|||||||
A. HISTORY OF THE SOFTWARE
|
|
||||||
==========================
|
|
||||||
|
|
||||||
Python was created in the early 1990s by Guido van Rossum at Stichting
|
|
||||||
Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
|
|
||||||
as a successor of a language called ABC. Guido remains Python's
|
|
||||||
principal author, although it includes many contributions from others.
|
|
||||||
|
|
||||||
In 1995, Guido continued his work on Python at the Corporation for
|
|
||||||
National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
|
|
||||||
in Reston, Virginia where he released several versions of the
|
|
||||||
software.
|
|
||||||
|
|
||||||
In May 2000, Guido and the Python core development team moved to
|
|
||||||
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
|
||||||
year, the PythonLabs team moved to Digital Creations, which became
|
|
||||||
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
|
||||||
https://www.python.org/psf/) was formed, a non-profit organization
|
|
||||||
created specifically to own Python-related Intellectual Property.
|
|
||||||
Zope Corporation was a sponsoring member of the PSF.
|
|
||||||
|
|
||||||
All Python releases are Open Source (see https://opensource.org for
|
|
||||||
the Open Source Definition). Historically, most, but not all, Python
|
|
||||||
releases have also been GPL-compatible; the table below summarizes
|
|
||||||
the various releases.
|
|
||||||
|
|
||||||
Release Derived Year Owner GPL-
|
|
||||||
from compatible? (1)
|
|
||||||
|
|
||||||
0.9.0 thru 1.2 1991-1995 CWI yes
|
|
||||||
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
|
||||||
1.6 1.5.2 2000 CNRI no
|
|
||||||
2.0 1.6 2000 BeOpen.com no
|
|
||||||
1.6.1 1.6 2001 CNRI yes (2)
|
|
||||||
2.1 2.0+1.6.1 2001 PSF no
|
|
||||||
2.0.1 2.0+1.6.1 2001 PSF yes
|
|
||||||
2.1.1 2.1+2.0.1 2001 PSF yes
|
|
||||||
2.1.2 2.1.1 2002 PSF yes
|
|
||||||
2.1.3 2.1.2 2002 PSF yes
|
|
||||||
2.2 and above 2.1.1 2001-now PSF yes
|
|
||||||
|
|
||||||
Footnotes:
|
|
||||||
|
|
||||||
(1) GPL-compatible doesn't mean that we're distributing Python under
|
|
||||||
the GPL. All Python licenses, unlike the GPL, let you distribute
|
|
||||||
a modified version without making your changes open source. The
|
|
||||||
GPL-compatible licenses make it possible to combine Python with
|
|
||||||
other software that is released under the GPL; the others don't.
|
|
||||||
|
|
||||||
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
|
||||||
because its license has a choice of law clause. According to
|
|
||||||
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
|
||||||
is "not incompatible" with the GPL.
|
|
||||||
|
|
||||||
Thanks to the many outside volunteers who have worked under Guido's
|
|
||||||
direction to make these releases possible.
|
|
||||||
|
|
||||||
|
|
||||||
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
|
||||||
===============================================================
|
|
||||||
|
|
||||||
Python software and documentation are licensed under the
|
|
||||||
Python Software Foundation License Version 2.
|
|
||||||
|
|
||||||
Starting with Python 3.8.6, examples, recipes, and other code in
|
|
||||||
the documentation are dual licensed under the PSF License Version 2
|
|
||||||
and the Zero-Clause BSD license.
|
|
||||||
|
|
||||||
Some software incorporated into Python is under different licenses.
|
|
||||||
The licenses are listed with code falling under that license.
|
|
||||||
|
|
||||||
|
|
||||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
|
||||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
|
||||||
otherwise using this software ("Python") in source or binary form and
|
|
||||||
its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
|
||||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
|
||||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
|
||||||
distribute, and otherwise use Python alone or in any derivative version,
|
|
||||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
|
||||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
|
||||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
|
|
||||||
All Rights Reserved" are retained in Python alone or in any derivative version
|
|
||||||
prepared by Licensee.
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python.
|
|
||||||
|
|
||||||
4. PSF is making Python available to Licensee on an "AS IS"
|
|
||||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. Nothing in this License Agreement shall be deemed to create any
|
|
||||||
relationship of agency, partnership, or joint venture between PSF and
|
|
||||||
Licensee. This License Agreement does not grant permission to use PSF
|
|
||||||
trademarks or trade name in a trademark sense to endorse or promote
|
|
||||||
products or services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By copying, installing or otherwise using Python, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
|
||||||
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
|
||||||
Individual or Organization ("Licensee") accessing and otherwise using
|
|
||||||
this software in source or binary form and its associated
|
|
||||||
documentation ("the Software").
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this BeOpen Python License
|
|
||||||
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
|
||||||
royalty-free, world-wide license to reproduce, analyze, test, perform
|
|
||||||
and/or display publicly, prepare derivative works, distribute, and
|
|
||||||
otherwise use the Software alone or in any derivative version,
|
|
||||||
provided, however, that the BeOpen Python License is retained in the
|
|
||||||
Software, alone or in any derivative version prepared by Licensee.
|
|
||||||
|
|
||||||
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
|
||||||
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
|
||||||
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
|
||||||
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
|
||||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
5. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
6. This License Agreement shall be governed by and interpreted in all
|
|
||||||
respects by the law of the State of California, excluding conflict of
|
|
||||||
law provisions. Nothing in this License Agreement shall be deemed to
|
|
||||||
create any relationship of agency, partnership, or joint venture
|
|
||||||
between BeOpen and Licensee. This License Agreement does not grant
|
|
||||||
permission to use BeOpen trademarks or trade names in a trademark
|
|
||||||
sense to endorse or promote products or services of Licensee, or any
|
|
||||||
third party. As an exception, the "BeOpen Python" logos available at
|
|
||||||
http://www.pythonlabs.com/logos.html may be used according to the
|
|
||||||
permissions granted on that web page.
|
|
||||||
|
|
||||||
7. By copying, installing or otherwise using the software, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Corporation for National
|
|
||||||
Research Initiatives, having an office at 1895 Preston White Drive,
|
|
||||||
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
|
||||||
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
|
||||||
source or binary form and its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, CNRI
|
|
||||||
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
|
||||||
license to reproduce, analyze, test, perform and/or display publicly,
|
|
||||||
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
|
||||||
alone or in any derivative version, provided, however, that CNRI's
|
|
||||||
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
|
||||||
1995-2001 Corporation for National Research Initiatives; All Rights
|
|
||||||
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
|
||||||
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
|
||||||
Agreement, Licensee may substitute the following text (omitting the
|
|
||||||
quotes): "Python 1.6.1 is made available subject to the terms and
|
|
||||||
conditions in CNRI's License Agreement. This Agreement together with
|
|
||||||
Python 1.6.1 may be located on the internet using the following
|
|
||||||
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
|
||||||
Agreement may also be obtained from a proxy server on the internet
|
|
||||||
using the following URL: http://hdl.handle.net/1895.22/1013".
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python 1.6.1.
|
|
||||||
|
|
||||||
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
|
||||||
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. This License Agreement shall be governed by the federal
|
|
||||||
intellectual property law of the United States, including without
|
|
||||||
limitation the federal copyright law, and, to the extent such
|
|
||||||
U.S. federal law does not apply, by the law of the Commonwealth of
|
|
||||||
Virginia, excluding Virginia's conflict of law provisions.
|
|
||||||
Notwithstanding the foregoing, with regard to derivative works based
|
|
||||||
on Python 1.6.1 that incorporate non-separable material that was
|
|
||||||
previously distributed under the GNU General Public License (GPL), the
|
|
||||||
law of the Commonwealth of Virginia shall govern this License
|
|
||||||
Agreement only as to issues arising under or with respect to
|
|
||||||
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
|
||||||
License Agreement shall be deemed to create any relationship of
|
|
||||||
agency, partnership, or joint venture between CNRI and Licensee. This
|
|
||||||
License Agreement does not grant permission to use CNRI trademarks or
|
|
||||||
trade name in a trademark sense to endorse or promote products or
|
|
||||||
services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
|
||||||
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
|
||||||
bound by the terms and conditions of this License Agreement.
|
|
||||||
|
|
||||||
ACCEPT
|
|
||||||
|
|
||||||
|
|
||||||
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
|
||||||
--------------------------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
|
||||||
The Netherlands. All rights reserved.
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software and its
|
|
||||||
documentation for any purpose and without fee is hereby granted,
|
|
||||||
provided that the above copyright notice appear in all copies and that
|
|
||||||
both that copyright notice and this permission notice appear in
|
|
||||||
supporting documentation, and that the name of Stichting Mathematisch
|
|
||||||
Centrum or CWI not be used in advertising or publicity pertaining to
|
|
||||||
distribution of the software without specific, written prior
|
|
||||||
permission.
|
|
||||||
|
|
||||||
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
|
||||||
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
|
||||||
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
|
||||||
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
|
||||||
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
|
|
||||||
ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
|
||||||
purpose with or without fee is hereby granted.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
||||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|
||||||
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
||||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|
||||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|
||||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|
||||||
PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
@@ -1,123 +0,0 @@
|
|||||||
Metadata-Version: 2.3
|
|
||||||
Name: aiohappyeyeballs
|
|
||||||
Version: 2.6.1
|
|
||||||
Summary: Happy Eyeballs for asyncio
|
|
||||||
License: PSF-2.0
|
|
||||||
Author: J. Nick Koston
|
|
||||||
Author-email: nick@koston.org
|
|
||||||
Requires-Python: >=3.9
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: Natural Language :: English
|
|
||||||
Classifier: Operating System :: OS Independent
|
|
||||||
Classifier: Topic :: Software Development :: Libraries
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.9
|
|
||||||
Classifier: Programming Language :: Python :: 3.10
|
|
||||||
Classifier: Programming Language :: Python :: 3.11
|
|
||||||
Classifier: Programming Language :: Python :: 3.12
|
|
||||||
Classifier: Programming Language :: Python :: 3.13
|
|
||||||
Classifier: License :: OSI Approved :: Python Software Foundation License
|
|
||||||
Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues
|
|
||||||
Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md
|
|
||||||
Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io
|
|
||||||
Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs
|
|
||||||
Description-Content-Type: text/markdown
|
|
||||||
|
|
||||||
# aiohappyeyeballs
|
|
||||||
|
|
||||||
<p align="center">
|
|
||||||
<a href="https://github.com/aio-libs/aiohappyeyeballs/actions/workflows/ci.yml?query=branch%3Amain">
|
|
||||||
<img src="https://img.shields.io/github/actions/workflow/status/aio-libs/aiohappyeyeballs/ci-cd.yml?branch=main&label=CI&logo=github&style=flat-square" alt="CI Status" >
|
|
||||||
</a>
|
|
||||||
<a href="https://aiohappyeyeballs.readthedocs.io">
|
|
||||||
<img src="https://img.shields.io/readthedocs/aiohappyeyeballs.svg?logo=read-the-docs&logoColor=fff&style=flat-square" alt="Documentation Status">
|
|
||||||
</a>
|
|
||||||
<a href="https://codecov.io/gh/aio-libs/aiohappyeyeballs">
|
|
||||||
<img src="https://img.shields.io/codecov/c/github/aio-libs/aiohappyeyeballs.svg?logo=codecov&logoColor=fff&style=flat-square" alt="Test coverage percentage">
|
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
<p align="center">
|
|
||||||
<a href="https://python-poetry.org/">
|
|
||||||
<img src="https://img.shields.io/badge/packaging-poetry-299bd7?style=flat-square&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAASCAYAAABrXO8xAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAJJSURBVHgBfZLPa1NBEMe/s7tNXoxW1KJQKaUHkXhQvHgW6UHQQ09CBS/6V3hKc/AP8CqCrUcpmop3Cx48eDB4yEECjVQrlZb80CRN8t6OM/teagVxYZi38+Yz853dJbzoMV3MM8cJUcLMSUKIE8AzQ2PieZzFxEJOHMOgMQQ+dUgSAckNXhapU/NMhDSWLs1B24A8sO1xrN4NECkcAC9ASkiIJc6k5TRiUDPhnyMMdhKc+Zx19l6SgyeW76BEONY9exVQMzKExGKwwPsCzza7KGSSWRWEQhyEaDXp6ZHEr416ygbiKYOd7TEWvvcQIeusHYMJGhTwF9y7sGnSwaWyFAiyoxzqW0PM/RjghPxF2pWReAowTEXnDh0xgcLs8l2YQmOrj3N7ByiqEoH0cARs4u78WgAVkoEDIDoOi3AkcLOHU60RIg5wC4ZuTC7FaHKQm8Hq1fQuSOBvX/sodmNJSB5geaF5CPIkUeecdMxieoRO5jz9bheL6/tXjrwCyX/UYBUcjCaWHljx1xiX6z9xEjkYAzbGVnB8pvLmyXm9ep+W8CmsSHQQY77Zx1zboxAV0w7ybMhQmfqdmmw3nEp1I0Z+FGO6M8LZdoyZnuzzBdjISicKRnpxzI9fPb+0oYXsNdyi+d3h9bm9MWYHFtPeIZfLwzmFDKy1ai3p+PDls1Llz4yyFpferxjnyjJDSEy9CaCx5m2cJPerq6Xm34eTrZt3PqxYO1XOwDYZrFlH1fWnpU38Y9HRze3lj0vOujZcXKuuXm3jP+s3KbZVra7y2EAAAAAASUVORK5CYII=" alt="Poetry">
|
|
||||||
</a>
|
|
||||||
<a href="https://github.com/astral-sh/ruff">
|
|
||||||
<img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff">
|
|
||||||
</a>
|
|
||||||
<a href="https://github.com/pre-commit/pre-commit">
|
|
||||||
<img src="https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=flat-square" alt="pre-commit">
|
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
<p align="center">
|
|
||||||
<a href="https://pypi.org/project/aiohappyeyeballs/">
|
|
||||||
<img src="https://img.shields.io/pypi/v/aiohappyeyeballs.svg?logo=python&logoColor=fff&style=flat-square" alt="PyPI Version">
|
|
||||||
</a>
|
|
||||||
<img src="https://img.shields.io/pypi/pyversions/aiohappyeyeballs.svg?style=flat-square&logo=python&logoColor=fff" alt="Supported Python versions">
|
|
||||||
<img src="https://img.shields.io/pypi/l/aiohappyeyeballs.svg?style=flat-square" alt="License">
|
|
||||||
</p>
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Documentation**: <a href="https://aiohappyeyeballs.readthedocs.io" target="_blank">https://aiohappyeyeballs.readthedocs.io </a>
|
|
||||||
|
|
||||||
**Source Code**: <a href="https://github.com/aio-libs/aiohappyeyeballs" target="_blank">https://github.com/aio-libs/aiohappyeyeballs </a>
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
|
|
||||||
([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
|
|
||||||
|
|
||||||
## Use case
|
|
||||||
|
|
||||||
This library exists to allow connecting with
|
|
||||||
[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
|
|
||||||
([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
|
|
||||||
when you
|
|
||||||
already have a list of addrinfo and not a DNS name.
|
|
||||||
|
|
||||||
The stdlib version of `loop.create_connection()`
|
|
||||||
will only work when you pass in an unresolved name which
|
|
||||||
is not a good fit when using DNS caching or resolving
|
|
||||||
names via another method such as `zeroconf`.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
Install this via pip (or your favourite package manager):
|
|
||||||
|
|
||||||
`pip install aiohappyeyeballs`
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
[aiohappyeyeballs is licensed under the same terms as cpython itself.](https://github.com/python/cpython/blob/main/LICENSE)
|
|
||||||
|
|
||||||
## Example usage
|
|
||||||
|
|
||||||
```python
|
|
||||||
|
|
||||||
addr_infos = await loop.getaddrinfo("example.org", 80)
|
|
||||||
|
|
||||||
socket = await start_connection(addr_infos)
|
|
||||||
socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2)
|
|
||||||
|
|
||||||
transport, protocol = await loop.create_connection(
|
|
||||||
MyProtocol, sock=socket, ...)
|
|
||||||
|
|
||||||
# Remove the first address for each family from addr_info
|
|
||||||
pop_addr_infos_interleave(addr_info, 1)
|
|
||||||
|
|
||||||
# Remove all matching address from addr_info
|
|
||||||
remove_addr_infos(addr_info, "dead::beef::")
|
|
||||||
|
|
||||||
# Convert a local_addr to local_addr_infos
|
|
||||||
local_addr_infos = addr_to_addr_infos(("127.0.0.1",0))
|
|
||||||
```
|
|
||||||
|
|
||||||
## Credits
|
|
||||||
|
|
||||||
This package contains code from cpython and is licensed under the same terms as cpython itself.
|
|
||||||
|
|
||||||
This package was created with
|
|
||||||
[Copier](https://copier.readthedocs.io/) and the
|
|
||||||
[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template)
|
|
||||||
project template.
|
|
||||||
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
aiohappyeyeballs-2.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
aiohappyeyeballs-2.6.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
|
|
||||||
aiohappyeyeballs-2.6.1.dist-info/METADATA,sha256=NSXlhJwAfi380eEjAo7BQ4P_TVal9xi0qkyZWibMsVM,5915
|
|
||||||
aiohappyeyeballs-2.6.1.dist-info/RECORD,,
|
|
||||||
aiohappyeyeballs-2.6.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
|
|
||||||
aiohappyeyeballs/__init__.py,sha256=x7kktHEtaD9quBcWDJPuLeKyjuVAI-Jj14S9B_5hcTs,361
|
|
||||||
aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc,,
|
|
||||||
aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc,,
|
|
||||||
aiohappyeyeballs/__pycache__/impl.cpython-312.pyc,,
|
|
||||||
aiohappyeyeballs/__pycache__/types.cpython-312.pyc,,
|
|
||||||
aiohappyeyeballs/__pycache__/utils.cpython-312.pyc,,
|
|
||||||
aiohappyeyeballs/_staggered.py,sha256=edfVowFx-P-ywJjIEF3MdPtEMVODujV6CeMYr65otac,6900
|
|
||||||
aiohappyeyeballs/impl.py,sha256=Dlcm2mTJ28ucrGnxkb_fo9CZzLAkOOBizOt7dreBbXE,9681
|
|
||||||
aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
aiohappyeyeballs/types.py,sha256=YZJIAnyoV4Dz0WFtlaf_OyE4EW7Xus1z7aIfNI6tDDQ,425
|
|
||||||
aiohappyeyeballs/utils.py,sha256=on9GxIR0LhEfZu8P6Twi9hepX9zDanuZM20MWsb3xlQ,3028
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
Wheel-Version: 1.0
|
|
||||||
Generator: poetry-core 2.1.1
|
|
||||||
Root-Is-Purelib: true
|
|
||||||
Tag: py3-none-any
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
__version__ = "2.6.1"
|
|
||||||
|
|
||||||
from .impl import start_connection
|
|
||||||
from .types import AddrInfoType, SocketFactoryType
|
|
||||||
from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos
|
|
||||||
|
|
||||||
__all__ = (
|
|
||||||
"AddrInfoType",
|
|
||||||
"SocketFactoryType",
|
|
||||||
"addr_to_addr_infos",
|
|
||||||
"pop_addr_infos_interleave",
|
|
||||||
"remove_addr_infos",
|
|
||||||
"start_connection",
|
|
||||||
)
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,207 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import contextlib
|
|
||||||
|
|
||||||
# PY3.9: Import Callable from typing until we drop Python 3.9 support
|
|
||||||
# https://github.com/python/cpython/issues/87131
|
|
||||||
from typing import (
|
|
||||||
TYPE_CHECKING,
|
|
||||||
Any,
|
|
||||||
Awaitable,
|
|
||||||
Callable,
|
|
||||||
Iterable,
|
|
||||||
List,
|
|
||||||
Optional,
|
|
||||||
Set,
|
|
||||||
Tuple,
|
|
||||||
TypeVar,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
_T = TypeVar("_T")
|
|
||||||
|
|
||||||
RE_RAISE_EXCEPTIONS = (SystemExit, KeyboardInterrupt)
|
|
||||||
|
|
||||||
|
|
||||||
def _set_result(wait_next: "asyncio.Future[None]") -> None:
|
|
||||||
"""Set the result of a future if it is not already done."""
|
|
||||||
if not wait_next.done():
|
|
||||||
wait_next.set_result(None)
|
|
||||||
|
|
||||||
|
|
||||||
async def _wait_one(
|
|
||||||
futures: "Iterable[asyncio.Future[Any]]",
|
|
||||||
loop: asyncio.AbstractEventLoop,
|
|
||||||
) -> _T:
|
|
||||||
"""Wait for the first future to complete."""
|
|
||||||
wait_next = loop.create_future()
|
|
||||||
|
|
||||||
def _on_completion(fut: "asyncio.Future[Any]") -> None:
|
|
||||||
if not wait_next.done():
|
|
||||||
wait_next.set_result(fut)
|
|
||||||
|
|
||||||
for f in futures:
|
|
||||||
f.add_done_callback(_on_completion)
|
|
||||||
|
|
||||||
try:
|
|
||||||
return await wait_next
|
|
||||||
finally:
|
|
||||||
for f in futures:
|
|
||||||
f.remove_done_callback(_on_completion)
|
|
||||||
|
|
||||||
|
|
||||||
async def staggered_race(
|
|
||||||
coro_fns: Iterable[Callable[[], Awaitable[_T]]],
|
|
||||||
delay: Optional[float],
|
|
||||||
*,
|
|
||||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
|
||||||
) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]:
|
|
||||||
"""
|
|
||||||
Run coroutines with staggered start times and take the first to finish.
|
|
||||||
|
|
||||||
This method takes an iterable of coroutine functions. The first one is
|
|
||||||
started immediately. From then on, whenever the immediately preceding one
|
|
||||||
fails (raises an exception), or when *delay* seconds has passed, the next
|
|
||||||
coroutine is started. This continues until one of the coroutines complete
|
|
||||||
successfully, in which case all others are cancelled, or until all
|
|
||||||
coroutines fail.
|
|
||||||
|
|
||||||
The coroutines provided should be well-behaved in the following way:
|
|
||||||
|
|
||||||
* They should only ``return`` if completed successfully.
|
|
||||||
|
|
||||||
* They should always raise an exception if they did not complete
|
|
||||||
successfully. In particular, if they handle cancellation, they should
|
|
||||||
probably reraise, like this::
|
|
||||||
|
|
||||||
try:
|
|
||||||
# do work
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
# undo partially completed work
|
|
||||||
raise
|
|
||||||
|
|
||||||
Args:
|
|
||||||
----
|
|
||||||
coro_fns: an iterable of coroutine functions, i.e. callables that
|
|
||||||
return a coroutine object when called. Use ``functools.partial`` or
|
|
||||||
lambdas to pass arguments.
|
|
||||||
|
|
||||||
delay: amount of time, in seconds, between starting coroutines. If
|
|
||||||
``None``, the coroutines will run sequentially.
|
|
||||||
|
|
||||||
loop: the event loop to use. If ``None``, the running loop is used.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
-------
|
|
||||||
tuple *(winner_result, winner_index, exceptions)* where
|
|
||||||
|
|
||||||
- *winner_result*: the result of the winning coroutine, or ``None``
|
|
||||||
if no coroutines won.
|
|
||||||
|
|
||||||
- *winner_index*: the index of the winning coroutine in
|
|
||||||
``coro_fns``, or ``None`` if no coroutines won. If the winning
|
|
||||||
coroutine may return None on success, *winner_index* can be used
|
|
||||||
to definitively determine whether any coroutine won.
|
|
||||||
|
|
||||||
- *exceptions*: list of exceptions returned by the coroutines.
|
|
||||||
``len(exceptions)`` is equal to the number of coroutines actually
|
|
||||||
started, and the order is the same as in ``coro_fns``. The winning
|
|
||||||
coroutine's entry is ``None``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
loop = loop or asyncio.get_running_loop()
|
|
||||||
exceptions: List[Optional[BaseException]] = []
|
|
||||||
tasks: Set[asyncio.Task[Optional[Tuple[_T, int]]]] = set()
|
|
||||||
|
|
||||||
async def run_one_coro(
|
|
||||||
coro_fn: Callable[[], Awaitable[_T]],
|
|
||||||
this_index: int,
|
|
||||||
start_next: "asyncio.Future[None]",
|
|
||||||
) -> Optional[Tuple[_T, int]]:
|
|
||||||
"""
|
|
||||||
Run a single coroutine.
|
|
||||||
|
|
||||||
If the coroutine fails, set the exception in the exceptions list and
|
|
||||||
start the next coroutine by setting the result of the start_next.
|
|
||||||
|
|
||||||
If the coroutine succeeds, return the result and the index of the
|
|
||||||
coroutine in the coro_fns list.
|
|
||||||
|
|
||||||
If SystemExit or KeyboardInterrupt is raised, re-raise it.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
result = await coro_fn()
|
|
||||||
except RE_RAISE_EXCEPTIONS:
|
|
||||||
raise
|
|
||||||
except BaseException as e:
|
|
||||||
exceptions[this_index] = e
|
|
||||||
_set_result(start_next) # Kickstart the next coroutine
|
|
||||||
return None
|
|
||||||
|
|
||||||
return result, this_index
|
|
||||||
|
|
||||||
start_next_timer: Optional[asyncio.TimerHandle] = None
|
|
||||||
start_next: Optional[asyncio.Future[None]]
|
|
||||||
task: asyncio.Task[Optional[Tuple[_T, int]]]
|
|
||||||
done: Union[asyncio.Future[None], asyncio.Task[Optional[Tuple[_T, int]]]]
|
|
||||||
coro_iter = iter(coro_fns)
|
|
||||||
this_index = -1
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
if coro_fn := next(coro_iter, None):
|
|
||||||
this_index += 1
|
|
||||||
exceptions.append(None)
|
|
||||||
start_next = loop.create_future()
|
|
||||||
task = loop.create_task(run_one_coro(coro_fn, this_index, start_next))
|
|
||||||
tasks.add(task)
|
|
||||||
start_next_timer = (
|
|
||||||
loop.call_later(delay, _set_result, start_next) if delay else None
|
|
||||||
)
|
|
||||||
elif not tasks:
|
|
||||||
# We exhausted the coro_fns list and no tasks are running
|
|
||||||
# so we have no winner and all coroutines failed.
|
|
||||||
break
|
|
||||||
|
|
||||||
while tasks or start_next:
|
|
||||||
done = await _wait_one(
|
|
||||||
(*tasks, start_next) if start_next else tasks, loop
|
|
||||||
)
|
|
||||||
if done is start_next:
|
|
||||||
# The current task has failed or the timer has expired
|
|
||||||
# so we need to start the next task.
|
|
||||||
start_next = None
|
|
||||||
if start_next_timer:
|
|
||||||
start_next_timer.cancel()
|
|
||||||
start_next_timer = None
|
|
||||||
|
|
||||||
# Break out of the task waiting loop to start the next
|
|
||||||
# task.
|
|
||||||
break
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
assert isinstance(done, asyncio.Task)
|
|
||||||
|
|
||||||
tasks.remove(done)
|
|
||||||
if winner := done.result():
|
|
||||||
return *winner, exceptions
|
|
||||||
finally:
|
|
||||||
# We either have:
|
|
||||||
# - a winner
|
|
||||||
# - all tasks failed
|
|
||||||
# - a KeyboardInterrupt or SystemExit.
|
|
||||||
|
|
||||||
#
|
|
||||||
# If the timer is still running, cancel it.
|
|
||||||
#
|
|
||||||
if start_next_timer:
|
|
||||||
start_next_timer.cancel()
|
|
||||||
|
|
||||||
#
|
|
||||||
# If there are any tasks left, cancel them and than
|
|
||||||
# wait them so they fill the exceptions list.
|
|
||||||
#
|
|
||||||
for task in tasks:
|
|
||||||
task.cancel()
|
|
||||||
with contextlib.suppress(asyncio.CancelledError):
|
|
||||||
await task
|
|
||||||
|
|
||||||
return None, None, exceptions
|
|
||||||
@@ -1,259 +0,0 @@
|
|||||||
"""Base implementation."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import collections
|
|
||||||
import contextlib
|
|
||||||
import functools
|
|
||||||
import itertools
|
|
||||||
import socket
|
|
||||||
from typing import List, Optional, Sequence, Set, Union
|
|
||||||
|
|
||||||
from . import _staggered
|
|
||||||
from .types import AddrInfoType, SocketFactoryType
|
|
||||||
|
|
||||||
|
|
||||||
async def start_connection(
|
|
||||||
addr_infos: Sequence[AddrInfoType],
|
|
||||||
*,
|
|
||||||
local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
|
|
||||||
happy_eyeballs_delay: Optional[float] = None,
|
|
||||||
interleave: Optional[int] = None,
|
|
||||||
loop: Optional[asyncio.AbstractEventLoop] = None,
|
|
||||||
socket_factory: Optional[SocketFactoryType] = None,
|
|
||||||
) -> socket.socket:
|
|
||||||
"""
|
|
||||||
Connect to a TCP server.
|
|
||||||
|
|
||||||
Create a socket connection to a specified destination. The
|
|
||||||
destination is specified as a list of AddrInfoType tuples as
|
|
||||||
returned from getaddrinfo().
|
|
||||||
|
|
||||||
The arguments are, in order:
|
|
||||||
|
|
||||||
* ``family``: the address family, e.g. ``socket.AF_INET`` or
|
|
||||||
``socket.AF_INET6``.
|
|
||||||
* ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or
|
|
||||||
``socket.SOCK_DGRAM``.
|
|
||||||
* ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or
|
|
||||||
``socket.IPPROTO_UDP``.
|
|
||||||
* ``canonname``: the canonical name of the address, e.g.
|
|
||||||
``"www.python.org"``.
|
|
||||||
* ``sockaddr``: the socket address
|
|
||||||
|
|
||||||
This method is a coroutine which will try to establish the connection
|
|
||||||
in the background. When successful, the coroutine returns a
|
|
||||||
socket.
|
|
||||||
|
|
||||||
The expected use case is to use this method in conjunction with
|
|
||||||
loop.create_connection() to establish a connection to a server::
|
|
||||||
|
|
||||||
socket = await start_connection(addr_infos)
|
|
||||||
transport, protocol = await loop.create_connection(
|
|
||||||
MyProtocol, sock=socket, ...)
|
|
||||||
"""
|
|
||||||
if not (current_loop := loop):
|
|
||||||
current_loop = asyncio.get_running_loop()
|
|
||||||
|
|
||||||
single_addr_info = len(addr_infos) == 1
|
|
||||||
|
|
||||||
if happy_eyeballs_delay is not None and interleave is None:
|
|
||||||
# If using happy eyeballs, default to interleave addresses by family
|
|
||||||
interleave = 1
|
|
||||||
|
|
||||||
if interleave and not single_addr_info:
|
|
||||||
addr_infos = _interleave_addrinfos(addr_infos, interleave)
|
|
||||||
|
|
||||||
sock: Optional[socket.socket] = None
|
|
||||||
# uvloop can raise RuntimeError instead of OSError
|
|
||||||
exceptions: List[List[Union[OSError, RuntimeError]]] = []
|
|
||||||
if happy_eyeballs_delay is None or single_addr_info:
|
|
||||||
# not using happy eyeballs
|
|
||||||
for addrinfo in addr_infos:
|
|
||||||
try:
|
|
||||||
sock = await _connect_sock(
|
|
||||||
current_loop,
|
|
||||||
exceptions,
|
|
||||||
addrinfo,
|
|
||||||
local_addr_infos,
|
|
||||||
None,
|
|
||||||
socket_factory,
|
|
||||||
)
|
|
||||||
break
|
|
||||||
except (RuntimeError, OSError):
|
|
||||||
continue
|
|
||||||
else: # using happy eyeballs
|
|
||||||
open_sockets: Set[socket.socket] = set()
|
|
||||||
try:
|
|
||||||
sock, _, _ = await _staggered.staggered_race(
|
|
||||||
(
|
|
||||||
functools.partial(
|
|
||||||
_connect_sock,
|
|
||||||
current_loop,
|
|
||||||
exceptions,
|
|
||||||
addrinfo,
|
|
||||||
local_addr_infos,
|
|
||||||
open_sockets,
|
|
||||||
socket_factory,
|
|
||||||
)
|
|
||||||
for addrinfo in addr_infos
|
|
||||||
),
|
|
||||||
happy_eyeballs_delay,
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
# If we have a winner, staggered_race will
|
|
||||||
# cancel the other tasks, however there is a
|
|
||||||
# small race window where any of the other tasks
|
|
||||||
# can be done before they are cancelled which
|
|
||||||
# will leave the socket open. To avoid this problem
|
|
||||||
# we pass a set to _connect_sock to keep track of
|
|
||||||
# the open sockets and close them here if there
|
|
||||||
# are any "runner up" sockets.
|
|
||||||
for s in open_sockets:
|
|
||||||
if s is not sock:
|
|
||||||
with contextlib.suppress(OSError):
|
|
||||||
s.close()
|
|
||||||
open_sockets = None # type: ignore[assignment]
|
|
||||||
|
|
||||||
if sock is None:
|
|
||||||
all_exceptions = [exc for sub in exceptions for exc in sub]
|
|
||||||
try:
|
|
||||||
first_exception = all_exceptions[0]
|
|
||||||
if len(all_exceptions) == 1:
|
|
||||||
raise first_exception
|
|
||||||
else:
|
|
||||||
# If they all have the same str(), raise one.
|
|
||||||
model = str(first_exception)
|
|
||||||
if all(str(exc) == model for exc in all_exceptions):
|
|
||||||
raise first_exception
|
|
||||||
# Raise a combined exception so the user can see all
|
|
||||||
# the various error messages.
|
|
||||||
msg = "Multiple exceptions: {}".format(
|
|
||||||
", ".join(str(exc) for exc in all_exceptions)
|
|
||||||
)
|
|
||||||
# If the errno is the same for all exceptions, raise
|
|
||||||
# an OSError with that errno.
|
|
||||||
if isinstance(first_exception, OSError):
|
|
||||||
first_errno = first_exception.errno
|
|
||||||
if all(
|
|
||||||
isinstance(exc, OSError) and exc.errno == first_errno
|
|
||||||
for exc in all_exceptions
|
|
||||||
):
|
|
||||||
raise OSError(first_errno, msg)
|
|
||||||
elif isinstance(first_exception, RuntimeError) and all(
|
|
||||||
isinstance(exc, RuntimeError) for exc in all_exceptions
|
|
||||||
):
|
|
||||||
raise RuntimeError(msg)
|
|
||||||
# We have a mix of OSError and RuntimeError
|
|
||||||
# so we have to pick which one to raise.
|
|
||||||
# and we raise OSError for compatibility
|
|
||||||
raise OSError(msg)
|
|
||||||
finally:
|
|
||||||
all_exceptions = None # type: ignore[assignment]
|
|
||||||
exceptions = None # type: ignore[assignment]
|
|
||||||
|
|
||||||
return sock
|
|
||||||
|
|
||||||
|
|
||||||
async def _connect_sock(
|
|
||||||
loop: asyncio.AbstractEventLoop,
|
|
||||||
exceptions: List[List[Union[OSError, RuntimeError]]],
|
|
||||||
addr_info: AddrInfoType,
|
|
||||||
local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
|
|
||||||
open_sockets: Optional[Set[socket.socket]] = None,
|
|
||||||
socket_factory: Optional[SocketFactoryType] = None,
|
|
||||||
) -> socket.socket:
|
|
||||||
"""
|
|
||||||
Create, bind and connect one socket.
|
|
||||||
|
|
||||||
If open_sockets is passed, add the socket to the set of open sockets.
|
|
||||||
Any failure caught here will remove the socket from the set and close it.
|
|
||||||
|
|
||||||
Callers can use this set to close any sockets that are not the winner
|
|
||||||
of all staggered tasks in the result there are runner up sockets aka
|
|
||||||
multiple winners.
|
|
||||||
"""
|
|
||||||
my_exceptions: List[Union[OSError, RuntimeError]] = []
|
|
||||||
exceptions.append(my_exceptions)
|
|
||||||
family, type_, proto, _, address = addr_info
|
|
||||||
sock = None
|
|
||||||
try:
|
|
||||||
if socket_factory is not None:
|
|
||||||
sock = socket_factory(addr_info)
|
|
||||||
else:
|
|
||||||
sock = socket.socket(family=family, type=type_, proto=proto)
|
|
||||||
if open_sockets is not None:
|
|
||||||
open_sockets.add(sock)
|
|
||||||
sock.setblocking(False)
|
|
||||||
if local_addr_infos is not None:
|
|
||||||
for lfamily, _, _, _, laddr in local_addr_infos:
|
|
||||||
# skip local addresses of different family
|
|
||||||
if lfamily != family:
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
sock.bind(laddr)
|
|
||||||
break
|
|
||||||
except OSError as exc:
|
|
||||||
msg = (
|
|
||||||
f"error while attempting to bind on "
|
|
||||||
f"address {laddr!r}: "
|
|
||||||
f"{(exc.strerror or '').lower()}"
|
|
||||||
)
|
|
||||||
exc = OSError(exc.errno, msg)
|
|
||||||
my_exceptions.append(exc)
|
|
||||||
else: # all bind attempts failed
|
|
||||||
if my_exceptions:
|
|
||||||
raise my_exceptions.pop()
|
|
||||||
else:
|
|
||||||
raise OSError(f"no matching local address with {family=} found")
|
|
||||||
await loop.sock_connect(sock, address)
|
|
||||||
return sock
|
|
||||||
except (RuntimeError, OSError) as exc:
|
|
||||||
my_exceptions.append(exc)
|
|
||||||
if sock is not None:
|
|
||||||
if open_sockets is not None:
|
|
||||||
open_sockets.remove(sock)
|
|
||||||
try:
|
|
||||||
sock.close()
|
|
||||||
except OSError as e:
|
|
||||||
my_exceptions.append(e)
|
|
||||||
raise
|
|
||||||
raise
|
|
||||||
except:
|
|
||||||
if sock is not None:
|
|
||||||
if open_sockets is not None:
|
|
||||||
open_sockets.remove(sock)
|
|
||||||
try:
|
|
||||||
sock.close()
|
|
||||||
except OSError as e:
|
|
||||||
my_exceptions.append(e)
|
|
||||||
raise
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
exceptions = my_exceptions = None # type: ignore[assignment]
|
|
||||||
|
|
||||||
|
|
||||||
def _interleave_addrinfos(
|
|
||||||
addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1
|
|
||||||
) -> List[AddrInfoType]:
|
|
||||||
"""Interleave list of addrinfo tuples by family."""
|
|
||||||
# Group addresses by family
|
|
||||||
addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = (
|
|
||||||
collections.OrderedDict()
|
|
||||||
)
|
|
||||||
for addr in addrinfos:
|
|
||||||
family = addr[0]
|
|
||||||
if family not in addrinfos_by_family:
|
|
||||||
addrinfos_by_family[family] = []
|
|
||||||
addrinfos_by_family[family].append(addr)
|
|
||||||
addrinfos_lists = list(addrinfos_by_family.values())
|
|
||||||
|
|
||||||
reordered: List[AddrInfoType] = []
|
|
||||||
if first_address_family_count > 1:
|
|
||||||
reordered.extend(addrinfos_lists[0][: first_address_family_count - 1])
|
|
||||||
del addrinfos_lists[0][: first_address_family_count - 1]
|
|
||||||
reordered.extend(
|
|
||||||
a
|
|
||||||
for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists))
|
|
||||||
if a is not None
|
|
||||||
)
|
|
||||||
return reordered
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
"""Types for aiohappyeyeballs."""
|
|
||||||
|
|
||||||
import socket
|
|
||||||
|
|
||||||
# PY3.9: Import Callable from typing until we drop Python 3.9 support
|
|
||||||
# https://github.com/python/cpython/issues/87131
|
|
||||||
from typing import Callable, Tuple, Union
|
|
||||||
|
|
||||||
AddrInfoType = Tuple[
|
|
||||||
Union[int, socket.AddressFamily],
|
|
||||||
Union[int, socket.SocketKind],
|
|
||||||
int,
|
|
||||||
str,
|
|
||||||
Tuple, # type: ignore[type-arg]
|
|
||||||
]
|
|
||||||
|
|
||||||
SocketFactoryType = Callable[[AddrInfoType], socket.socket]
|
|
||||||
@@ -1,97 +0,0 @@
|
|||||||
"""Utility functions for aiohappyeyeballs."""
|
|
||||||
|
|
||||||
import ipaddress
|
|
||||||
import socket
|
|
||||||
from typing import Dict, List, Optional, Tuple, Union
|
|
||||||
|
|
||||||
from .types import AddrInfoType
|
|
||||||
|
|
||||||
|
|
||||||
def addr_to_addr_infos(
|
|
||||||
addr: Optional[
|
|
||||||
Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]]
|
|
||||||
],
|
|
||||||
) -> Optional[List[AddrInfoType]]:
|
|
||||||
"""Convert an address tuple to a list of addr_info tuples."""
|
|
||||||
if addr is None:
|
|
||||||
return None
|
|
||||||
host = addr[0]
|
|
||||||
port = addr[1]
|
|
||||||
is_ipv6 = ":" in host
|
|
||||||
if is_ipv6:
|
|
||||||
flowinfo = 0
|
|
||||||
scopeid = 0
|
|
||||||
addr_len = len(addr)
|
|
||||||
if addr_len >= 4:
|
|
||||||
scopeid = addr[3] # type: ignore[misc]
|
|
||||||
if addr_len >= 3:
|
|
||||||
flowinfo = addr[2] # type: ignore[misc]
|
|
||||||
addr = (host, port, flowinfo, scopeid)
|
|
||||||
family = socket.AF_INET6
|
|
||||||
else:
|
|
||||||
addr = (host, port)
|
|
||||||
family = socket.AF_INET
|
|
||||||
return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)]
|
|
||||||
|
|
||||||
|
|
||||||
def pop_addr_infos_interleave(
|
|
||||||
addr_infos: List[AddrInfoType], interleave: Optional[int] = None
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Pop addr_info from the list of addr_infos by family up to interleave times.
|
|
||||||
|
|
||||||
The interleave parameter is used to know how many addr_infos for
|
|
||||||
each family should be popped of the top of the list.
|
|
||||||
"""
|
|
||||||
seen: Dict[int, int] = {}
|
|
||||||
if interleave is None:
|
|
||||||
interleave = 1
|
|
||||||
to_remove: List[AddrInfoType] = []
|
|
||||||
for addr_info in addr_infos:
|
|
||||||
family = addr_info[0]
|
|
||||||
if family not in seen:
|
|
||||||
seen[family] = 0
|
|
||||||
if seen[family] < interleave:
|
|
||||||
to_remove.append(addr_info)
|
|
||||||
seen[family] += 1
|
|
||||||
for addr_info in to_remove:
|
|
||||||
addr_infos.remove(addr_info)
|
|
||||||
|
|
||||||
|
|
||||||
def _addr_tuple_to_ip_address(
|
|
||||||
addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
|
|
||||||
) -> Union[
|
|
||||||
Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int]
|
|
||||||
]:
|
|
||||||
"""Convert an address tuple to an IPv4Address."""
|
|
||||||
return (ipaddress.ip_address(addr[0]), *addr[1:])
|
|
||||||
|
|
||||||
|
|
||||||
def remove_addr_infos(
|
|
||||||
addr_infos: List[AddrInfoType],
|
|
||||||
addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Remove an address from the list of addr_infos.
|
|
||||||
|
|
||||||
The addr value is typically the return value of
|
|
||||||
sock.getpeername().
|
|
||||||
"""
|
|
||||||
bad_addrs_infos: List[AddrInfoType] = []
|
|
||||||
for addr_info in addr_infos:
|
|
||||||
if addr_info[-1] == addr:
|
|
||||||
bad_addrs_infos.append(addr_info)
|
|
||||||
if bad_addrs_infos:
|
|
||||||
for bad_addr_info in bad_addrs_infos:
|
|
||||||
addr_infos.remove(bad_addr_info)
|
|
||||||
return
|
|
||||||
# Slow path in case addr is formatted differently
|
|
||||||
match_addr = _addr_tuple_to_ip_address(addr)
|
|
||||||
for addr_info in addr_infos:
|
|
||||||
if match_addr == _addr_tuple_to_ip_address(addr_info[-1]):
|
|
||||||
bad_addrs_infos.append(addr_info)
|
|
||||||
if bad_addrs_infos:
|
|
||||||
for bad_addr_info in bad_addrs_infos:
|
|
||||||
addr_infos.remove(bad_addr_info)
|
|
||||||
return
|
|
||||||
raise ValueError(f"Address {addr} not found in addr_infos")
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
pip
|
|
||||||
@@ -1,250 +0,0 @@
|
|||||||
Metadata-Version: 2.4
|
|
||||||
Name: aiohttp
|
|
||||||
Version: 3.12.14
|
|
||||||
Summary: Async http client/server framework (asyncio)
|
|
||||||
Home-page: https://github.com/aio-libs/aiohttp
|
|
||||||
Maintainer: aiohttp team <team@aiohttp.org>
|
|
||||||
Maintainer-email: team@aiohttp.org
|
|
||||||
License: Apache-2.0
|
|
||||||
Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
|
|
||||||
Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
|
|
||||||
Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
|
|
||||||
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
|
|
||||||
Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
|
|
||||||
Project-URL: Docs: RTD, https://docs.aiohttp.org
|
|
||||||
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
|
|
||||||
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Framework :: AsyncIO
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Operating System :: MacOS :: MacOS X
|
|
||||||
Classifier: Operating System :: Microsoft :: Windows
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.9
|
|
||||||
Classifier: Programming Language :: Python :: 3.10
|
|
||||||
Classifier: Programming Language :: Python :: 3.11
|
|
||||||
Classifier: Programming Language :: Python :: 3.12
|
|
||||||
Classifier: Programming Language :: Python :: 3.13
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP
|
|
||||||
Requires-Python: >=3.9
|
|
||||||
Description-Content-Type: text/x-rst
|
|
||||||
License-File: LICENSE.txt
|
|
||||||
Requires-Dist: aiohappyeyeballs>=2.5.0
|
|
||||||
Requires-Dist: aiosignal>=1.4.0
|
|
||||||
Requires-Dist: async-timeout<6.0,>=4.0; python_version < "3.11"
|
|
||||||
Requires-Dist: attrs>=17.3.0
|
|
||||||
Requires-Dist: frozenlist>=1.1.1
|
|
||||||
Requires-Dist: multidict<7.0,>=4.5
|
|
||||||
Requires-Dist: propcache>=0.2.0
|
|
||||||
Requires-Dist: yarl<2.0,>=1.17.0
|
|
||||||
Provides-Extra: speedups
|
|
||||||
Requires-Dist: aiodns>=3.3.0; extra == "speedups"
|
|
||||||
Requires-Dist: Brotli; platform_python_implementation == "CPython" and extra == "speedups"
|
|
||||||
Requires-Dist: brotlicffi; platform_python_implementation != "CPython" and extra == "speedups"
|
|
||||||
Dynamic: license-file
|
|
||||||
|
|
||||||
==================================
|
|
||||||
Async http client/server framework
|
|
||||||
==================================
|
|
||||||
|
|
||||||
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
|
|
||||||
:height: 64px
|
|
||||||
:width: 64px
|
|
||||||
:alt: aiohttp logo
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
||||||
.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
|
|
||||||
:target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
|
|
||||||
:alt: GitHub Actions status for master branch
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
|
|
||||||
:target: https://codecov.io/gh/aio-libs/aiohttp
|
|
||||||
:alt: codecov.io status for master branch
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
|
|
||||||
:target: https://codspeed.io/aio-libs/aiohttp
|
|
||||||
:alt: Codspeed.io status for aiohttp
|
|
||||||
|
|
||||||
.. image:: https://badge.fury.io/py/aiohttp.svg
|
|
||||||
:target: https://pypi.org/project/aiohttp
|
|
||||||
:alt: Latest PyPI package version
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
|
|
||||||
:target: https://docs.aiohttp.org/
|
|
||||||
:alt: Latest Read The Docs
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
|
||||||
:target: https://matrix.to/#/%23aio-libs:matrix.org
|
|
||||||
:alt: Matrix Room — #aio-libs:matrix.org
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
|
||||||
:target: https://matrix.to/#/%23aio-libs-space:matrix.org
|
|
||||||
:alt: Matrix Space — #aio-libs-space:matrix.org
|
|
||||||
|
|
||||||
|
|
||||||
Key Features
|
|
||||||
============
|
|
||||||
|
|
||||||
- Supports both client and server side of HTTP protocol.
|
|
||||||
- Supports both client and server Web-Sockets out-of-the-box and avoids
|
|
||||||
Callback Hell.
|
|
||||||
- Provides Web-server with middleware and pluggable routing.
|
|
||||||
|
|
||||||
|
|
||||||
Getting started
|
|
||||||
===============
|
|
||||||
|
|
||||||
Client
|
|
||||||
------
|
|
||||||
|
|
||||||
To get something from the web:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
async with session.get('http://python.org') as response:
|
|
||||||
|
|
||||||
print("Status:", response.status)
|
|
||||||
print("Content-type:", response.headers['content-type'])
|
|
||||||
|
|
||||||
html = await response.text()
|
|
||||||
print("Body:", html[:15], "...")
|
|
||||||
|
|
||||||
asyncio.run(main())
|
|
||||||
|
|
||||||
This prints:
|
|
||||||
|
|
||||||
.. code-block::
|
|
||||||
|
|
||||||
Status: 200
|
|
||||||
Content-type: text/html; charset=utf-8
|
|
||||||
Body: <!doctype html> ...
|
|
||||||
|
|
||||||
Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
|
|
||||||
|
|
||||||
Server
|
|
||||||
------
|
|
||||||
|
|
||||||
An example using a simple server:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# examples/server_simple.py
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
async def handle(request):
|
|
||||||
name = request.match_info.get('name', "Anonymous")
|
|
||||||
text = "Hello, " + name
|
|
||||||
return web.Response(text=text)
|
|
||||||
|
|
||||||
async def wshandle(request):
|
|
||||||
ws = web.WebSocketResponse()
|
|
||||||
await ws.prepare(request)
|
|
||||||
|
|
||||||
async for msg in ws:
|
|
||||||
if msg.type == web.WSMsgType.text:
|
|
||||||
await ws.send_str("Hello, {}".format(msg.data))
|
|
||||||
elif msg.type == web.WSMsgType.binary:
|
|
||||||
await ws.send_bytes(msg.data)
|
|
||||||
elif msg.type == web.WSMsgType.close:
|
|
||||||
break
|
|
||||||
|
|
||||||
return ws
|
|
||||||
|
|
||||||
|
|
||||||
app = web.Application()
|
|
||||||
app.add_routes([web.get('/', handle),
|
|
||||||
web.get('/echo', wshandle),
|
|
||||||
web.get('/{name}', handle)])
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
web.run_app(app)
|
|
||||||
|
|
||||||
|
|
||||||
Documentation
|
|
||||||
=============
|
|
||||||
|
|
||||||
https://aiohttp.readthedocs.io/
|
|
||||||
|
|
||||||
|
|
||||||
Demos
|
|
||||||
=====
|
|
||||||
|
|
||||||
https://github.com/aio-libs/aiohttp-demos
|
|
||||||
|
|
||||||
|
|
||||||
External links
|
|
||||||
==============
|
|
||||||
|
|
||||||
* `Third party libraries
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
|
|
||||||
* `Built with aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
|
|
||||||
* `Powered by aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
|
|
||||||
|
|
||||||
Feel free to make a Pull Request for adding your link to these pages!
|
|
||||||
|
|
||||||
|
|
||||||
Communication channels
|
|
||||||
======================
|
|
||||||
|
|
||||||
*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions
|
|
||||||
|
|
||||||
*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_
|
|
||||||
|
|
||||||
We support `Stack Overflow
|
|
||||||
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
|
|
||||||
Please add *aiohttp* tag to your question there.
|
|
||||||
|
|
||||||
Requirements
|
|
||||||
============
|
|
||||||
|
|
||||||
- attrs_
|
|
||||||
- multidict_
|
|
||||||
- yarl_
|
|
||||||
- frozenlist_
|
|
||||||
|
|
||||||
Optionally you may install the aiodns_ library (highly recommended for sake of speed).
|
|
||||||
|
|
||||||
.. _aiodns: https://pypi.python.org/pypi/aiodns
|
|
||||||
.. _attrs: https://github.com/python-attrs/attrs
|
|
||||||
.. _multidict: https://pypi.python.org/pypi/multidict
|
|
||||||
.. _frozenlist: https://pypi.org/project/frozenlist/
|
|
||||||
.. _yarl: https://pypi.python.org/pypi/yarl
|
|
||||||
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
|
|
||||||
|
|
||||||
License
|
|
||||||
=======
|
|
||||||
|
|
||||||
``aiohttp`` is offered under the Apache 2 license.
|
|
||||||
|
|
||||||
|
|
||||||
Keepsafe
|
|
||||||
========
|
|
||||||
|
|
||||||
The aiohttp community would like to thank Keepsafe
|
|
||||||
(https://www.getkeepsafe.com) for its support in the early days of
|
|
||||||
the project.
|
|
||||||
|
|
||||||
|
|
||||||
Source code
|
|
||||||
===========
|
|
||||||
|
|
||||||
The latest developer version is available in a GitHub repository:
|
|
||||||
https://github.com/aio-libs/aiohttp
|
|
||||||
|
|
||||||
Benchmarks
|
|
||||||
==========
|
|
||||||
|
|
||||||
If you are interested in efficiency, the AsyncIO community maintains a
|
|
||||||
list of benchmarks on the official wiki:
|
|
||||||
https://github.com/python/asyncio/wiki/Benchmarks
|
|
||||||
@@ -1,138 +0,0 @@
|
|||||||
aiohttp-3.12.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
aiohttp-3.12.14.dist-info/METADATA,sha256=eANbIsB4Kj7_7QofG0pGvr-qVDn7-uqvxOTGuI0iX_w,7613
|
|
||||||
aiohttp-3.12.14.dist-info/RECORD,,
|
|
||||||
aiohttp-3.12.14.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
aiohttp-3.12.14.dist-info/WHEEL,sha256=aSgG0F4rGPZtV0iTEIfy6dtHq6g67Lze3uLfk0vWn88,151
|
|
||||||
aiohttp-3.12.14.dist-info/licenses/LICENSE.txt,sha256=n4DQ2311WpQdtFchcsJw7L2PCCuiFd3QlZhZQu2Uqes,588
|
|
||||||
aiohttp-3.12.14.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
|
||||||
aiohttp/.hash/_cparser.pxd.hash,sha256=pjs-sEXNw_eijXGAedwG-BHnlFp8B7sOCgUagIWaU2A,121
|
|
||||||
aiohttp/.hash/_find_header.pxd.hash,sha256=_mbpD6vM-CVCKq3ulUvsOAz5Wdo88wrDzfpOsMQaMNA,125
|
|
||||||
aiohttp/.hash/_http_parser.pyx.hash,sha256=8LCTs_O4fFH1HswgQLgjUn8gknOO8Z8V63c_hQ4fNnM,125
|
|
||||||
aiohttp/.hash/_http_writer.pyx.hash,sha256=uhOanbDG8R2Pxria3xMb15h7biBeeT3ioBoQNwqKYp8,125
|
|
||||||
aiohttp/.hash/hdrs.py.hash,sha256=v6IaKbsxjsdQxBzhb5AjP0x_9G3rUe84D7avf7AI4cs,116
|
|
||||||
aiohttp/__init__.py,sha256=Pzr8s2ho-qqwAaB81nT1jk2rkDSL3dcAbMguPmcLpyc,8303
|
|
||||||
aiohttp/__pycache__/__init__.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/abc.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/base_protocol.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_exceptions.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_middlewares.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_proto.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_reqrep.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/client_ws.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/compression_utils.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/connector.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/cookiejar.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/formdata.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/hdrs.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/helpers.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/http.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/http_exceptions.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/http_parser.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/http_websocket.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/http_writer.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/log.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/multipart.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/payload.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/payload_streamer.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/pytest_plugin.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/resolver.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/streams.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/tcp_helpers.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/test_utils.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/tracing.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/typedefs.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_app.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_exceptions.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_fileresponse.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_log.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_middlewares.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_protocol.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_request.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_response.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_routedef.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_runner.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_server.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/web_ws.cpython-312.pyc,,
|
|
||||||
aiohttp/__pycache__/worker.cpython-312.pyc,,
|
|
||||||
aiohttp/_cookie_helpers.py,sha256=xjCVZKrQIfH1bwN5UeNrem8kevnXwZcBoNY94yyk8Qc,12418
|
|
||||||
aiohttp/_cparser.pxd,sha256=UnbUYCHg4NdXfgyRVYAMv2KTLWClB4P-xCrvtj_r7ew,4295
|
|
||||||
aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68
|
|
||||||
aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007
|
|
||||||
aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so,sha256=ealDvc9qJCkwzAIYgtKMLkij41Qxk78JXdZiUokTTvg,2878000
|
|
||||||
aiohttp/_http_parser.pyx,sha256=1L07PKuJjgDGQuqlmy965a5aoTdOaYWX99gFowLyPiE,28239
|
|
||||||
aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so,sha256=fGELEfKELoBWO-C9VKXunQnqSLURO1RvlGL59rVyOt8,511688
|
|
||||||
aiohttp/_http_writer.pyx,sha256=96seJigne4J3LVnB3DAzwTSV12nfZ7HR1JsaR0p13VI,4561
|
|
||||||
aiohttp/_websocket/.hash/mask.pxd.hash,sha256=Y0zBddk_ck3pi9-BFzMcpkcvCKvwvZ4GTtZFb9u1nxQ,128
|
|
||||||
aiohttp/_websocket/.hash/mask.pyx.hash,sha256=90owpXYM8_kIma4KUcOxhWSk-Uv4NVMBoCYeFM1B3d0,128
|
|
||||||
aiohttp/_websocket/.hash/reader_c.pxd.hash,sha256=5xf3oobk6vx4xbJm-xtZ1_QufB8fYFtLQV2MNdqUc1w,132
|
|
||||||
aiohttp/_websocket/__init__.py,sha256=Mar3R9_vBN_Ea4lsW7iTAVXD7OKswKPGqF5xgSyt77k,44
|
|
||||||
aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/models.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/reader.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/__pycache__/writer.cpython-312.pyc,,
|
|
||||||
aiohttp/_websocket/helpers.py,sha256=P-XLv8IUaihKzDenVUqfKU5DJbWE5HvG8uhvUZK8Ic4,5038
|
|
||||||
aiohttp/_websocket/mask.cpython-312-x86_64-linux-gnu.so,sha256=PISNT8-1dxCmHxX3aMjYsrAatk5CLnJtjvIvpL7sUcA,258728
|
|
||||||
aiohttp/_websocket/mask.pxd,sha256=sBmZ1Amym9kW4Ge8lj1fLZ7mPPya4LzLdpkQExQXv5M,112
|
|
||||||
aiohttp/_websocket/mask.pyx,sha256=BHjOtV0O0w7xp9p0LNADRJvGmgfPn9sGeJvSs0fL__4,1397
|
|
||||||
aiohttp/_websocket/models.py,sha256=XAzjs_8JYszWXIgZ6R3ZRrF-tX9Q_6LiD49WRYojopM,2121
|
|
||||||
aiohttp/_websocket/reader.py,sha256=eC4qS0c5sOeQ2ebAHLaBpIaTVFaSKX79pY2xvh3Pqyw,1030
|
|
||||||
aiohttp/_websocket/reader_c.cpython-312-x86_64-linux-gnu.so,sha256=VkB5K9VXo-zC9aaa7p3xOwSTs-OgeYsqZE21uJ1Jd4w,1818512
|
|
||||||
aiohttp/_websocket/reader_c.pxd,sha256=nl_njtDrzlQU0rjgGGjZDB-swguE0tX_bCPobkShVa4,2625
|
|
||||||
aiohttp/_websocket/reader_c.py,sha256=gSsE_iSBr7-ORvOmgkCT7Jpj4_j3854i_Cp88Se1_6E,18791
|
|
||||||
aiohttp/_websocket/reader_py.py,sha256=gSsE_iSBr7-ORvOmgkCT7Jpj4_j3854i_Cp88Se1_6E,18791
|
|
||||||
aiohttp/_websocket/writer.py,sha256=9qCnQnCFwPmvf6U6i_7VfTldjpcDfQ_ojeCv5mXoMkw,7139
|
|
||||||
aiohttp/abc.py,sha256=jA2jRYAxc217gO96C-wDXcAPcDWjVJpqXrTGfa7uwqM,7148
|
|
||||||
aiohttp/base_protocol.py,sha256=Tp8cxUPQvv9kUPk3w6lAzk6d2MAzV3scwI_3Go3C47c,3025
|
|
||||||
aiohttp/client.py,sha256=UmwwoDurmDDvxTwa4e1VElko4mc8_Snsvs3CA6SE-kc,57584
|
|
||||||
aiohttp/client_exceptions.py,sha256=uyKbxI2peZhKl7lELBMx3UeusNkfpemPWpGFq0r6JeM,11367
|
|
||||||
aiohttp/client_middleware_digest_auth.py,sha256=_1RpbyJtbY42-qy5TGYvEa0PXZjAsFmf1CMXp-_626U,16938
|
|
||||||
aiohttp/client_middlewares.py,sha256=kP5N9CMzQPMGPIEydeVUiLUTLsw8Vl8Gr4qAWYdu3vM,1918
|
|
||||||
aiohttp/client_proto.py,sha256=56_WtLStZGBFPYKzgEgY6v24JkhV1y6JEmmuxeJT2So,12110
|
|
||||||
aiohttp/client_reqrep.py,sha256=OJuvhGlFMxq7i0z2WLovzeaAcICeNn3qKA25MhwsZrY,53524
|
|
||||||
aiohttp/client_ws.py,sha256=1CIjIXwyzOMIYw6AjUES4-qUwbyVHW1seJKQfg_Rta8,15109
|
|
||||||
aiohttp/compression_utils.py,sha256=LDUVfDiChHNb_ojMEITJuoSEbOAQ4Qznu07vTHL-_pY,8868
|
|
||||||
aiohttp/connector.py,sha256=WQetKoSW7XnHA9r4o9OWwO3-n7ymOwBd2Tg_xHNw0Bs,68456
|
|
||||||
aiohttp/cookiejar.py,sha256=e28ZMQwJ5P0vbPX1OX4Se7-k3zeGvocFEqzGhwpG53k,18922
|
|
||||||
aiohttp/formdata.py,sha256=dRmQY8LA6WSj5HzqF9tUzu_SNe6mzZ1DqXXkyg4ga20,6410
|
|
||||||
aiohttp/hdrs.py,sha256=2rj5MyA-6yRdYPhW5UKkW4iNWhEAlGIOSBH5D4FmKNE,5111
|
|
||||||
aiohttp/helpers.py,sha256=bblNEhp4hFimEmxMdPNxEluBY17L5YUArHYvoxzoEe4,29614
|
|
||||||
aiohttp/http.py,sha256=8o8j8xH70OWjnfTWA9V44NR785QPxEPrUtzMXiAVpwc,1842
|
|
||||||
aiohttp/http_exceptions.py,sha256=AZafFHgtAkAgrKZf8zYPU8VX2dq32-VAoP-UZxBLU0c,2960
|
|
||||||
aiohttp/http_parser.py,sha256=SRADKjgUtYJxUgvvYTyJA0wB8WpKjTcKpzIT8fsE1aE,36896
|
|
||||||
aiohttp/http_websocket.py,sha256=8VXFKw6KQUEmPg48GtRMB37v0gTK7A0inoxXuDxMZEc,842
|
|
||||||
aiohttp/http_writer.py,sha256=fbRtKPYSqRbtAdr_gqpjF2-4sI1ESL8dPDF-xY_mAMY,12446
|
|
||||||
aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325
|
|
||||||
aiohttp/multipart.py,sha256=YvgDa5-vOAk9njEJAVwa-L6XVu83PNdct56tDJsfSjI,39867
|
|
||||||
aiohttp/payload.py,sha256=qHpvXhgJyODHjb6tEq7oyB6ChCBRVZV7kd3QAoMhW8k,41044
|
|
||||||
aiohttp/payload_streamer.py,sha256=ZzEYyfzcjGWkVkK3XR2pBthSCSIykYvY3Wr5cGQ2eTc,2211
|
|
||||||
aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
|
|
||||||
aiohttp/pytest_plugin.py,sha256=z4XwqmsKdyJCKxbGiA5kFf90zcedvomqk4RqjZbhKNk,12901
|
|
||||||
aiohttp/resolver.py,sha256=gsrfUpFf8iHlcHfJvY-1fiBHW3PRvRVNb5lNZBg3zlY,10031
|
|
||||||
aiohttp/streams.py,sha256=U-qTkuAqIfpJChuKEy-vYn8nQ_Z1MVcW0WO2DHiJz_o,22329
|
|
||||||
aiohttp/tcp_helpers.py,sha256=BSadqVWaBpMFDRWnhaaR941N9MiDZ7bdTrxgCb0CW-M,961
|
|
||||||
aiohttp/test_utils.py,sha256=ZJSzZWjC76KSbtwddTKcP6vHpUl_ozfAf3F93ewmHRU,23016
|
|
||||||
aiohttp/tracing.py,sha256=-6aaW6l0J9uJD45LzR4cijYH0j62pt0U_nn_aVzFku4,14558
|
|
||||||
aiohttp/typedefs.py,sha256=wUlqwe9Mw9W8jT3HsYJcYk00qP3EMPz3nTkYXmeNN48,1657
|
|
||||||
aiohttp/web.py,sha256=sG_U41AY4S_LBY9sReiBzXKJRZpXk8xgiE_l5S_UPPg,18390
|
|
||||||
aiohttp/web_app.py,sha256=lGU_aAMN-h3wy-LTTHi6SeKH8ydt1G51BXcCspgD5ZA,19452
|
|
||||||
aiohttp/web_exceptions.py,sha256=7nIuiwhZ39vJJ9KrWqArA5QcWbUdqkz2CLwEpJapeN8,10360
|
|
||||||
aiohttp/web_fileresponse.py,sha256=EtDuw5mF7uGkjrrwSBaDQk6F1FJW4pnwE2pZGv3T1QI,16474
|
|
||||||
aiohttp/web_log.py,sha256=rX5D7xLOX2B6BMdiZ-chme_KfJfW5IXEoFwLfkfkajs,7865
|
|
||||||
aiohttp/web_middlewares.py,sha256=sFI0AgeNjdyAjuz92QtMIpngmJSOxrqe2Jfbs4BNUu0,4165
|
|
||||||
aiohttp/web_protocol.py,sha256=c8a0PKGqfhIAiq2RboMsy1NRza4dnj6gnXIWvJUeCF0,27015
|
|
||||||
aiohttp/web_request.py,sha256=zN96OlMRlrCFOMRpdh7y9rvHP0Hm8zavC0OFCj0wlSg,29833
|
|
||||||
aiohttp/web_response.py,sha256=GlxFuiUqqHoXkGGFymII59SbIKU-itLgsl-bD0wGrzc,29342
|
|
||||||
aiohttp/web_routedef.py,sha256=VT1GAx6BrawoDh5RwBwBu5wSABSqgWwAe74AUCyZAEo,6110
|
|
||||||
aiohttp/web_runner.py,sha256=v1G1nKiOOQgFnTSR4IMc6I9ReEFDMaHtMLvO_roDM-A,11786
|
|
||||||
aiohttp/web_server.py,sha256=-9WDKUAiR9ll-rSdwXSqG6YjaoW79d1R4y0BGSqgUMA,2888
|
|
||||||
aiohttp/web_urldispatcher.py,sha256=sFkcsa8qLFkDp47_oW7Z7fiq7DcVXiff1Etn0QN8DJA,44000
|
|
||||||
aiohttp/web_ws.py,sha256=lItgmyatkXh0M6EY7JoZnSZkUl6R0wv8B88X4ILqQbU,22739
|
|
||||||
aiohttp/worker.py,sha256=zT0iWN5Xze194bO6_VjHou0x7lR_k0MviN6Kadnk22g,8152
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
Wheel-Version: 1.0
|
|
||||||
Generator: setuptools (80.9.0)
|
|
||||||
Root-Is-Purelib: false
|
|
||||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
|
||||||
Tag: cp312-cp312-manylinux2014_x86_64
|
|
||||||
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
Copyright aio-libs contributors.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
aiohttp
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
5276d46021e0e0d7577e0c9155800cbf62932d60a50783fec42aefb63febedec /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
d4bd3b3cab898e00c642eaa59b2f7ae5ae5aa1374e698597f7d805a302f23e21 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
f7ab1e2628277b82772d59c1dc3033c13495d769df67b1d1d49b1a474a75dd52 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
|
|
||||||
@@ -1,278 +0,0 @@
|
|||||||
__version__ = "3.12.14"
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING, Tuple
|
|
||||||
|
|
||||||
from . import hdrs as hdrs
|
|
||||||
from .client import (
|
|
||||||
BaseConnector,
|
|
||||||
ClientConnectionError,
|
|
||||||
ClientConnectionResetError,
|
|
||||||
ClientConnectorCertificateError,
|
|
||||||
ClientConnectorDNSError,
|
|
||||||
ClientConnectorError,
|
|
||||||
ClientConnectorSSLError,
|
|
||||||
ClientError,
|
|
||||||
ClientHttpProxyError,
|
|
||||||
ClientOSError,
|
|
||||||
ClientPayloadError,
|
|
||||||
ClientProxyConnectionError,
|
|
||||||
ClientRequest,
|
|
||||||
ClientResponse,
|
|
||||||
ClientResponseError,
|
|
||||||
ClientSession,
|
|
||||||
ClientSSLError,
|
|
||||||
ClientTimeout,
|
|
||||||
ClientWebSocketResponse,
|
|
||||||
ClientWSTimeout,
|
|
||||||
ConnectionTimeoutError,
|
|
||||||
ContentTypeError,
|
|
||||||
Fingerprint,
|
|
||||||
InvalidURL,
|
|
||||||
InvalidUrlClientError,
|
|
||||||
InvalidUrlRedirectClientError,
|
|
||||||
NamedPipeConnector,
|
|
||||||
NonHttpUrlClientError,
|
|
||||||
NonHttpUrlRedirectClientError,
|
|
||||||
RedirectClientError,
|
|
||||||
RequestInfo,
|
|
||||||
ServerConnectionError,
|
|
||||||
ServerDisconnectedError,
|
|
||||||
ServerFingerprintMismatch,
|
|
||||||
ServerTimeoutError,
|
|
||||||
SocketTimeoutError,
|
|
||||||
TCPConnector,
|
|
||||||
TooManyRedirects,
|
|
||||||
UnixConnector,
|
|
||||||
WSMessageTypeError,
|
|
||||||
WSServerHandshakeError,
|
|
||||||
request,
|
|
||||||
)
|
|
||||||
from .client_middleware_digest_auth import DigestAuthMiddleware
|
|
||||||
from .client_middlewares import ClientHandlerType, ClientMiddlewareType
|
|
||||||
from .compression_utils import set_zlib_backend
|
|
||||||
from .connector import (
|
|
||||||
AddrInfoType as AddrInfoType,
|
|
||||||
SocketFactoryType as SocketFactoryType,
|
|
||||||
)
|
|
||||||
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
|
||||||
from .formdata import FormData as FormData
|
|
||||||
from .helpers import BasicAuth, ChainMapProxy, ETag
|
|
||||||
from .http import (
|
|
||||||
HttpVersion as HttpVersion,
|
|
||||||
HttpVersion10 as HttpVersion10,
|
|
||||||
HttpVersion11 as HttpVersion11,
|
|
||||||
WebSocketError as WebSocketError,
|
|
||||||
WSCloseCode as WSCloseCode,
|
|
||||||
WSMessage as WSMessage,
|
|
||||||
WSMsgType as WSMsgType,
|
|
||||||
)
|
|
||||||
from .multipart import (
|
|
||||||
BadContentDispositionHeader as BadContentDispositionHeader,
|
|
||||||
BadContentDispositionParam as BadContentDispositionParam,
|
|
||||||
BodyPartReader as BodyPartReader,
|
|
||||||
MultipartReader as MultipartReader,
|
|
||||||
MultipartWriter as MultipartWriter,
|
|
||||||
content_disposition_filename as content_disposition_filename,
|
|
||||||
parse_content_disposition as parse_content_disposition,
|
|
||||||
)
|
|
||||||
from .payload import (
|
|
||||||
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
|
||||||
AsyncIterablePayload as AsyncIterablePayload,
|
|
||||||
BufferedReaderPayload as BufferedReaderPayload,
|
|
||||||
BytesIOPayload as BytesIOPayload,
|
|
||||||
BytesPayload as BytesPayload,
|
|
||||||
IOBasePayload as IOBasePayload,
|
|
||||||
JsonPayload as JsonPayload,
|
|
||||||
Payload as Payload,
|
|
||||||
StringIOPayload as StringIOPayload,
|
|
||||||
StringPayload as StringPayload,
|
|
||||||
TextIOPayload as TextIOPayload,
|
|
||||||
get_payload as get_payload,
|
|
||||||
payload_type as payload_type,
|
|
||||||
)
|
|
||||||
from .payload_streamer import streamer as streamer
|
|
||||||
from .resolver import (
|
|
||||||
AsyncResolver as AsyncResolver,
|
|
||||||
DefaultResolver as DefaultResolver,
|
|
||||||
ThreadedResolver as ThreadedResolver,
|
|
||||||
)
|
|
||||||
from .streams import (
|
|
||||||
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
|
||||||
DataQueue as DataQueue,
|
|
||||||
EofStream as EofStream,
|
|
||||||
FlowControlDataQueue as FlowControlDataQueue,
|
|
||||||
StreamReader as StreamReader,
|
|
||||||
)
|
|
||||||
from .tracing import (
|
|
||||||
TraceConfig as TraceConfig,
|
|
||||||
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
|
||||||
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
|
||||||
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
|
||||||
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
|
||||||
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
|
||||||
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
|
||||||
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
|
||||||
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
|
||||||
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
|
||||||
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
|
||||||
TraceRequestEndParams as TraceRequestEndParams,
|
|
||||||
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
|
||||||
TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
|
|
||||||
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
|
||||||
TraceRequestStartParams as TraceRequestStartParams,
|
|
||||||
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
# At runtime these are lazy-loaded at the bottom of the file.
|
|
||||||
from .worker import (
|
|
||||||
GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
|
|
||||||
GunicornWebWorker as GunicornWebWorker,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__: Tuple[str, ...] = (
|
|
||||||
"hdrs",
|
|
||||||
# client
|
|
||||||
"AddrInfoType",
|
|
||||||
"BaseConnector",
|
|
||||||
"ClientConnectionError",
|
|
||||||
"ClientConnectionResetError",
|
|
||||||
"ClientConnectorCertificateError",
|
|
||||||
"ClientConnectorDNSError",
|
|
||||||
"ClientConnectorError",
|
|
||||||
"ClientConnectorSSLError",
|
|
||||||
"ClientError",
|
|
||||||
"ClientHttpProxyError",
|
|
||||||
"ClientOSError",
|
|
||||||
"ClientPayloadError",
|
|
||||||
"ClientProxyConnectionError",
|
|
||||||
"ClientResponse",
|
|
||||||
"ClientRequest",
|
|
||||||
"ClientResponseError",
|
|
||||||
"ClientSSLError",
|
|
||||||
"ClientSession",
|
|
||||||
"ClientTimeout",
|
|
||||||
"ClientWebSocketResponse",
|
|
||||||
"ClientWSTimeout",
|
|
||||||
"ConnectionTimeoutError",
|
|
||||||
"ContentTypeError",
|
|
||||||
"Fingerprint",
|
|
||||||
"FlowControlDataQueue",
|
|
||||||
"InvalidURL",
|
|
||||||
"InvalidUrlClientError",
|
|
||||||
"InvalidUrlRedirectClientError",
|
|
||||||
"NonHttpUrlClientError",
|
|
||||||
"NonHttpUrlRedirectClientError",
|
|
||||||
"RedirectClientError",
|
|
||||||
"RequestInfo",
|
|
||||||
"ServerConnectionError",
|
|
||||||
"ServerDisconnectedError",
|
|
||||||
"ServerFingerprintMismatch",
|
|
||||||
"ServerTimeoutError",
|
|
||||||
"SocketFactoryType",
|
|
||||||
"SocketTimeoutError",
|
|
||||||
"TCPConnector",
|
|
||||||
"TooManyRedirects",
|
|
||||||
"UnixConnector",
|
|
||||||
"NamedPipeConnector",
|
|
||||||
"WSServerHandshakeError",
|
|
||||||
"request",
|
|
||||||
# client_middleware
|
|
||||||
"ClientMiddlewareType",
|
|
||||||
"ClientHandlerType",
|
|
||||||
# cookiejar
|
|
||||||
"CookieJar",
|
|
||||||
"DummyCookieJar",
|
|
||||||
# formdata
|
|
||||||
"FormData",
|
|
||||||
# helpers
|
|
||||||
"BasicAuth",
|
|
||||||
"ChainMapProxy",
|
|
||||||
"DigestAuthMiddleware",
|
|
||||||
"ETag",
|
|
||||||
"set_zlib_backend",
|
|
||||||
# http
|
|
||||||
"HttpVersion",
|
|
||||||
"HttpVersion10",
|
|
||||||
"HttpVersion11",
|
|
||||||
"WSMsgType",
|
|
||||||
"WSCloseCode",
|
|
||||||
"WSMessage",
|
|
||||||
"WebSocketError",
|
|
||||||
# multipart
|
|
||||||
"BadContentDispositionHeader",
|
|
||||||
"BadContentDispositionParam",
|
|
||||||
"BodyPartReader",
|
|
||||||
"MultipartReader",
|
|
||||||
"MultipartWriter",
|
|
||||||
"content_disposition_filename",
|
|
||||||
"parse_content_disposition",
|
|
||||||
# payload
|
|
||||||
"AsyncIterablePayload",
|
|
||||||
"BufferedReaderPayload",
|
|
||||||
"BytesIOPayload",
|
|
||||||
"BytesPayload",
|
|
||||||
"IOBasePayload",
|
|
||||||
"JsonPayload",
|
|
||||||
"PAYLOAD_REGISTRY",
|
|
||||||
"Payload",
|
|
||||||
"StringIOPayload",
|
|
||||||
"StringPayload",
|
|
||||||
"TextIOPayload",
|
|
||||||
"get_payload",
|
|
||||||
"payload_type",
|
|
||||||
# payload_streamer
|
|
||||||
"streamer",
|
|
||||||
# resolver
|
|
||||||
"AsyncResolver",
|
|
||||||
"DefaultResolver",
|
|
||||||
"ThreadedResolver",
|
|
||||||
# streams
|
|
||||||
"DataQueue",
|
|
||||||
"EMPTY_PAYLOAD",
|
|
||||||
"EofStream",
|
|
||||||
"StreamReader",
|
|
||||||
# tracing
|
|
||||||
"TraceConfig",
|
|
||||||
"TraceConnectionCreateEndParams",
|
|
||||||
"TraceConnectionCreateStartParams",
|
|
||||||
"TraceConnectionQueuedEndParams",
|
|
||||||
"TraceConnectionQueuedStartParams",
|
|
||||||
"TraceConnectionReuseconnParams",
|
|
||||||
"TraceDnsCacheHitParams",
|
|
||||||
"TraceDnsCacheMissParams",
|
|
||||||
"TraceDnsResolveHostEndParams",
|
|
||||||
"TraceDnsResolveHostStartParams",
|
|
||||||
"TraceRequestChunkSentParams",
|
|
||||||
"TraceRequestEndParams",
|
|
||||||
"TraceRequestExceptionParams",
|
|
||||||
"TraceRequestHeadersSentParams",
|
|
||||||
"TraceRequestRedirectParams",
|
|
||||||
"TraceRequestStartParams",
|
|
||||||
"TraceResponseChunkReceivedParams",
|
|
||||||
# workers (imported lazily with __getattr__)
|
|
||||||
"GunicornUVLoopWebWorker",
|
|
||||||
"GunicornWebWorker",
|
|
||||||
"WSMessageTypeError",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def __dir__() -> Tuple[str, ...]:
    """Return the module's public names plus ``__doc__`` for ``dir()``."""
    return (*__all__, "__doc__")
|
|
||||||
|
|
||||||
|
|
||||||
def __getattr__(name: str) -> object:
    """Lazily resolve the gunicorn worker classes on first access (PEP 562).

    Importing gunicorn is slow (>100ms), so ``.worker`` is imported only
    when one of the worker names is actually requested.  On success the
    classes are cached as module globals so this hook is not hit again;
    if gunicorn is unavailable, ``None`` is returned for the worker names.
    """
    global GunicornUVLoopWebWorker, GunicornWebWorker

    if name not in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
        raise AttributeError(f"module {__name__} has no attribute {name}")

    try:
        from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
    except ImportError:
        # gunicorn not installed: worker classes resolve to None.
        return None

    # Cache on the module so subsequent lookups bypass __getattr__.
    GunicornUVLoopWebWorker = guv  # type: ignore[misc]
    GunicornWebWorker = gw  # type: ignore[misc]
    return guv if name == "GunicornUVLoopWebWorker" else gw
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,309 +0,0 @@
|
|||||||
"""
|
|
||||||
Internal cookie handling helpers.
|
|
||||||
|
|
||||||
This module contains internal utilities for cookie parsing and manipulation.
|
|
||||||
These are not part of the public API and may change without notice.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from http.cookies import Morsel
|
|
||||||
from typing import List, Optional, Sequence, Tuple, cast
|
|
||||||
|
|
||||||
from .log import internal_logger
|
|
||||||
|
|
||||||
__all__ = (
|
|
||||||
"parse_set_cookie_headers",
|
|
||||||
"parse_cookie_header",
|
|
||||||
"preserve_morsel_with_coded_value",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Cookie parsing constants
|
|
||||||
# Allow more characters in cookie names to handle real-world cookies
|
|
||||||
# that don't strictly follow RFC standards (fixes #2683)
|
|
||||||
# RFC 6265 defines cookie-name token as per RFC 2616 Section 2.2,
|
|
||||||
# but many servers send cookies with characters like {} [] () etc.
|
|
||||||
# This makes the cookie parser more tolerant of real-world cookies
|
|
||||||
# while still providing some validation to catch obviously malformed names.
|
|
||||||
_COOKIE_NAME_RE = re.compile(r"^[!#$%&\'()*+\-./0-9:<=>?@A-Z\[\]^_`a-z{|}~]+$")
|
|
||||||
_COOKIE_KNOWN_ATTRS = frozenset( # AKA Morsel._reserved
|
|
||||||
(
|
|
||||||
"path",
|
|
||||||
"domain",
|
|
||||||
"max-age",
|
|
||||||
"expires",
|
|
||||||
"secure",
|
|
||||||
"httponly",
|
|
||||||
"samesite",
|
|
||||||
"partitioned",
|
|
||||||
"version",
|
|
||||||
"comment",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
_COOKIE_BOOL_ATTRS = frozenset( # AKA Morsel._flags
|
|
||||||
("secure", "httponly", "partitioned")
|
|
||||||
)
|
|
||||||
|
|
||||||
# SimpleCookie's pattern for parsing cookies with relaxed validation
|
|
||||||
# Based on http.cookies pattern but extended to allow more characters in cookie names
|
|
||||||
# to handle real-world cookies (fixes #2683)
|
|
||||||
_COOKIE_PATTERN = re.compile(
|
|
||||||
r"""
|
|
||||||
\s* # Optional whitespace at start of cookie
|
|
||||||
(?P<key> # Start of group 'key'
|
|
||||||
# aiohttp has extended to include [] for compatibility with real-world cookies
|
|
||||||
[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]+? # Any word of at least one letter
|
|
||||||
) # End of group 'key'
|
|
||||||
( # Optional group: there may not be a value.
|
|
||||||
\s*=\s* # Equal Sign
|
|
||||||
(?P<val> # Start of group 'val'
|
|
||||||
"(?:[^\\"]|\\.)*" # Any double-quoted string (properly closed)
|
|
||||||
| # or
|
|
||||||
"[^";]* # Unmatched opening quote (differs from SimpleCookie - issue #7993)
|
|
||||||
| # or
|
|
||||||
# Special case for "expires" attr - RFC 822, RFC 850, RFC 1036, RFC 1123
|
|
||||||
(\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day (with comma)
|
|
||||||
[\w\d\s-]{9,11}\s[\d:]{8}\s # Date and time in specific format
|
|
||||||
(GMT|[+-]\d{4}) # Timezone: GMT or RFC 2822 offset like -0000, +0100
|
|
||||||
# NOTE: RFC 2822 timezone support is an aiohttp extension
|
|
||||||
# for issue #4493 - SimpleCookie does NOT support this
|
|
||||||
| # or
|
|
||||||
# ANSI C asctime() format: "Wed Jun 9 10:18:14 2021"
|
|
||||||
# NOTE: This is an aiohttp extension for issue #4327 - SimpleCookie does NOT support this format
|
|
||||||
\w{3}\s+\w{3}\s+[\s\d]\d\s+\d{2}:\d{2}:\d{2}\s+\d{4}
|
|
||||||
| # or
|
|
||||||
[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]* # Any word or empty string
|
|
||||||
) # End of group 'val'
|
|
||||||
)? # End of optional value group
|
|
||||||
\s* # Any number of spaces.
|
|
||||||
(\s+|;|$) # Ending either at space, semicolon, or EOS.
|
|
||||||
""",
|
|
||||||
re.VERBOSE | re.ASCII,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]:
    """
    Preserve a Morsel's coded_value exactly as received from the server.

    Cookie encoding must be kept byte-for-byte as sent, which matters for
    old servers with strict cookie-format requirements.  Python's
    SimpleCookie would otherwise re-encode the value and break
    authentication with such servers (see
    https://github.com/aio-libs/aiohttp/pull/1453).

    Args:
        cookie: A Morsel object from SimpleCookie

    Returns:
        A Morsel object with preserved coded_value

    """
    preserved = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
    state = {
        "key": cookie.key,
        "value": cookie.value,
        "coded_value": cookie.coded_value,
    }
    # __setstate__ (rather than the public set() API) lets us install
    # already-validated state without re-validation; it is stable because
    # changing it would break Morsel pickling.
    preserved.__setstate__(state)  # type: ignore[attr-defined]
    return preserved
|
|
||||||
|
|
||||||
|
|
||||||
_unquote_sub = re.compile(r"\\(?:([0-3][0-7][0-7])|(.))").sub
|
|
||||||
|
|
||||||
|
|
||||||
def _unquote_replace(m: re.Match[str]) -> str:
    """
    Replace function used by _unquote_sub to decode one escape sequence.

    Group 1 captures a three-digit octal escape, converted to its
    character; otherwise group 2 holds a single backslash-escaped
    character, which is returned with the backslash stripped.
    """
    octal = m[1]
    return chr(int(octal, 8)) if octal else m[2]
|
|
||||||
|
|
||||||
|
|
||||||
def _unquote(value: str) -> str:
    """
    Unquote a cookie value.

    Vendored from http.cookies._unquote to ensure compatibility.

    Values that are not wrapped in double quotes are returned unchanged:
    per RFC 2109, special characters only occur inside quoted strings.
    (The upstream None check was dropped; every caller guarantees a str.)
    """
    # Too short to be quoted, or not actually quoted: nothing to decode.
    if len(value) < 2 or value[0] != '"' or value[-1] != '"':
        return value

    # Strip the surrounding quotes, then expand escape sequences, e.g.
    #   \012 --> \n    and    \" --> "
    return _unquote_sub(_unquote_replace, value[1:-1])
|
|
||||||
|
|
||||||
|
|
||||||
def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]:
    """
    Parse a Cookie header according to RFC 6265 Section 5.4.

    A Cookie header carries only name=value pairs separated by
    semicolons; it has no attributes, so even names matching attribute
    names (such as 'path' or 'secure') are treated as ordinary cookies.

    The same regex as parse_set_cookie_headers is used so that quoted
    values containing semicolons are handled correctly.

    Args:
        header: The Cookie header value to parse

    Returns:
        List of (name, Morsel) tuples for compatibility with SimpleCookie.update()

    """
    if not header:
        return []

    result: List[Tuple[str, Morsel[str]]] = []
    pos = 0
    end = len(header)

    while pos < end:
        m = _COOKIE_PATTERN.match(header, pos)
        if m is None:
            # Nothing more parseable in this header.
            break

        pos = m.end(0)
        name = m.group("key")
        raw_value = m.group("val") or ""

        # Reject obviously malformed names (tolerant pattern, see #2683).
        if not name or not _COOKIE_NAME_RE.match(name):
            internal_logger.warning("Can not load cookie: Illegal cookie name %r", name)
            continue

        morsel: Morsel[str] = Morsel()
        # __setstate__ (rather than set()) installs pre-validated state and
        # keeps the received coded_value intact (quotes and all); it is
        # stable because changing it would break Morsel pickling.
        morsel.__setstate__(  # type: ignore[attr-defined]
            {"key": name, "value": _unquote(raw_value), "coded_value": raw_value}
        )
        result.append((name, morsel))

    return result
|
|
||||||
|
|
||||||
|
|
||||||
def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]:
    """
    Parse cookie headers using a vendored version of SimpleCookie parsing.

    Based on SimpleCookie.__parse_string for compatibility with how
    SimpleCookie parses cookies, including malformed cookies with missing
    semicolons.

    Used for both Cookie and Set-Cookie headers to stay forgiving:
    ideally RFC 6265 Section 5.2 (Cookie) and Section 4.2.1 (Set-Cookie)
    would apply, but real-world data forces a more lenient parser.

    NOTE: Unlike SimpleCookie, which stops parsing at a value with an
    unmatched quote (silently dropping later cookies), this parser handles
    unmatched quotes gracefully to avoid losing cookies.
    See https://github.com/aio-libs/aiohttp/issues/7993
    """
    result: List[Tuple[str, Morsel[str]]] = []

    for hdr in headers:
        if not hdr:
            continue

        # Walk the header with SimpleCookie's algorithm.
        pos = 0
        hdr_len = len(hdr)
        morsel: Optional[Morsel[str]] = None
        seen_cookie = False

        while 0 <= pos < hdr_len:
            m = _COOKIE_PATTERN.match(hdr, pos)
            if m is None:
                # No more cookies in this header.
                break

            name, val = m.group("key"), m.group("val")
            pos = m.end(0)
            name_lc = name.lower()

            if name[0] == "$":
                # RFC 2109 $-prefixed attribute.
                if not seen_cookie:
                    # Attributes describing the cookie mechanism as a
                    # whole (e.g. "$Version") are ignored.
                    continue
                if morsel is not None:
                    attr_name = name_lc[1:]
                    if attr_name in _COOKIE_KNOWN_ATTRS:
                        morsel[attr_name] = val or ""
            elif name_lc in _COOKIE_KNOWN_ATTRS:
                if not seen_cookie:
                    # Malformed: attribute before any cookie pair.
                    break
                if name_lc in _COOKIE_BOOL_ATTRS:
                    # Boolean attribute: any value means True.
                    if morsel is not None:
                        if name_lc == "partitioned" and sys.version_info < (3, 14):
                            # Morsel rejects "partitioned" before 3.14;
                            # write through dict to bypass its validation.
                            dict.__setitem__(morsel, name_lc, True)
                        else:
                            morsel[name_lc] = True
                elif val is None:
                    # Malformed: non-boolean attribute without a value.
                    break
                elif morsel is not None:
                    # Regular attribute with a value.
                    morsel[name_lc] = _unquote(val)
            elif val is not None:
                # Ordinary cookie name=value pair; validate the name first.
                if name in _COOKIE_KNOWN_ATTRS or not _COOKIE_NAME_RE.match(name):
                    internal_logger.warning(
                        "Can not load cookies: Illegal cookie name %r", name
                    )
                    morsel = None
                else:
                    morsel = Morsel()
                    # __setstate__ installs pre-validated state and keeps
                    # the received coded_value intact (quotes and all); it
                    # is stable because changing it would break pickling.
                    morsel.__setstate__(  # type: ignore[attr-defined]
                        {"key": name, "value": _unquote(val), "coded_value": val}
                    )
                    result.append((name, morsel))
                    seen_cookie = True
            else:
                # Malformed: bare word that is neither attribute nor pair.
                break

    return result
|
|
||||||
@@ -1,158 +0,0 @@
|
|||||||
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
|
|
||||||
|
|
||||||
|
|
||||||
cdef extern from "llhttp.h":
|
|
||||||
|
|
||||||
struct llhttp__internal_s:
|
|
||||||
int32_t _index
|
|
||||||
void* _span_pos0
|
|
||||||
void* _span_cb0
|
|
||||||
int32_t error
|
|
||||||
const char* reason
|
|
||||||
const char* error_pos
|
|
||||||
void* data
|
|
||||||
void* _current
|
|
||||||
uint64_t content_length
|
|
||||||
uint8_t type
|
|
||||||
uint8_t method
|
|
||||||
uint8_t http_major
|
|
||||||
uint8_t http_minor
|
|
||||||
uint8_t header_state
|
|
||||||
uint8_t lenient_flags
|
|
||||||
uint8_t upgrade
|
|
||||||
uint8_t finish
|
|
||||||
uint16_t flags
|
|
||||||
uint16_t status_code
|
|
||||||
void* settings
|
|
||||||
|
|
||||||
ctypedef llhttp__internal_s llhttp__internal_t
|
|
||||||
ctypedef llhttp__internal_t llhttp_t
|
|
||||||
|
|
||||||
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
|
||||||
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
|
||||||
|
|
||||||
struct llhttp_settings_s:
|
|
||||||
llhttp_cb on_message_begin
|
|
||||||
llhttp_data_cb on_url
|
|
||||||
llhttp_data_cb on_status
|
|
||||||
llhttp_data_cb on_header_field
|
|
||||||
llhttp_data_cb on_header_value
|
|
||||||
llhttp_cb on_headers_complete
|
|
||||||
llhttp_data_cb on_body
|
|
||||||
llhttp_cb on_message_complete
|
|
||||||
llhttp_cb on_chunk_header
|
|
||||||
llhttp_cb on_chunk_complete
|
|
||||||
|
|
||||||
llhttp_cb on_url_complete
|
|
||||||
llhttp_cb on_status_complete
|
|
||||||
llhttp_cb on_header_field_complete
|
|
||||||
llhttp_cb on_header_value_complete
|
|
||||||
|
|
||||||
ctypedef llhttp_settings_s llhttp_settings_t
|
|
||||||
|
|
||||||
enum llhttp_errno:
|
|
||||||
HPE_OK,
|
|
||||||
HPE_INTERNAL,
|
|
||||||
HPE_STRICT,
|
|
||||||
HPE_LF_EXPECTED,
|
|
||||||
HPE_UNEXPECTED_CONTENT_LENGTH,
|
|
||||||
HPE_CLOSED_CONNECTION,
|
|
||||||
HPE_INVALID_METHOD,
|
|
||||||
HPE_INVALID_URL,
|
|
||||||
HPE_INVALID_CONSTANT,
|
|
||||||
HPE_INVALID_VERSION,
|
|
||||||
HPE_INVALID_HEADER_TOKEN,
|
|
||||||
HPE_INVALID_CONTENT_LENGTH,
|
|
||||||
HPE_INVALID_CHUNK_SIZE,
|
|
||||||
HPE_INVALID_STATUS,
|
|
||||||
HPE_INVALID_EOF_STATE,
|
|
||||||
HPE_INVALID_TRANSFER_ENCODING,
|
|
||||||
HPE_CB_MESSAGE_BEGIN,
|
|
||||||
HPE_CB_HEADERS_COMPLETE,
|
|
||||||
HPE_CB_MESSAGE_COMPLETE,
|
|
||||||
HPE_CB_CHUNK_HEADER,
|
|
||||||
HPE_CB_CHUNK_COMPLETE,
|
|
||||||
HPE_PAUSED,
|
|
||||||
HPE_PAUSED_UPGRADE,
|
|
||||||
HPE_USER
|
|
||||||
|
|
||||||
ctypedef llhttp_errno llhttp_errno_t
|
|
||||||
|
|
||||||
enum llhttp_flags:
|
|
||||||
F_CHUNKED,
|
|
||||||
F_CONTENT_LENGTH
|
|
||||||
|
|
||||||
enum llhttp_type:
|
|
||||||
HTTP_REQUEST,
|
|
||||||
HTTP_RESPONSE,
|
|
||||||
HTTP_BOTH
|
|
||||||
|
|
||||||
enum llhttp_method:
|
|
||||||
HTTP_DELETE,
|
|
||||||
HTTP_GET,
|
|
||||||
HTTP_HEAD,
|
|
||||||
HTTP_POST,
|
|
||||||
HTTP_PUT,
|
|
||||||
HTTP_CONNECT,
|
|
||||||
HTTP_OPTIONS,
|
|
||||||
HTTP_TRACE,
|
|
||||||
HTTP_COPY,
|
|
||||||
HTTP_LOCK,
|
|
||||||
HTTP_MKCOL,
|
|
||||||
HTTP_MOVE,
|
|
||||||
HTTP_PROPFIND,
|
|
||||||
HTTP_PROPPATCH,
|
|
||||||
HTTP_SEARCH,
|
|
||||||
HTTP_UNLOCK,
|
|
||||||
HTTP_BIND,
|
|
||||||
HTTP_REBIND,
|
|
||||||
HTTP_UNBIND,
|
|
||||||
HTTP_ACL,
|
|
||||||
HTTP_REPORT,
|
|
||||||
HTTP_MKACTIVITY,
|
|
||||||
HTTP_CHECKOUT,
|
|
||||||
HTTP_MERGE,
|
|
||||||
HTTP_MSEARCH,
|
|
||||||
HTTP_NOTIFY,
|
|
||||||
HTTP_SUBSCRIBE,
|
|
||||||
HTTP_UNSUBSCRIBE,
|
|
||||||
HTTP_PATCH,
|
|
||||||
HTTP_PURGE,
|
|
||||||
HTTP_MKCALENDAR,
|
|
||||||
HTTP_LINK,
|
|
||||||
HTTP_UNLINK,
|
|
||||||
HTTP_SOURCE,
|
|
||||||
HTTP_PRI,
|
|
||||||
HTTP_DESCRIBE,
|
|
||||||
HTTP_ANNOUNCE,
|
|
||||||
HTTP_SETUP,
|
|
||||||
HTTP_PLAY,
|
|
||||||
HTTP_PAUSE,
|
|
||||||
HTTP_TEARDOWN,
|
|
||||||
HTTP_GET_PARAMETER,
|
|
||||||
HTTP_SET_PARAMETER,
|
|
||||||
HTTP_REDIRECT,
|
|
||||||
HTTP_RECORD,
|
|
||||||
HTTP_FLUSH
|
|
||||||
|
|
||||||
ctypedef llhttp_method llhttp_method_t;
|
|
||||||
|
|
||||||
void llhttp_settings_init(llhttp_settings_t* settings)
|
|
||||||
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
|
||||||
const llhttp_settings_t* settings)
|
|
||||||
|
|
||||||
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
|
||||||
|
|
||||||
int llhttp_should_keep_alive(const llhttp_t* parser)
|
|
||||||
|
|
||||||
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
|
||||||
|
|
||||||
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
|
||||||
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
|
||||||
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
|
||||||
|
|
||||||
const char* llhttp_method_name(llhttp_method_t method)
|
|
||||||
|
|
||||||
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
|
||||||
void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
|
|
||||||
void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
cdef extern from "_find_header.h":
    # Look up a raw header name (buf, length) among the known headers.
    # Returns the header's index (used to index the generated `headers`
    # tuple by callers in _http_parser.pyx) or -1 when unknown.
    int find_header(char *, int)
|
|
||||||
@@ -1,83 +0,0 @@
|
|||||||
# The file is autogenerated from aiohttp/hdrs.py
|
|
||||||
# Run ./tools/gen.py to update it after the origin changing.
|
|
||||||
|
|
||||||
from . import hdrs
|
|
||||||
cdef tuple headers = (
|
|
||||||
hdrs.ACCEPT,
|
|
||||||
hdrs.ACCEPT_CHARSET,
|
|
||||||
hdrs.ACCEPT_ENCODING,
|
|
||||||
hdrs.ACCEPT_LANGUAGE,
|
|
||||||
hdrs.ACCEPT_RANGES,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
|
||||||
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_MAX_AGE,
|
|
||||||
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
|
||||||
hdrs.AGE,
|
|
||||||
hdrs.ALLOW,
|
|
||||||
hdrs.AUTHORIZATION,
|
|
||||||
hdrs.CACHE_CONTROL,
|
|
||||||
hdrs.CONNECTION,
|
|
||||||
hdrs.CONTENT_DISPOSITION,
|
|
||||||
hdrs.CONTENT_ENCODING,
|
|
||||||
hdrs.CONTENT_LANGUAGE,
|
|
||||||
hdrs.CONTENT_LENGTH,
|
|
||||||
hdrs.CONTENT_LOCATION,
|
|
||||||
hdrs.CONTENT_MD5,
|
|
||||||
hdrs.CONTENT_RANGE,
|
|
||||||
hdrs.CONTENT_TRANSFER_ENCODING,
|
|
||||||
hdrs.CONTENT_TYPE,
|
|
||||||
hdrs.COOKIE,
|
|
||||||
hdrs.DATE,
|
|
||||||
hdrs.DESTINATION,
|
|
||||||
hdrs.DIGEST,
|
|
||||||
hdrs.ETAG,
|
|
||||||
hdrs.EXPECT,
|
|
||||||
hdrs.EXPIRES,
|
|
||||||
hdrs.FORWARDED,
|
|
||||||
hdrs.FROM,
|
|
||||||
hdrs.HOST,
|
|
||||||
hdrs.IF_MATCH,
|
|
||||||
hdrs.IF_MODIFIED_SINCE,
|
|
||||||
hdrs.IF_NONE_MATCH,
|
|
||||||
hdrs.IF_RANGE,
|
|
||||||
hdrs.IF_UNMODIFIED_SINCE,
|
|
||||||
hdrs.KEEP_ALIVE,
|
|
||||||
hdrs.LAST_EVENT_ID,
|
|
||||||
hdrs.LAST_MODIFIED,
|
|
||||||
hdrs.LINK,
|
|
||||||
hdrs.LOCATION,
|
|
||||||
hdrs.MAX_FORWARDS,
|
|
||||||
hdrs.ORIGIN,
|
|
||||||
hdrs.PRAGMA,
|
|
||||||
hdrs.PROXY_AUTHENTICATE,
|
|
||||||
hdrs.PROXY_AUTHORIZATION,
|
|
||||||
hdrs.RANGE,
|
|
||||||
hdrs.REFERER,
|
|
||||||
hdrs.RETRY_AFTER,
|
|
||||||
hdrs.SEC_WEBSOCKET_ACCEPT,
|
|
||||||
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
|
||||||
hdrs.SEC_WEBSOCKET_KEY,
|
|
||||||
hdrs.SEC_WEBSOCKET_KEY1,
|
|
||||||
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
|
||||||
hdrs.SEC_WEBSOCKET_VERSION,
|
|
||||||
hdrs.SERVER,
|
|
||||||
hdrs.SET_COOKIE,
|
|
||||||
hdrs.TE,
|
|
||||||
hdrs.TRAILER,
|
|
||||||
hdrs.TRANSFER_ENCODING,
|
|
||||||
hdrs.URI,
|
|
||||||
hdrs.UPGRADE,
|
|
||||||
hdrs.USER_AGENT,
|
|
||||||
hdrs.VARY,
|
|
||||||
hdrs.VIA,
|
|
||||||
hdrs.WWW_AUTHENTICATE,
|
|
||||||
hdrs.WANT_DIGEST,
|
|
||||||
hdrs.WARNING,
|
|
||||||
hdrs.X_FORWARDED_FOR,
|
|
||||||
hdrs.X_FORWARDED_HOST,
|
|
||||||
hdrs.X_FORWARDED_PROTO,
|
|
||||||
)
|
|
||||||
Binary file not shown.
@@ -1,837 +0,0 @@
|
|||||||
#cython: language_level=3
|
|
||||||
#
|
|
||||||
# Based on https://github.com/MagicStack/httptools
|
|
||||||
#
|
|
||||||
|
|
||||||
from cpython cimport (
|
|
||||||
Py_buffer,
|
|
||||||
PyBUF_SIMPLE,
|
|
||||||
PyBuffer_Release,
|
|
||||||
PyBytes_AsString,
|
|
||||||
PyBytes_AsStringAndSize,
|
|
||||||
PyObject_GetBuffer,
|
|
||||||
)
|
|
||||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
|
||||||
from libc.limits cimport ULLONG_MAX
|
|
||||||
from libc.string cimport memcpy
|
|
||||||
|
|
||||||
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
|
||||||
from yarl import URL as _URL
|
|
||||||
|
|
||||||
from aiohttp import hdrs
|
|
||||||
from aiohttp.helpers import DEBUG, set_exception
|
|
||||||
|
|
||||||
from .http_exceptions import (
|
|
||||||
BadHttpMessage,
|
|
||||||
BadHttpMethod,
|
|
||||||
BadStatusLine,
|
|
||||||
ContentLengthError,
|
|
||||||
InvalidHeader,
|
|
||||||
InvalidURLError,
|
|
||||||
LineTooLong,
|
|
||||||
PayloadEncodingError,
|
|
||||||
TransferEncodingError,
|
|
||||||
)
|
|
||||||
from .http_parser import DeflateBuffer as _DeflateBuffer
|
|
||||||
from .http_writer import (
|
|
||||||
HttpVersion as _HttpVersion,
|
|
||||||
HttpVersion10 as _HttpVersion10,
|
|
||||||
HttpVersion11 as _HttpVersion11,
|
|
||||||
)
|
|
||||||
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
|
||||||
|
|
||||||
cimport cython
|
|
||||||
|
|
||||||
from aiohttp cimport _cparser as cparser
|
|
||||||
|
|
||||||
include "_headers.pxi"
|
|
||||||
|
|
||||||
from aiohttp cimport _find_header
|
|
||||||
|
|
||||||
ALLOWED_UPGRADES = frozenset({"websocket"})
|
|
||||||
DEF DEFAULT_FREELIST_SIZE = 250
|
|
||||||
|
|
||||||
cdef extern from "Python.h":
|
|
||||||
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
|
||||||
Py_ssize_t PyByteArray_Size(object) except -1
|
|
||||||
char* PyByteArray_AsString(object)
|
|
||||||
|
|
||||||
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
|
||||||
'RawRequestMessage', 'RawResponseMessage')
|
|
||||||
|
|
||||||
cdef object URL = _URL
|
|
||||||
cdef object URL_build = URL.build
|
|
||||||
cdef object CIMultiDict = _CIMultiDict
|
|
||||||
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
|
||||||
cdef object HttpVersion = _HttpVersion
|
|
||||||
cdef object HttpVersion10 = _HttpVersion10
|
|
||||||
cdef object HttpVersion11 = _HttpVersion11
|
|
||||||
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
|
||||||
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
|
||||||
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
|
||||||
cdef object StreamReader = _StreamReader
|
|
||||||
cdef object DeflateBuffer = _DeflateBuffer
|
|
||||||
cdef bytes EMPTY_BYTES = b""
|
|
||||||
|
|
||||||
cdef inline object extend(object buf, const char* at, size_t length):
|
|
||||||
cdef Py_ssize_t s
|
|
||||||
cdef char* ptr
|
|
||||||
s = PyByteArray_Size(buf)
|
|
||||||
PyByteArray_Resize(buf, s + length)
|
|
||||||
ptr = PyByteArray_AsString(buf)
|
|
||||||
memcpy(ptr + s, at, length)
|
|
||||||
|
|
||||||
|
|
||||||
DEF METHODS_COUNT = 46;
|
|
||||||
|
|
||||||
cdef list _http_method = []
|
|
||||||
|
|
||||||
for i in range(METHODS_COUNT):
|
|
||||||
_http_method.append(
|
|
||||||
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline str http_method_str(int i):
|
|
||||||
if i < METHODS_COUNT:
|
|
||||||
return <str>_http_method[i]
|
|
||||||
else:
|
|
||||||
return "<unknown>"
|
|
||||||
|
|
||||||
cdef inline object find_header(bytes raw_header):
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
cdef char *buf
|
|
||||||
cdef int idx
|
|
||||||
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
|
||||||
idx = _find_header.find_header(buf, size)
|
|
||||||
if idx == -1:
|
|
||||||
return raw_header.decode('utf-8', 'surrogateescape')
|
|
||||||
return headers[idx]
|
|
||||||
|
|
||||||
|
|
||||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
|
||||||
cdef class RawRequestMessage:
|
|
||||||
cdef readonly str method
|
|
||||||
cdef readonly str path
|
|
||||||
cdef readonly object version # HttpVersion
|
|
||||||
cdef readonly object headers # CIMultiDict
|
|
||||||
cdef readonly object raw_headers # tuple
|
|
||||||
cdef readonly object should_close
|
|
||||||
cdef readonly object compression
|
|
||||||
cdef readonly object upgrade
|
|
||||||
cdef readonly object chunked
|
|
||||||
cdef readonly object url # yarl.URL
|
|
||||||
|
|
||||||
def __init__(self, method, path, version, headers, raw_headers,
|
|
||||||
should_close, compression, upgrade, chunked, url):
|
|
||||||
self.method = method
|
|
||||||
self.path = path
|
|
||||||
self.version = version
|
|
||||||
self.headers = headers
|
|
||||||
self.raw_headers = raw_headers
|
|
||||||
self.should_close = should_close
|
|
||||||
self.compression = compression
|
|
||||||
self.upgrade = upgrade
|
|
||||||
self.chunked = chunked
|
|
||||||
self.url = url
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
info = []
|
|
||||||
info.append(("method", self.method))
|
|
||||||
info.append(("path", self.path))
|
|
||||||
info.append(("version", self.version))
|
|
||||||
info.append(("headers", self.headers))
|
|
||||||
info.append(("raw_headers", self.raw_headers))
|
|
||||||
info.append(("should_close", self.should_close))
|
|
||||||
info.append(("compression", self.compression))
|
|
||||||
info.append(("upgrade", self.upgrade))
|
|
||||||
info.append(("chunked", self.chunked))
|
|
||||||
info.append(("url", self.url))
|
|
||||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
|
||||||
return '<RawRequestMessage(' + sinfo + ')>'
|
|
||||||
|
|
||||||
def _replace(self, **dct):
|
|
||||||
cdef RawRequestMessage ret
|
|
||||||
ret = _new_request_message(self.method,
|
|
||||||
self.path,
|
|
||||||
self.version,
|
|
||||||
self.headers,
|
|
||||||
self.raw_headers,
|
|
||||||
self.should_close,
|
|
||||||
self.compression,
|
|
||||||
self.upgrade,
|
|
||||||
self.chunked,
|
|
||||||
self.url)
|
|
||||||
if "method" in dct:
|
|
||||||
ret.method = dct["method"]
|
|
||||||
if "path" in dct:
|
|
||||||
ret.path = dct["path"]
|
|
||||||
if "version" in dct:
|
|
||||||
ret.version = dct["version"]
|
|
||||||
if "headers" in dct:
|
|
||||||
ret.headers = dct["headers"]
|
|
||||||
if "raw_headers" in dct:
|
|
||||||
ret.raw_headers = dct["raw_headers"]
|
|
||||||
if "should_close" in dct:
|
|
||||||
ret.should_close = dct["should_close"]
|
|
||||||
if "compression" in dct:
|
|
||||||
ret.compression = dct["compression"]
|
|
||||||
if "upgrade" in dct:
|
|
||||||
ret.upgrade = dct["upgrade"]
|
|
||||||
if "chunked" in dct:
|
|
||||||
ret.chunked = dct["chunked"]
|
|
||||||
if "url" in dct:
|
|
||||||
ret.url = dct["url"]
|
|
||||||
return ret
|
|
||||||
|
|
||||||
cdef _new_request_message(str method,
|
|
||||||
str path,
|
|
||||||
object version,
|
|
||||||
object headers,
|
|
||||||
object raw_headers,
|
|
||||||
bint should_close,
|
|
||||||
object compression,
|
|
||||||
bint upgrade,
|
|
||||||
bint chunked,
|
|
||||||
object url):
|
|
||||||
cdef RawRequestMessage ret
|
|
||||||
ret = RawRequestMessage.__new__(RawRequestMessage)
|
|
||||||
ret.method = method
|
|
||||||
ret.path = path
|
|
||||||
ret.version = version
|
|
||||||
ret.headers = headers
|
|
||||||
ret.raw_headers = raw_headers
|
|
||||||
ret.should_close = should_close
|
|
||||||
ret.compression = compression
|
|
||||||
ret.upgrade = upgrade
|
|
||||||
ret.chunked = chunked
|
|
||||||
ret.url = url
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
|
||||||
cdef class RawResponseMessage:
|
|
||||||
cdef readonly object version # HttpVersion
|
|
||||||
cdef readonly int code
|
|
||||||
cdef readonly str reason
|
|
||||||
cdef readonly object headers # CIMultiDict
|
|
||||||
cdef readonly object raw_headers # tuple
|
|
||||||
cdef readonly object should_close
|
|
||||||
cdef readonly object compression
|
|
||||||
cdef readonly object upgrade
|
|
||||||
cdef readonly object chunked
|
|
||||||
|
|
||||||
def __init__(self, version, code, reason, headers, raw_headers,
|
|
||||||
should_close, compression, upgrade, chunked):
|
|
||||||
self.version = version
|
|
||||||
self.code = code
|
|
||||||
self.reason = reason
|
|
||||||
self.headers = headers
|
|
||||||
self.raw_headers = raw_headers
|
|
||||||
self.should_close = should_close
|
|
||||||
self.compression = compression
|
|
||||||
self.upgrade = upgrade
|
|
||||||
self.chunked = chunked
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
info = []
|
|
||||||
info.append(("version", self.version))
|
|
||||||
info.append(("code", self.code))
|
|
||||||
info.append(("reason", self.reason))
|
|
||||||
info.append(("headers", self.headers))
|
|
||||||
info.append(("raw_headers", self.raw_headers))
|
|
||||||
info.append(("should_close", self.should_close))
|
|
||||||
info.append(("compression", self.compression))
|
|
||||||
info.append(("upgrade", self.upgrade))
|
|
||||||
info.append(("chunked", self.chunked))
|
|
||||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
|
||||||
return '<RawResponseMessage(' + sinfo + ')>'
|
|
||||||
|
|
||||||
|
|
||||||
cdef _new_response_message(object version,
|
|
||||||
int code,
|
|
||||||
str reason,
|
|
||||||
object headers,
|
|
||||||
object raw_headers,
|
|
||||||
bint should_close,
|
|
||||||
object compression,
|
|
||||||
bint upgrade,
|
|
||||||
bint chunked):
|
|
||||||
cdef RawResponseMessage ret
|
|
||||||
ret = RawResponseMessage.__new__(RawResponseMessage)
|
|
||||||
ret.version = version
|
|
||||||
ret.code = code
|
|
||||||
ret.reason = reason
|
|
||||||
ret.headers = headers
|
|
||||||
ret.raw_headers = raw_headers
|
|
||||||
ret.should_close = should_close
|
|
||||||
ret.compression = compression
|
|
||||||
ret.upgrade = upgrade
|
|
||||||
ret.chunked = chunked
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
@cython.internal
|
|
||||||
cdef class HttpParser:
|
|
||||||
|
|
||||||
cdef:
|
|
||||||
cparser.llhttp_t* _cparser
|
|
||||||
cparser.llhttp_settings_t* _csettings
|
|
||||||
|
|
||||||
bytes _raw_name
|
|
||||||
object _name
|
|
||||||
bytes _raw_value
|
|
||||||
bint _has_value
|
|
||||||
|
|
||||||
object _protocol
|
|
||||||
object _loop
|
|
||||||
object _timer
|
|
||||||
|
|
||||||
size_t _max_line_size
|
|
||||||
size_t _max_field_size
|
|
||||||
size_t _max_headers
|
|
||||||
bint _response_with_body
|
|
||||||
bint _read_until_eof
|
|
||||||
|
|
||||||
bint _started
|
|
||||||
object _url
|
|
||||||
bytearray _buf
|
|
||||||
str _path
|
|
||||||
str _reason
|
|
||||||
list _headers
|
|
||||||
list _raw_headers
|
|
||||||
bint _upgraded
|
|
||||||
list _messages
|
|
||||||
object _payload
|
|
||||||
bint _payload_error
|
|
||||||
object _payload_exception
|
|
||||||
object _last_error
|
|
||||||
bint _auto_decompress
|
|
||||||
int _limit
|
|
||||||
|
|
||||||
str _content_encoding
|
|
||||||
|
|
||||||
Py_buffer py_buf
|
|
||||||
|
|
||||||
def __cinit__(self):
|
|
||||||
self._cparser = <cparser.llhttp_t*> \
|
|
||||||
PyMem_Malloc(sizeof(cparser.llhttp_t))
|
|
||||||
if self._cparser is NULL:
|
|
||||||
raise MemoryError()
|
|
||||||
|
|
||||||
self._csettings = <cparser.llhttp_settings_t*> \
|
|
||||||
PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
|
|
||||||
if self._csettings is NULL:
|
|
||||||
raise MemoryError()
|
|
||||||
|
|
||||||
def __dealloc__(self):
|
|
||||||
PyMem_Free(self._cparser)
|
|
||||||
PyMem_Free(self._csettings)
|
|
||||||
|
|
||||||
cdef _init(
|
|
||||||
self, cparser.llhttp_type mode,
|
|
||||||
object protocol, object loop, int limit,
|
|
||||||
object timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True,
|
|
||||||
):
|
|
||||||
cparser.llhttp_settings_init(self._csettings)
|
|
||||||
cparser.llhttp_init(self._cparser, mode, self._csettings)
|
|
||||||
self._cparser.data = <void*>self
|
|
||||||
self._cparser.content_length = 0
|
|
||||||
|
|
||||||
self._protocol = protocol
|
|
||||||
self._loop = loop
|
|
||||||
self._timer = timer
|
|
||||||
|
|
||||||
self._buf = bytearray()
|
|
||||||
self._payload = None
|
|
||||||
self._payload_error = 0
|
|
||||||
self._payload_exception = payload_exception
|
|
||||||
self._messages = []
|
|
||||||
|
|
||||||
self._raw_name = EMPTY_BYTES
|
|
||||||
self._raw_value = EMPTY_BYTES
|
|
||||||
self._has_value = False
|
|
||||||
|
|
||||||
self._max_line_size = max_line_size
|
|
||||||
self._max_headers = max_headers
|
|
||||||
self._max_field_size = max_field_size
|
|
||||||
self._response_with_body = response_with_body
|
|
||||||
self._read_until_eof = read_until_eof
|
|
||||||
self._upgraded = False
|
|
||||||
self._auto_decompress = auto_decompress
|
|
||||||
self._content_encoding = None
|
|
||||||
|
|
||||||
self._csettings.on_url = cb_on_url
|
|
||||||
self._csettings.on_status = cb_on_status
|
|
||||||
self._csettings.on_header_field = cb_on_header_field
|
|
||||||
self._csettings.on_header_value = cb_on_header_value
|
|
||||||
self._csettings.on_headers_complete = cb_on_headers_complete
|
|
||||||
self._csettings.on_body = cb_on_body
|
|
||||||
self._csettings.on_message_begin = cb_on_message_begin
|
|
||||||
self._csettings.on_message_complete = cb_on_message_complete
|
|
||||||
self._csettings.on_chunk_header = cb_on_chunk_header
|
|
||||||
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
|
||||||
|
|
||||||
self._last_error = None
|
|
||||||
self._limit = limit
|
|
||||||
|
|
||||||
cdef _process_header(self):
|
|
||||||
cdef str value
|
|
||||||
if self._raw_name is not EMPTY_BYTES:
|
|
||||||
name = find_header(self._raw_name)
|
|
||||||
value = self._raw_value.decode('utf-8', 'surrogateescape')
|
|
||||||
|
|
||||||
self._headers.append((name, value))
|
|
||||||
|
|
||||||
if name is CONTENT_ENCODING:
|
|
||||||
self._content_encoding = value
|
|
||||||
|
|
||||||
self._has_value = False
|
|
||||||
self._raw_headers.append((self._raw_name, self._raw_value))
|
|
||||||
self._raw_name = EMPTY_BYTES
|
|
||||||
self._raw_value = EMPTY_BYTES
|
|
||||||
|
|
||||||
cdef _on_header_field(self, char* at, size_t length):
|
|
||||||
if self._has_value:
|
|
||||||
self._process_header()
|
|
||||||
|
|
||||||
if self._raw_name is EMPTY_BYTES:
|
|
||||||
self._raw_name = at[:length]
|
|
||||||
else:
|
|
||||||
self._raw_name += at[:length]
|
|
||||||
|
|
||||||
cdef _on_header_value(self, char* at, size_t length):
|
|
||||||
if self._raw_value is EMPTY_BYTES:
|
|
||||||
self._raw_value = at[:length]
|
|
||||||
else:
|
|
||||||
self._raw_value += at[:length]
|
|
||||||
self._has_value = True
|
|
||||||
|
|
||||||
cdef _on_headers_complete(self):
|
|
||||||
self._process_header()
|
|
||||||
|
|
||||||
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
|
|
||||||
upgrade = self._cparser.upgrade
|
|
||||||
chunked = self._cparser.flags & cparser.F_CHUNKED
|
|
||||||
|
|
||||||
raw_headers = tuple(self._raw_headers)
|
|
||||||
headers = CIMultiDictProxy(CIMultiDict(self._headers))
|
|
||||||
|
|
||||||
if self._cparser.type == cparser.HTTP_REQUEST:
|
|
||||||
allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES
|
|
||||||
if allowed or self._cparser.method == cparser.HTTP_CONNECT:
|
|
||||||
self._upgraded = True
|
|
||||||
else:
|
|
||||||
if upgrade and self._cparser.status_code == 101:
|
|
||||||
self._upgraded = True
|
|
||||||
|
|
||||||
# do not support old websocket spec
|
|
||||||
if SEC_WEBSOCKET_KEY1 in headers:
|
|
||||||
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
|
||||||
|
|
||||||
encoding = None
|
|
||||||
enc = self._content_encoding
|
|
||||||
if enc is not None:
|
|
||||||
self._content_encoding = None
|
|
||||||
enc = enc.lower()
|
|
||||||
if enc in ('gzip', 'deflate', 'br'):
|
|
||||||
encoding = enc
|
|
||||||
|
|
||||||
if self._cparser.type == cparser.HTTP_REQUEST:
|
|
||||||
method = http_method_str(self._cparser.method)
|
|
||||||
msg = _new_request_message(
|
|
||||||
method, self._path,
|
|
||||||
self.http_version(), headers, raw_headers,
|
|
||||||
should_close, encoding, upgrade, chunked, self._url)
|
|
||||||
else:
|
|
||||||
msg = _new_response_message(
|
|
||||||
self.http_version(), self._cparser.status_code, self._reason,
|
|
||||||
headers, raw_headers, should_close, encoding,
|
|
||||||
upgrade, chunked)
|
|
||||||
|
|
||||||
if (
|
|
||||||
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
|
|
||||||
self._cparser.method == cparser.HTTP_CONNECT or
|
|
||||||
(self._cparser.status_code >= 199 and
|
|
||||||
self._cparser.content_length == 0 and
|
|
||||||
self._read_until_eof)
|
|
||||||
):
|
|
||||||
payload = StreamReader(
|
|
||||||
self._protocol, timer=self._timer, loop=self._loop,
|
|
||||||
limit=self._limit)
|
|
||||||
else:
|
|
||||||
payload = EMPTY_PAYLOAD
|
|
||||||
|
|
||||||
self._payload = payload
|
|
||||||
if encoding is not None and self._auto_decompress:
|
|
||||||
self._payload = DeflateBuffer(payload, encoding)
|
|
||||||
|
|
||||||
if not self._response_with_body:
|
|
||||||
payload = EMPTY_PAYLOAD
|
|
||||||
|
|
||||||
self._messages.append((msg, payload))
|
|
||||||
|
|
||||||
cdef _on_message_complete(self):
|
|
||||||
self._payload.feed_eof()
|
|
||||||
self._payload = None
|
|
||||||
|
|
||||||
cdef _on_chunk_header(self):
|
|
||||||
self._payload.begin_http_chunk_receiving()
|
|
||||||
|
|
||||||
cdef _on_chunk_complete(self):
|
|
||||||
self._payload.end_http_chunk_receiving()
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
cdef inline http_version(self):
|
|
||||||
cdef cparser.llhttp_t* parser = self._cparser
|
|
||||||
|
|
||||||
if parser.http_major == 1:
|
|
||||||
if parser.http_minor == 0:
|
|
||||||
return HttpVersion10
|
|
||||||
elif parser.http_minor == 1:
|
|
||||||
return HttpVersion11
|
|
||||||
|
|
||||||
return HttpVersion(parser.http_major, parser.http_minor)
|
|
||||||
|
|
||||||
### Public API ###
|
|
||||||
|
|
||||||
def feed_eof(self):
|
|
||||||
cdef bytes desc
|
|
||||||
|
|
||||||
if self._payload is not None:
|
|
||||||
if self._cparser.flags & cparser.F_CHUNKED:
|
|
||||||
raise TransferEncodingError(
|
|
||||||
"Not enough data to satisfy transfer length header.")
|
|
||||||
elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
|
|
||||||
raise ContentLengthError(
|
|
||||||
"Not enough data to satisfy content length header.")
|
|
||||||
elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
|
|
||||||
desc = cparser.llhttp_get_error_reason(self._cparser)
|
|
||||||
raise PayloadEncodingError(desc.decode('latin-1'))
|
|
||||||
else:
|
|
||||||
self._payload.feed_eof()
|
|
||||||
elif self._started:
|
|
||||||
self._on_headers_complete()
|
|
||||||
if self._messages:
|
|
||||||
return self._messages[-1][0]
|
|
||||||
|
|
||||||
def feed_data(self, data):
|
|
||||||
cdef:
|
|
||||||
size_t data_len
|
|
||||||
size_t nb
|
|
||||||
cdef cparser.llhttp_errno_t errno
|
|
||||||
|
|
||||||
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
|
||||||
data_len = <size_t>self.py_buf.len
|
|
||||||
|
|
||||||
errno = cparser.llhttp_execute(
|
|
||||||
self._cparser,
|
|
||||||
<char*>self.py_buf.buf,
|
|
||||||
data_len)
|
|
||||||
|
|
||||||
if errno is cparser.HPE_PAUSED_UPGRADE:
|
|
||||||
cparser.llhttp_resume_after_upgrade(self._cparser)
|
|
||||||
|
|
||||||
nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
|
|
||||||
|
|
||||||
PyBuffer_Release(&self.py_buf)
|
|
||||||
|
|
||||||
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
|
|
||||||
if self._payload_error == 0:
|
|
||||||
if self._last_error is not None:
|
|
||||||
ex = self._last_error
|
|
||||||
self._last_error = None
|
|
||||||
else:
|
|
||||||
after = cparser.llhttp_get_error_pos(self._cparser)
|
|
||||||
before = data[:after - <char*>self.py_buf.buf]
|
|
||||||
after_b = after.split(b"\r\n", 1)[0]
|
|
||||||
before = before.rsplit(b"\r\n", 1)[-1]
|
|
||||||
data = before + after_b
|
|
||||||
pointer = " " * (len(repr(before))-1) + "^"
|
|
||||||
ex = parser_error_from_errno(self._cparser, data, pointer)
|
|
||||||
self._payload = None
|
|
||||||
raise ex
|
|
||||||
|
|
||||||
if self._messages:
|
|
||||||
messages = self._messages
|
|
||||||
self._messages = []
|
|
||||||
else:
|
|
||||||
messages = ()
|
|
||||||
|
|
||||||
if self._upgraded:
|
|
||||||
return messages, True, data[nb:]
|
|
||||||
else:
|
|
||||||
return messages, False, b""
|
|
||||||
|
|
||||||
def set_upgraded(self, val):
|
|
||||||
self._upgraded = val
|
|
||||||
|
|
||||||
|
|
||||||
cdef class HttpRequestParser(HttpParser):
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, protocol, loop, int limit, timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True,
|
|
||||||
):
|
|
||||||
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
|
|
||||||
max_line_size, max_headers, max_field_size,
|
|
||||||
payload_exception, response_with_body, read_until_eof,
|
|
||||||
auto_decompress)
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
cdef int idx1, idx2
|
|
||||||
if not self._buf:
|
|
||||||
return
|
|
||||||
self._path = self._buf.decode('utf-8', 'surrogateescape')
|
|
||||||
try:
|
|
||||||
idx3 = len(self._path)
|
|
||||||
if self._cparser.method == cparser.HTTP_CONNECT:
|
|
||||||
# authority-form,
|
|
||||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
|
|
||||||
self._url = URL.build(authority=self._path, encoded=True)
|
|
||||||
elif idx3 > 1 and self._path[0] == '/':
|
|
||||||
# origin-form,
|
|
||||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
|
|
||||||
idx1 = self._path.find("?")
|
|
||||||
if idx1 == -1:
|
|
||||||
query = ""
|
|
||||||
idx2 = self._path.find("#")
|
|
||||||
if idx2 == -1:
|
|
||||||
path = self._path
|
|
||||||
fragment = ""
|
|
||||||
else:
|
|
||||||
path = self._path[0: idx2]
|
|
||||||
fragment = self._path[idx2+1:]
|
|
||||||
|
|
||||||
else:
|
|
||||||
path = self._path[0:idx1]
|
|
||||||
idx1 += 1
|
|
||||||
idx2 = self._path.find("#", idx1+1)
|
|
||||||
if idx2 == -1:
|
|
||||||
query = self._path[idx1:]
|
|
||||||
fragment = ""
|
|
||||||
else:
|
|
||||||
query = self._path[idx1: idx2]
|
|
||||||
fragment = self._path[idx2+1:]
|
|
||||||
|
|
||||||
self._url = URL.build(
|
|
||||||
path=path,
|
|
||||||
query_string=query,
|
|
||||||
fragment=fragment,
|
|
||||||
encoded=True,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# absolute-form for proxy maybe,
|
|
||||||
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
|
|
||||||
self._url = URL(self._path, encoded=True)
|
|
||||||
finally:
|
|
||||||
PyByteArray_Resize(self._buf, 0)
|
|
||||||
|
|
||||||
|
|
||||||
cdef class HttpResponseParser(HttpParser):
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, protocol, loop, int limit, timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True
|
|
||||||
):
|
|
||||||
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
|
||||||
max_line_size, max_headers, max_field_size,
|
|
||||||
payload_exception, response_with_body, read_until_eof,
|
|
||||||
auto_decompress)
|
|
||||||
# Use strict parsing on dev mode, so users are warned about broken servers.
|
|
||||||
if not DEBUG:
|
|
||||||
cparser.llhttp_set_lenient_headers(self._cparser, 1)
|
|
||||||
cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
|
|
||||||
cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
if self._buf:
|
|
||||||
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
|
||||||
PyByteArray_Resize(self._buf, 0)
|
|
||||||
else:
|
|
||||||
self._reason = self._reason or ''
|
|
||||||
|
|
||||||
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
|
|
||||||
pyparser._started = True
|
|
||||||
pyparser._headers = []
|
|
||||||
pyparser._raw_headers = []
|
|
||||||
PyByteArray_Resize(pyparser._buf, 0)
|
|
||||||
pyparser._path = None
|
|
||||||
pyparser._reason = None
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_url(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Status line is too long', pyparser._max_line_size, length)
|
|
||||||
extend(pyparser._buf, at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_status(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef str reason
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Status line is too long', pyparser._max_line_size, length)
|
|
||||||
extend(pyparser._buf, at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_header_field(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
try:
|
|
||||||
pyparser._on_status_complete()
|
|
||||||
size = len(pyparser._raw_name) + length
|
|
||||||
if size > pyparser._max_field_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Header name is too long', pyparser._max_field_size, size)
|
|
||||||
pyparser._on_header_field(at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_header_value(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
try:
|
|
||||||
size = len(pyparser._raw_value) + length
|
|
||||||
if size > pyparser._max_field_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Header value is too long', pyparser._max_field_size, size)
|
|
||||||
pyparser._on_header_value(at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_status_complete()
|
|
||||||
pyparser._on_headers_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
|
|
||||||
return 2
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_body(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef bytes body = at[:length]
|
|
||||||
try:
|
|
||||||
pyparser._payload.feed_data(body, length)
|
|
||||||
except BaseException as underlying_exc:
|
|
||||||
reraised_exc = underlying_exc
|
|
||||||
if pyparser._payload_exception is not None:
|
|
||||||
reraised_exc = pyparser._payload_exception(str(underlying_exc))
|
|
||||||
|
|
||||||
set_exception(pyparser._payload, reraised_exc, underlying_exc)
|
|
||||||
|
|
||||||
pyparser._payload_error = 1
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._started = False
|
|
||||||
pyparser._on_message_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_chunk_header()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_chunk_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
|
|
||||||
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
|
|
||||||
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
|
|
||||||
|
|
||||||
err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
|
|
||||||
|
|
||||||
if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
|
|
||||||
cparser.HPE_CB_HEADERS_COMPLETE,
|
|
||||||
cparser.HPE_CB_MESSAGE_COMPLETE,
|
|
||||||
cparser.HPE_CB_CHUNK_HEADER,
|
|
||||||
cparser.HPE_CB_CHUNK_COMPLETE,
|
|
||||||
cparser.HPE_INVALID_CONSTANT,
|
|
||||||
cparser.HPE_INVALID_HEADER_TOKEN,
|
|
||||||
cparser.HPE_INVALID_CONTENT_LENGTH,
|
|
||||||
cparser.HPE_INVALID_CHUNK_SIZE,
|
|
||||||
cparser.HPE_INVALID_EOF_STATE,
|
|
||||||
cparser.HPE_INVALID_TRANSFER_ENCODING}:
|
|
||||||
return BadHttpMessage(err_msg)
|
|
||||||
elif errno == cparser.HPE_INVALID_METHOD:
|
|
||||||
return BadHttpMethod(error=err_msg)
|
|
||||||
elif errno in {cparser.HPE_INVALID_STATUS,
|
|
||||||
cparser.HPE_INVALID_VERSION}:
|
|
||||||
return BadStatusLine(error=err_msg)
|
|
||||||
elif errno == cparser.HPE_INVALID_URL:
|
|
||||||
return InvalidURLError(err_msg)
|
|
||||||
|
|
||||||
return BadHttpMessage(err_msg)
|
|
||||||
Binary file not shown.
@@ -1,160 +0,0 @@
|
|||||||
from cpython.bytes cimport PyBytes_FromStringAndSize
|
|
||||||
from cpython.exc cimport PyErr_NoMemory
|
|
||||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
|
||||||
from cpython.object cimport PyObject_Str
|
|
||||||
from libc.stdint cimport uint8_t, uint64_t
|
|
||||||
from libc.string cimport memcpy
|
|
||||||
|
|
||||||
from multidict import istr
|
|
||||||
|
|
||||||
DEF BUF_SIZE = 16 * 1024 # 16KiB
|
|
||||||
cdef char BUFFER[BUF_SIZE]
|
|
||||||
|
|
||||||
cdef object _istr = istr
|
|
||||||
|
|
||||||
|
|
||||||
# ----------------- writer ---------------------------
|
|
||||||
|
|
||||||
cdef struct Writer:
|
|
||||||
char *buf
|
|
||||||
Py_ssize_t size
|
|
||||||
Py_ssize_t pos
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline void _init_writer(Writer* writer):
|
|
||||||
writer.buf = &BUFFER[0]
|
|
||||||
writer.size = BUF_SIZE
|
|
||||||
writer.pos = 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline void _release_writer(Writer* writer):
|
|
||||||
if writer.buf != BUFFER:
|
|
||||||
PyMem_Free(writer.buf)
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_byte(Writer* writer, uint8_t ch):
|
|
||||||
cdef char * buf
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
|
|
||||||
if writer.pos == writer.size:
|
|
||||||
# reallocate
|
|
||||||
size = writer.size + BUF_SIZE
|
|
||||||
if writer.buf == BUFFER:
|
|
||||||
buf = <char*>PyMem_Malloc(size)
|
|
||||||
if buf == NULL:
|
|
||||||
PyErr_NoMemory()
|
|
||||||
return -1
|
|
||||||
memcpy(buf, writer.buf, writer.size)
|
|
||||||
else:
|
|
||||||
buf = <char*>PyMem_Realloc(writer.buf, size)
|
|
||||||
if buf == NULL:
|
|
||||||
PyErr_NoMemory()
|
|
||||||
return -1
|
|
||||||
writer.buf = buf
|
|
||||||
writer.size = size
|
|
||||||
writer.buf[writer.pos] = <char>ch
|
|
||||||
writer.pos += 1
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
|
|
||||||
cdef uint64_t utf = <uint64_t> symbol
|
|
||||||
|
|
||||||
if utf < 0x80:
|
|
||||||
return _write_byte(writer, <uint8_t>utf)
|
|
||||||
elif utf < 0x800:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
elif 0xD800 <= utf <= 0xDFFF:
|
|
||||||
# surogate pair, ignored
|
|
||||||
return 0
|
|
||||||
elif utf < 0x10000:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
elif utf > 0x10FFFF:
|
|
||||||
# symbol is too large
|
|
||||||
return 0
|
|
||||||
else:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer,
|
|
||||||
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer,
|
|
||||||
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_str(Writer* writer, str s):
|
|
||||||
cdef Py_UCS4 ch
|
|
||||||
for ch in s:
|
|
||||||
if _write_utf8(writer, ch) < 0:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s):
|
|
||||||
cdef Py_UCS4 ch
|
|
||||||
cdef str out_str
|
|
||||||
if type(s) is str:
|
|
||||||
out_str = <str>s
|
|
||||||
elif type(s) is _istr:
|
|
||||||
out_str = PyObject_Str(s)
|
|
||||||
elif not isinstance(s, str):
|
|
||||||
raise TypeError("Cannot serialize non-str key {!r}".format(s))
|
|
||||||
else:
|
|
||||||
out_str = str(s)
|
|
||||||
|
|
||||||
for ch in out_str:
|
|
||||||
if ch == 0x0D or ch == 0x0A:
|
|
||||||
raise ValueError(
|
|
||||||
"Newline or carriage return detected in headers. "
|
|
||||||
"Potential header injection attack."
|
|
||||||
)
|
|
||||||
if _write_utf8(writer, ch) < 0:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
|
|
||||||
# --------------- _serialize_headers ----------------------
|
|
||||||
|
|
||||||
def _serialize_headers(str status_line, headers):
|
|
||||||
cdef Writer writer
|
|
||||||
cdef object key
|
|
||||||
cdef object val
|
|
||||||
|
|
||||||
_init_writer(&writer)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if _write_str(&writer, status_line) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
for key, val in headers.items():
|
|
||||||
if _write_str_raise_on_nlcr(&writer, key) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b':') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b' ') < 0:
|
|
||||||
raise
|
|
||||||
if _write_str_raise_on_nlcr(&writer, val) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
|
|
||||||
finally:
|
|
||||||
_release_writer(&writer)
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
b01999d409b29bd916e067bc963d5f2d9ee63cfc9ae0bccb769910131417bf93 /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/mask.pxd
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user