before send to remote
This commit is contained in:
commit
3814beb3e0
|
@ -0,0 +1,21 @@
|
||||||
|
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.238.0/containers/python-3/.devcontainer/base.Dockerfile
|
||||||
|
|
||||||
|
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
|
||||||
|
ARG VARIANT="3.10-bullseye"
|
||||||
|
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
|
||||||
|
|
||||||
|
# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
|
||||||
|
ARG NODE_VERSION="none"
|
||||||
|
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
|
||||||
|
|
||||||
|
# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
|
||||||
|
# COPY requirements.txt /tmp/pip-tmp/
|
||||||
|
# RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
|
||||||
|
# && rm -rf /tmp/pip-tmp
|
||||||
|
|
||||||
|
# [Optional] Uncomment this section to install additional OS packages.
|
||||||
|
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||||
|
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||||
|
|
||||||
|
# [Optional] Uncomment this line to install global node packages.
|
||||||
|
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
|
|
@ -0,0 +1,57 @@
|
||||||
|
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||||
|
// https://github.com/microsoft/vscode-dev-containers/tree/v0.238.0/containers/python-3
|
||||||
|
{
|
||||||
|
"name": "Python 3",
|
||||||
|
"build": {
|
||||||
|
"dockerfile": "Dockerfile",
|
||||||
|
"context": "..",
|
||||||
|
"args": {
|
||||||
|
// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
|
||||||
|
// Append -bullseye or -buster to pin to an OS version.
|
||||||
|
// Use -bullseye variants on local on arm64/Apple Silicon.
|
||||||
|
"VARIANT": "3.10-bullseye",
|
||||||
|
// Options
|
||||||
|
"NODE_VERSION": "none"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// Configure tool-specific properties.
|
||||||
|
"customizations": {
|
||||||
|
// Configure properties specific to VS Code.
|
||||||
|
"vscode": {
|
||||||
|
// Set *default* container specific settings.json values on container create.
|
||||||
|
"settings": {
|
||||||
|
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
||||||
|
"python.linting.enabled": true,
|
||||||
|
"python.linting.pylintEnabled": true,
|
||||||
|
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
|
||||||
|
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
|
||||||
|
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
|
||||||
|
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||||
|
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
|
||||||
|
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
|
||||||
|
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
|
||||||
|
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||||
|
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
|
||||||
|
},
|
||||||
|
|
||||||
|
// Add the IDs of extensions you want installed when the container is created.
|
||||||
|
"extensions": [
|
||||||
|
"ms-python.python",
|
||||||
|
"ms-python.vscode-pylance"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||||
|
// "forwardPorts": [],
|
||||||
|
|
||||||
|
// Use 'postCreateCommand' to run commands after the container is created.
|
||||||
|
// "postCreateCommand": "pip3 install --user -r requirements.txt",
|
||||||
|
|
||||||
|
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||||
|
"remoteUser": "vscode",
|
||||||
|
"features": {
|
||||||
|
"git": "os-provided"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,4 @@
|
||||||
|
**/.env
|
||||||
|
|
||||||
|
.env.dev*
|
||||||
|
.env.prod*
|
|
@ -0,0 +1,3 @@
|
||||||
|
{
|
||||||
|
"workbench.colorTheme": "ReUI"
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
# pull official base image
|
||||||
|
FROM python:3.9.6-alpine
|
||||||
|
|
||||||
|
# set work directory
|
||||||
|
WORKDIR /usr/src/app
|
||||||
|
|
||||||
|
# set environment variables
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
|
||||||
|
# install psycopg2 dependencies
|
||||||
|
RUN apk update && apk add postgresql-dev gcc python3-dev musl-dev
|
||||||
|
|
||||||
|
# install dependencies
|
||||||
|
RUN pip install --upgrade pip
|
||||||
|
COPY ./requirements.txt .
|
||||||
|
RUN pip install -r requirements.txt
|
||||||
|
|
||||||
|
# copy entrypoint.sh
|
||||||
|
COPY ./entrypoint.sh .
|
||||||
|
RUN sed -i 's/\r$//g' /usr/src/app/entrypoint.sh
|
||||||
|
RUN chmod +x /usr/src/app/entrypoint.sh
|
||||||
|
|
||||||
|
# copy project
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# run entrypoint.sh
|
||||||
|
ENTRYPOINT ["/usr/src/app/entrypoint.sh"]
|
|
@ -0,0 +1,68 @@
|
||||||
|
###########
|
||||||
|
# BUILDER #
|
||||||
|
###########
|
||||||
|
|
||||||
|
# pull official base image
|
||||||
|
FROM python:3.9.6-alpine as builder
|
||||||
|
|
||||||
|
# set work directory
|
||||||
|
WORKDIR /usr/src/app
|
||||||
|
|
||||||
|
# set environment variables
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
|
||||||
|
# install psycopg2 dependencies
|
||||||
|
RUN apk update && apk add postgresql-dev gcc python3-dev musl-dev
|
||||||
|
|
||||||
|
# lint
|
||||||
|
# RUN pip install --upgrade pip
|
||||||
|
# RUN pip install flake8==3.9.2
|
||||||
|
COPY . .
|
||||||
|
# RUN flake8 --ignore=E501,F401 .
|
||||||
|
|
||||||
|
# install dependencies
|
||||||
|
COPY ./requirements.txt .
|
||||||
|
RUN pip wheel --no-cache-dir --no-deps --wheel-dir /usr/src/app/wheels -r requirements.txt
|
||||||
|
|
||||||
|
#########
|
||||||
|
# FINAL #
|
||||||
|
#########
|
||||||
|
|
||||||
|
# pull official base image
|
||||||
|
FROM python:3.9.6-alpine
|
||||||
|
|
||||||
|
# create directory for the app user
|
||||||
|
RUN mkdir -p /home/app
|
||||||
|
|
||||||
|
# create the app user
|
||||||
|
RUN addgroup -S app && adduser -S app -G app
|
||||||
|
|
||||||
|
# create the appropriate directories
|
||||||
|
ENV HOME=/home/app
|
||||||
|
RUN mkdir -p $HOME/staticfiles
|
||||||
|
RUN mkdir -p $HOME/mediafiles
|
||||||
|
WORKDIR $HOME
|
||||||
|
|
||||||
|
# install dependencies
|
||||||
|
RUN apk update && apk add libpq
|
||||||
|
COPY --from=builder /usr/src/app/wheels /wheels
|
||||||
|
COPY --from=builder /usr/src/app/requirements.txt .
|
||||||
|
RUN pip install --no-cache /wheels/*
|
||||||
|
|
||||||
|
# copy entrypoint.prod.sh
|
||||||
|
COPY ./entrypoint.prod.sh .
|
||||||
|
RUN sed -i 's/\r$//g' $HOME/entrypoint.prod.sh
|
||||||
|
RUN chmod +x $HOME/entrypoint.prod.sh
|
||||||
|
|
||||||
|
# copy project
|
||||||
|
COPY . $HOME
|
||||||
|
|
||||||
|
# chown all the files to the app user
|
||||||
|
RUN chown -R app:app $HOME
|
||||||
|
|
||||||
|
# change to the app user
|
||||||
|
USER app
|
||||||
|
|
||||||
|
# run entrypoint.prod.sh
|
||||||
|
ENTRYPOINT ["/home/app/entrypoint.prod.sh"]
|
|
@ -0,0 +1,18 @@
|
||||||
|
# init db with flush
|
||||||
|
docker-compose exec web python manage.py flush --no-input && \
|
||||||
|
docker-compose exec web python manage.py migrate && \
|
||||||
|
docker-compose exec web python manage.py createsuperuser
|
||||||
|
|
||||||
|
# make migrations for app
|
||||||
|
# python manage.py makemigrations polls
|
||||||
|
|
||||||
|
# init in production
|
||||||
|
docker-compose -f ./docker-compose.prod.yaml exec web python manage.py flush --no-input && \
|
||||||
|
docker-compose -f ./docker-compose.prod.yaml exec web python manage.py migrate && \
|
||||||
|
docker-compose -f ./docker-compose.prod.yaml exec web python manage.py createsuperuser
|
||||||
|
|
||||||
|
# collect static files
|
||||||
|
python manage.py collectstatic --no-input --clear
|
||||||
|
|
||||||
|
# start production
|
||||||
|
docker-compose -f ./docker-compose.prod.yaml up --build
|
|
@ -0,0 +1,38 @@
|
||||||
|
version: '3.3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
web:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile.prod
|
||||||
|
command: gunicorn example.wsgi:application --bind 0.0.0.0:8000
|
||||||
|
volumes:
|
||||||
|
- static_volume:/home/app/staticfiles
|
||||||
|
- media_volume:/home/app/mediafiles
|
||||||
|
expose:
|
||||||
|
- 8000
|
||||||
|
env_file:
|
||||||
|
- ./.env.prod
|
||||||
|
depends_on:
|
||||||
|
- db
|
||||||
|
|
||||||
|
db:
|
||||||
|
image: postgres:14.5-alpine
|
||||||
|
volumes:
|
||||||
|
- postgres_data_prod:/var/lib/postgresql/data/
|
||||||
|
env_file:
|
||||||
|
- ./.env.prod.db
|
||||||
|
|
||||||
|
nginx:
|
||||||
|
build: ./nginx
|
||||||
|
volumes:
|
||||||
|
- static_volume:/home/app/staticfiles
|
||||||
|
ports:
|
||||||
|
- 1337:80
|
||||||
|
depends_on:
|
||||||
|
- web
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres_data_prod:
|
||||||
|
static_volume:
|
||||||
|
media_volume:
|
|
@ -0,0 +1,28 @@
|
||||||
|
version: '3.3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
web:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
command: python manage.py runserver 0.0.0.0:8000
|
||||||
|
volumes:
|
||||||
|
- ./:/usr/src/app/
|
||||||
|
ports:
|
||||||
|
- 8000:8000
|
||||||
|
env_file:
|
||||||
|
- ./.env.dev
|
||||||
|
depends_on:
|
||||||
|
- db_dev
|
||||||
|
|
||||||
|
db_dev:
|
||||||
|
image: postgres:14.5-alpine
|
||||||
|
volumes:
|
||||||
|
- postgres_data_dev:/var/lib/postgresql/data/
|
||||||
|
environment:
|
||||||
|
- POSTGRES_USER=django
|
||||||
|
- POSTGRES_PASSWORD=pwddjango
|
||||||
|
- POSTGRES_DB=django_dev
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres_data_dev:
|
|
@ -0,0 +1,15 @@
|
||||||
|
#!/bin/sh
# Production entrypoint: block until Postgres accepts TCP connections,
# then hand control to the container's main command.

if [ "$DATABASE" = "postgres" ]
then
    echo "Waiting for postgres..."

    # Poll until the database port is reachable (nc -z exits 0 on success).
    # SQL_HOST / SQL_PORT are expected from the container environment
    # (.env.prod) — TODO confirm they are always set.
    while ! nc -z $SQL_HOST $SQL_PORT; do
        sleep 0.1
    done

    echo "PostgreSQL started in production mode"
    # Bug fix: the shell builtin is lowercase `export`; the original
    # `EXPORT IS_PRODUCTION=1` fails with "command not found", so the
    # variable was never actually exported.
    export IS_PRODUCTION=1
fi

# Replace this shell with the CMD so the app runs as PID 1 and
# receives SIGTERM from `docker stop`.
exec "$@"
|
|
@ -0,0 +1,18 @@
|
||||||
|
#!/bin/sh
# Development entrypoint: block until Postgres accepts TCP connections,
# then hand control to the container's main command.

if [ "$DATABASE" = "postgres" ]
then
    echo "Waiting for postgres..."

    # Poll until the database port is reachable (nc -z exits 0 on success).
    # SQL_HOST / SQL_PORT are expected from the container environment
    # (.env.dev) — TODO confirm they are always set.
    while ! nc -z $SQL_HOST $SQL_PORT; do
        sleep 0.1
    done

    echo "PostgreSQL started"
    # Bug fix: the shell builtin is lowercase `export`; the original
    # `EXPORT IS_PRODUCTION=0` fails with "command not found", so the
    # variable was never actually exported.
    export IS_PRODUCTION=0
fi

# python manage.py flush --no-input
# python manage.py migrate

# Replace this shell with the CMD so the app runs as PID 1 and
# receives SIGTERM from `docker stop`.
exec "$@"
|
|
@ -0,0 +1,241 @@
|
||||||
|
<#
|
||||||
|
.Synopsis
|
||||||
|
Activate a Python virtual environment for the current PowerShell session.
|
||||||
|
|
||||||
|
.Description
|
||||||
|
Pushes the python executable for a virtual environment to the front of the
|
||||||
|
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||||
|
in a Python virtual environment. Makes use of the command line switches as
|
||||||
|
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||||
|
|
||||||
|
.Parameter VenvDir
|
||||||
|
Path to the directory that contains the virtual environment to activate. The
|
||||||
|
default value for this is the parent of the directory that the Activate.ps1
|
||||||
|
script is located within.
|
||||||
|
|
||||||
|
.Parameter Prompt
|
||||||
|
The prompt prefix to display when this virtual environment is activated. By
|
||||||
|
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||||
|
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -Verbose
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||||
|
and shows extra information about the activation as it executes.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||||
|
Activates the Python virtual environment located in the specified location.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -Prompt "MyPython"
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||||
|
and prefixes the current prompt with the specified string (surrounded in
|
||||||
|
parentheses) while the virtual environment is active.
|
||||||
|
|
||||||
|
.Notes
|
||||||
|
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||||
|
execution policy for the user. You can do this by issuing the following PowerShell
|
||||||
|
command:
|
||||||
|
|
||||||
|
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||||
|
|
||||||
|
For more information on Execution Policies:
|
||||||
|
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||||
|
|
||||||
|
#>
|
||||||
|
Param(
|
||||||
|
[Parameter(Mandatory = $false)]
|
||||||
|
[String]
|
||||||
|
$VenvDir,
|
||||||
|
[Parameter(Mandatory = $false)]
|
||||||
|
[String]
|
||||||
|
$Prompt
|
||||||
|
)
|
||||||
|
|
||||||
|
<# Function declarations --------------------------------------------------- #>
|
||||||
|
|
||||||
|
<#
|
||||||
|
.Synopsis
|
||||||
|
Remove all shell session elements added by the Activate script, including the
|
||||||
|
addition of the virtual environment's Python executable from the beginning of
|
||||||
|
the PATH variable.
|
||||||
|
|
||||||
|
.Parameter NonDestructive
|
||||||
|
If present, do not remove this function from the global namespace for the
|
||||||
|
session.
|
||||||
|
|
||||||
|
#>
|
||||||
|
function global:deactivate ([switch]$NonDestructive) {
|
||||||
|
# Revert to original values
|
||||||
|
|
||||||
|
# The prior prompt:
|
||||||
|
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||||
|
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||||
|
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||||
|
}
|
||||||
|
|
||||||
|
# The prior PYTHONHOME:
|
||||||
|
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||||
|
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||||
|
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
}
|
||||||
|
|
||||||
|
# The prior PATH:
|
||||||
|
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||||
|
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||||
|
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||||
|
}
|
||||||
|
|
||||||
|
# Just remove the VIRTUAL_ENV altogether:
|
||||||
|
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||||
|
Remove-Item -Path env:VIRTUAL_ENV
|
||||||
|
}
|
||||||
|
|
||||||
|
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||||
|
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||||
|
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||||
|
}
|
||||||
|
|
||||||
|
# Leave deactivate function in the global namespace if requested:
|
||||||
|
if (-not $NonDestructive) {
|
||||||
|
Remove-Item -Path function:deactivate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
<#
|
||||||
|
.Description
|
||||||
|
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||||
|
given folder, and returns them in a map.
|
||||||
|
|
||||||
|
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||||
|
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||||
|
then it is considered a `key = value` line. The left hand string is the key,
|
||||||
|
the right hand is the value.
|
||||||
|
|
||||||
|
If the value starts with a `'` or a `"` then the first and last character is
|
||||||
|
stripped from the value before being captured.
|
||||||
|
|
||||||
|
.Parameter ConfigDir
|
||||||
|
Path to the directory that contains the `pyvenv.cfg` file.
|
||||||
|
#>
|
||||||
|
function Get-PyVenvConfig(
|
||||||
|
[String]
|
||||||
|
$ConfigDir
|
||||||
|
) {
|
||||||
|
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||||
|
|
||||||
|
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||||
|
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||||
|
|
||||||
|
# An empty map will be returned if no config file is found.
|
||||||
|
$pyvenvConfig = @{ }
|
||||||
|
|
||||||
|
if ($pyvenvConfigPath) {
|
||||||
|
|
||||||
|
Write-Verbose "File exists, parse `key = value` lines"
|
||||||
|
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||||
|
|
||||||
|
$pyvenvConfigContent | ForEach-Object {
|
||||||
|
$keyval = $PSItem -split "\s*=\s*", 2
|
||||||
|
if ($keyval[0] -and $keyval[1]) {
|
||||||
|
$val = $keyval[1]
|
||||||
|
|
||||||
|
# Remove extraneous quotations around a string value.
|
||||||
|
if ("'""".Contains($val.Substring(0, 1))) {
|
||||||
|
$val = $val.Substring(1, $val.Length - 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
$pyvenvConfig[$keyval[0]] = $val
|
||||||
|
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return $pyvenvConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
<# Begin Activate script --------------------------------------------------- #>
|
||||||
|
|
||||||
|
# Determine the containing directory of this script
|
||||||
|
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||||
|
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||||
|
|
||||||
|
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||||
|
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||||
|
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||||
|
|
||||||
|
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||||
|
# First, get the location of the virtual environment, it might not be
|
||||||
|
# VenvExecDir if specified on the command line.
|
||||||
|
if ($VenvDir) {
|
||||||
|
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||||
|
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||||
|
Write-Verbose "VenvDir=$VenvDir"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||||
|
# as `prompt`.
|
||||||
|
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||||
|
|
||||||
|
# Next, set the prompt from the command line, or the config file, or
|
||||||
|
# just use the name of the virtual environment folder.
|
||||||
|
if ($Prompt) {
|
||||||
|
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||||
|
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||||
|
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||||
|
$Prompt = $pyvenvCfg['prompt'];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)"
|
||||||
|
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||||
|
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Verbose "Prompt = '$Prompt'"
|
||||||
|
Write-Verbose "VenvDir='$VenvDir'"
|
||||||
|
|
||||||
|
# Deactivate any currently active virtual environment, but leave the
|
||||||
|
# deactivate function in place.
|
||||||
|
deactivate -nondestructive
|
||||||
|
|
||||||
|
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||||
|
# that there is an activated venv.
|
||||||
|
$env:VIRTUAL_ENV = $VenvDir
|
||||||
|
|
||||||
|
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||||
|
|
||||||
|
Write-Verbose "Setting prompt to '$Prompt'"
|
||||||
|
|
||||||
|
# Set the prompt to include the env name
|
||||||
|
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||||
|
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||||
|
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||||
|
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||||
|
|
||||||
|
function global:prompt {
|
||||||
|
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||||
|
_OLD_VIRTUAL_PROMPT
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Clear PYTHONHOME
|
||||||
|
if (Test-Path -Path Env:PYTHONHOME) {
|
||||||
|
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
Remove-Item -Path Env:PYTHONHOME
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add the venv to the PATH
|
||||||
|
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||||
|
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
|
@ -0,0 +1,76 @@
|
||||||
|
# This file must be used with "source bin/activate" *from bash*
|
||||||
|
# you cannot run it directly
|
||||||
|
|
||||||
|
deactivate () {
|
||||||
|
# reset old environment variables
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||||
|
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||||
|
export PATH
|
||||||
|
unset _OLD_VIRTUAL_PATH
|
||||||
|
fi
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||||
|
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||||
|
export PYTHONHOME
|
||||||
|
unset _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||||
|
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||||
|
export PS1
|
||||||
|
unset _OLD_VIRTUAL_PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
unset VIRTUAL_ENV
|
||||||
|
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||||
|
# Self destruct!
|
||||||
|
unset -f deactivate
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# unset irrelevant variables
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
VIRTUAL_ENV="/home/su/Загрузки/django_example/env"
|
||||||
|
export VIRTUAL_ENV
|
||||||
|
|
||||||
|
_OLD_VIRTUAL_PATH="$PATH"
|
||||||
|
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||||
|
export PATH
|
||||||
|
|
||||||
|
# unset PYTHONHOME if set
|
||||||
|
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||||
|
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||||
|
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||||
|
unset PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||||
|
if [ "x(env) " != x ] ; then
|
||||||
|
PS1="(env) ${PS1:-}"
|
||||||
|
else
|
||||||
|
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
|
||||||
|
# special case for Aspen magic directories
|
||||||
|
# see https://aspen.io/
|
||||||
|
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
|
||||||
|
else
|
||||||
|
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
export PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r
|
||||||
|
fi
|
|
@ -0,0 +1,37 @@
|
||||||
|
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/su/Загрузки/django_example/env"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if ("env" != "") then
        set env_name = "env"
    else
        # Bug fix: the variable must be dereferenced with `$`; the original
        # `basename "VIRTUAL_ENV"` compared the literal string "VIRTUAL_ENV"
        # instead of the venv path, so the Aspen special case never fired.
        if (`basename "$VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see https://aspen.io/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash
|
|
@ -0,0 +1,75 @@
|
||||||
|
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
|
||||||
|
# you cannot run it directly
|
||||||
|
|
||||||
|
function deactivate -d "Exit virtualenv and return to normal shell environment"
|
||||||
|
# reset old environment variables
|
||||||
|
if test -n "$_OLD_VIRTUAL_PATH"
|
||||||
|
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||||
|
set -e _OLD_VIRTUAL_PATH
|
||||||
|
end
|
||||||
|
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||||
|
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||||
|
functions -e fish_prompt
|
||||||
|
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||||
|
functions -c _old_fish_prompt fish_prompt
|
||||||
|
functions -e _old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -e VIRTUAL_ENV
|
||||||
|
if test "$argv[1]" != "nondestructive"
|
||||||
|
# Self destruct!
|
||||||
|
functions -e deactivate
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# unset irrelevant variables
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
set -gx VIRTUAL_ENV "/home/su/Загрузки/django_example/env"
|
||||||
|
|
||||||
|
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||||
|
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||||
|
|
||||||
|
# unset PYTHONHOME if set
|
||||||
|
if set -q PYTHONHOME
|
||||||
|
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||||
|
set -e PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||||
|
# fish uses a function instead of an env var to generate the prompt.
|
||||||
|
|
||||||
|
# save the current fish_prompt function as the function _old_fish_prompt
|
||||||
|
functions -c fish_prompt _old_fish_prompt
|
||||||
|
|
||||||
|
# with the original prompt function renamed, we can override with our own.
|
||||||
|
function fish_prompt
|
||||||
|
# Save the return status of the last command
|
||||||
|
set -l old_status $status
|
||||||
|
|
||||||
|
# Prompt override?
|
||||||
|
if test -n "(env) "
|
||||||
|
printf "%s%s" "(env) " (set_color normal)
|
||||||
|
else
|
||||||
|
# ...Otherwise, prepend env
|
||||||
|
set -l _checkbase (basename "$VIRTUAL_ENV")
|
||||||
|
if test $_checkbase = "__"
|
||||||
|
# special case for Aspen magic directories
|
||||||
|
# see https://aspen.io/
|
||||||
|
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
|
||||||
|
else
|
||||||
|
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Restore the return status of the previous command.
|
||||||
|
echo "exit $old_status" | .
|
||||||
|
_old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||||
|
end
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from autopep8 import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from django.core.management import execute_from_command_line
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(execute_from_command_line())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from setuptools.command.easy_install import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from setuptools.command.easy_install import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from gunicorn.app.wsgiapp import run
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(run())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pycodestyle import _main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(_main())
|
|
@ -0,0 +1 @@
|
||||||
|
python3
|
|
@ -0,0 +1 @@
|
||||||
|
/usr/bin/python3
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/home/su/Загрузки/django_example/env/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from sqlparse.__main__ import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1 @@
|
||||||
|
pip
|
|
@ -0,0 +1,27 @@
|
||||||
|
Copyright (c) Django Software Foundation and individual contributors.
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
3. Neither the name of Django nor the names of its contributors may be used
|
||||||
|
to endorse or promote products derived from this software without
|
||||||
|
specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -0,0 +1,290 @@
|
||||||
|
Django is licensed under the three-clause BSD license; see the file
|
||||||
|
LICENSE for details.
|
||||||
|
|
||||||
|
Django includes code from the Python standard library, which is licensed under
|
||||||
|
the Python license, a permissive open source license. The copyright and license
|
||||||
|
is included below for compliance with Python's terms.
|
||||||
|
|
||||||
|
----------------------------------------------------------------------
|
||||||
|
|
||||||
|
Copyright (c) 2001-present Python Software Foundation; All Rights Reserved
|
||||||
|
|
||||||
|
A. HISTORY OF THE SOFTWARE
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Python was created in the early 1990s by Guido van Rossum at Stichting
|
||||||
|
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
||||||
|
as a successor of a language called ABC. Guido remains Python's
|
||||||
|
principal author, although it includes many contributions from others.
|
||||||
|
|
||||||
|
In 1995, Guido continued his work on Python at the Corporation for
|
||||||
|
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
||||||
|
in Reston, Virginia where he released several versions of the
|
||||||
|
software.
|
||||||
|
|
||||||
|
In May 2000, Guido and the Python core development team moved to
|
||||||
|
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
||||||
|
year, the PythonLabs team moved to Digital Creations, which became
|
||||||
|
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
||||||
|
https://www.python.org/psf/) was formed, a non-profit organization
|
||||||
|
created specifically to own Python-related Intellectual Property.
|
||||||
|
Zope Corporation was a sponsoring member of the PSF.
|
||||||
|
|
||||||
|
All Python releases are Open Source (see http://www.opensource.org for
|
||||||
|
the Open Source Definition). Historically, most, but not all, Python
|
||||||
|
releases have also been GPL-compatible; the table below summarizes
|
||||||
|
the various releases.
|
||||||
|
|
||||||
|
Release Derived Year Owner GPL-
|
||||||
|
from compatible? (1)
|
||||||
|
|
||||||
|
0.9.0 thru 1.2 1991-1995 CWI yes
|
||||||
|
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
||||||
|
1.6 1.5.2 2000 CNRI no
|
||||||
|
2.0 1.6 2000 BeOpen.com no
|
||||||
|
1.6.1 1.6 2001 CNRI yes (2)
|
||||||
|
2.1 2.0+1.6.1 2001 PSF no
|
||||||
|
2.0.1 2.0+1.6.1 2001 PSF yes
|
||||||
|
2.1.1 2.1+2.0.1 2001 PSF yes
|
||||||
|
2.1.2 2.1.1 2002 PSF yes
|
||||||
|
2.1.3 2.1.2 2002 PSF yes
|
||||||
|
2.2 and above 2.1.1 2001-now PSF yes
|
||||||
|
|
||||||
|
Footnotes:
|
||||||
|
|
||||||
|
(1) GPL-compatible doesn't mean that we're distributing Python under
|
||||||
|
the GPL. All Python licenses, unlike the GPL, let you distribute
|
||||||
|
a modified version without making your changes open source. The
|
||||||
|
GPL-compatible licenses make it possible to combine Python with
|
||||||
|
other software that is released under the GPL; the others don't.
|
||||||
|
|
||||||
|
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
||||||
|
because its license has a choice of law clause. According to
|
||||||
|
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
||||||
|
is "not incompatible" with the GPL.
|
||||||
|
|
||||||
|
Thanks to the many outside volunteers who have worked under Guido's
|
||||||
|
direction to make these releases possible.
|
||||||
|
|
||||||
|
|
||||||
|
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
||||||
|
===============================================================
|
||||||
|
|
||||||
|
Python software and documentation are licensed under the
|
||||||
|
Python Software Foundation License Version 2.
|
||||||
|
|
||||||
|
Starting with Python 3.8.6, examples, recipes, and other code in
|
||||||
|
the documentation are dual licensed under the PSF License Version 2
|
||||||
|
and the Zero-Clause BSD license.
|
||||||
|
|
||||||
|
Some software incorporated into Python is under different licenses.
|
||||||
|
The licenses are listed with code falling under that license.
|
||||||
|
|
||||||
|
|
||||||
|
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||||
|
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||||
|
otherwise using this software ("Python") in source or binary form and
|
||||||
|
its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||||
|
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||||
|
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||||
|
distribute, and otherwise use Python alone or in any derivative version,
|
||||||
|
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||||
|
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||||
|
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation;
|
||||||
|
All Rights Reserved" are retained in Python alone or in any derivative version
|
||||||
|
prepared by Licensee.
|
||||||
|
|
||||||
|
3. In the event Licensee prepares a derivative work that is based on
|
||||||
|
or incorporates Python or any part thereof, and wants to make
|
||||||
|
the derivative work available to others as provided herein, then
|
||||||
|
Licensee hereby agrees to include in any such work a brief summary of
|
||||||
|
the changes made to Python.
|
||||||
|
|
||||||
|
4. PSF is making Python available to Licensee on an "AS IS"
|
||||||
|
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||||
|
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||||
|
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||||
|
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
6. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
7. Nothing in this License Agreement shall be deemed to create any
|
||||||
|
relationship of agency, partnership, or joint venture between PSF and
|
||||||
|
Licensee. This License Agreement does not grant permission to use PSF
|
||||||
|
trademarks or trade name in a trademark sense to endorse or promote
|
||||||
|
products or services of Licensee, or any third party.
|
||||||
|
|
||||||
|
8. By copying, installing or otherwise using Python, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
||||||
|
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
||||||
|
Individual or Organization ("Licensee") accessing and otherwise using
|
||||||
|
this software in source or binary form and its associated
|
||||||
|
documentation ("the Software").
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this BeOpen Python License
|
||||||
|
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
||||||
|
royalty-free, world-wide license to reproduce, analyze, test, perform
|
||||||
|
and/or display publicly, prepare derivative works, distribute, and
|
||||||
|
otherwise use the Software alone or in any derivative version,
|
||||||
|
provided, however, that the BeOpen Python License is retained in the
|
||||||
|
Software, alone or in any derivative version prepared by Licensee.
|
||||||
|
|
||||||
|
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
||||||
|
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
||||||
|
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
||||||
|
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
||||||
|
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
5. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
6. This License Agreement shall be governed by and interpreted in all
|
||||||
|
respects by the law of the State of California, excluding conflict of
|
||||||
|
law provisions. Nothing in this License Agreement shall be deemed to
|
||||||
|
create any relationship of agency, partnership, or joint venture
|
||||||
|
between BeOpen and Licensee. This License Agreement does not grant
|
||||||
|
permission to use BeOpen trademarks or trade names in a trademark
|
||||||
|
sense to endorse or promote products or services of Licensee, or any
|
||||||
|
third party. As an exception, the "BeOpen Python" logos available at
|
||||||
|
http://www.pythonlabs.com/logos.html may be used according to the
|
||||||
|
permissions granted on that web page.
|
||||||
|
|
||||||
|
7. By copying, installing or otherwise using the software, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Corporation for National
|
||||||
|
Research Initiatives, having an office at 1895 Preston White Drive,
|
||||||
|
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
||||||
|
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
||||||
|
source or binary form and its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, CNRI
|
||||||
|
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
||||||
|
license to reproduce, analyze, test, perform and/or display publicly,
|
||||||
|
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
||||||
|
alone or in any derivative version, provided, however, that CNRI's
|
||||||
|
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
||||||
|
1995-2001 Corporation for National Research Initiatives; All Rights
|
||||||
|
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
||||||
|
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
||||||
|
Agreement, Licensee may substitute the following text (omitting the
|
||||||
|
quotes): "Python 1.6.1 is made available subject to the terms and
|
||||||
|
conditions in CNRI's License Agreement. This Agreement together with
|
||||||
|
Python 1.6.1 may be located on the internet using the following
|
||||||
|
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
||||||
|
Agreement may also be obtained from a proxy server on the internet
|
||||||
|
using the following URL: http://hdl.handle.net/1895.22/1013".
|
||||||
|
|
||||||
|
3. In the event Licensee prepares a derivative work that is based on
|
||||||
|
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
||||||
|
the derivative work available to others as provided herein, then
|
||||||
|
Licensee hereby agrees to include in any such work a brief summary of
|
||||||
|
the changes made to Python 1.6.1.
|
||||||
|
|
||||||
|
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
||||||
|
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||||
|
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||||
|
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
||||||
|
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
6. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
7. This License Agreement shall be governed by the federal
|
||||||
|
intellectual property law of the United States, including without
|
||||||
|
limitation the federal copyright law, and, to the extent such
|
||||||
|
U.S. federal law does not apply, by the law of the Commonwealth of
|
||||||
|
Virginia, excluding Virginia's conflict of law provisions.
|
||||||
|
Notwithstanding the foregoing, with regard to derivative works based
|
||||||
|
on Python 1.6.1 that incorporate non-separable material that was
|
||||||
|
previously distributed under the GNU General Public License (GPL), the
|
||||||
|
law of the Commonwealth of Virginia shall govern this License
|
||||||
|
Agreement only as to issues arising under or with respect to
|
||||||
|
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
||||||
|
License Agreement shall be deemed to create any relationship of
|
||||||
|
agency, partnership, or joint venture between CNRI and Licensee. This
|
||||||
|
License Agreement does not grant permission to use CNRI trademarks or
|
||||||
|
trade name in a trademark sense to endorse or promote products or
|
||||||
|
services of Licensee, or any third party.
|
||||||
|
|
||||||
|
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
||||||
|
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
||||||
|
bound by the terms and conditions of this License Agreement.
|
||||||
|
|
||||||
|
ACCEPT
|
||||||
|
|
||||||
|
|
||||||
|
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
||||||
|
--------------------------------------------------
|
||||||
|
|
||||||
|
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
||||||
|
The Netherlands. All rights reserved.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software and its
|
||||||
|
documentation for any purpose and without fee is hereby granted,
|
||||||
|
provided that the above copyright notice appear in all copies and that
|
||||||
|
both that copyright notice and this permission notice appear in
|
||||||
|
supporting documentation, and that the name of Stichting Mathematisch
|
||||||
|
Centrum or CWI not be used in advertising or publicity pertaining to
|
||||||
|
distribution of the software without specific, written prior
|
||||||
|
permission.
|
||||||
|
|
||||||
|
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
||||||
|
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||||
|
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
||||||
|
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||||
|
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
|
||||||
|
----------------------------------------------------------------------
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||||
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||||
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||||
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||||
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||||
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||||
|
PERFORMANCE OF THIS SOFTWARE.
|
|
@ -0,0 +1,100 @@
|
||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: Django
|
||||||
|
Version: 4.1
|
||||||
|
Summary: A high-level Python web framework that encourages rapid development and clean, pragmatic design.
|
||||||
|
Home-page: https://www.djangoproject.com/
|
||||||
|
Author: Django Software Foundation
|
||||||
|
Author-email: foundation@djangoproject.com
|
||||||
|
License: BSD-3-Clause
|
||||||
|
Project-URL: Documentation, https://docs.djangoproject.com/
|
||||||
|
Project-URL: Release notes, https://docs.djangoproject.com/en/stable/releases/
|
||||||
|
Project-URL: Funding, https://www.djangoproject.com/fundraising/
|
||||||
|
Project-URL: Source, https://github.com/django/django
|
||||||
|
Project-URL: Tracker, https://code.djangoproject.com/
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Environment :: Web Environment
|
||||||
|
Classifier: Framework :: Django
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: BSD License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Requires-Python: >=3.8
|
||||||
|
License-File: LICENSE
|
||||||
|
License-File: LICENSE.python
|
||||||
|
License-File: AUTHORS
|
||||||
|
Requires-Dist: asgiref (<4,>=3.5.2)
|
||||||
|
Requires-Dist: sqlparse (>=0.2.2)
|
||||||
|
Requires-Dist: backports.zoneinfo ; python_version < "3.9"
|
||||||
|
Requires-Dist: tzdata ; sys_platform == "win32"
|
||||||
|
Provides-Extra: argon2
|
||||||
|
Requires-Dist: argon2-cffi (>=19.1.0) ; extra == 'argon2'
|
||||||
|
Provides-Extra: bcrypt
|
||||||
|
Requires-Dist: bcrypt ; extra == 'bcrypt'
|
||||||
|
|
||||||
|
======
|
||||||
|
Django
|
||||||
|
======
|
||||||
|
|
||||||
|
Django is a high-level Python web framework that encourages rapid development
|
||||||
|
and clean, pragmatic design. Thanks for checking it out.
|
||||||
|
|
||||||
|
All documentation is in the "``docs``" directory and online at
|
||||||
|
https://docs.djangoproject.com/en/stable/. If you're just getting started,
|
||||||
|
here's how we recommend you read the docs:
|
||||||
|
|
||||||
|
* First, read ``docs/intro/install.txt`` for instructions on installing Django.
|
||||||
|
|
||||||
|
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
|
||||||
|
``docs/intro/tutorial02.txt``, etc.).
|
||||||
|
|
||||||
|
* If you want to set up an actual deployment server, read
|
||||||
|
``docs/howto/deployment/index.txt`` for instructions.
|
||||||
|
|
||||||
|
* You'll probably want to read through the topical guides (in ``docs/topics``)
|
||||||
|
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
|
||||||
|
problems, and check out the reference (``docs/ref``) for gory details.
|
||||||
|
|
||||||
|
* See ``docs/README`` for instructions on building an HTML version of the docs.
|
||||||
|
|
||||||
|
Docs are updated rigorously. If you find any problems in the docs, or think
|
||||||
|
they should be clarified in any way, please take 30 seconds to fill out a
|
||||||
|
ticket here: https://code.djangoproject.com/newticket
|
||||||
|
|
||||||
|
To get more help:
|
||||||
|
|
||||||
|
* Join the ``#django`` channel on ``irc.libera.chat``. Lots of helpful people
|
||||||
|
hang out there. See https://web.libera.chat if you're new to IRC.
|
||||||
|
|
||||||
|
* Join the django-users mailing list, or read the archives, at
|
||||||
|
https://groups.google.com/group/django-users.
|
||||||
|
|
||||||
|
To contribute to Django:
|
||||||
|
|
||||||
|
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
|
||||||
|
information about getting involved.
|
||||||
|
|
||||||
|
To run Django's test suite:
|
||||||
|
|
||||||
|
* Follow the instructions in the "Unit tests" section of
|
||||||
|
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
|
||||||
|
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests
|
||||||
|
|
||||||
|
Supporting the Development of Django
|
||||||
|
====================================
|
||||||
|
|
||||||
|
Django's development depends on your contributions.
|
||||||
|
|
||||||
|
If you depend on Django, remember to support the Django Software Foundation: https://www.djangoproject.com/fundraising/
|
||||||
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,5 @@
|
||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.37.0)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
[console_scripts]
|
||||||
|
django-admin = django.core.management:execute_from_command_line
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
django
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
pip
|
|
@ -0,0 +1,27 @@
|
||||||
|
Copyright (c) Django Software Foundation and individual contributors.
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
3. Neither the name of Django nor the names of its contributors may be used
|
||||||
|
to endorse or promote products derived from this software without
|
||||||
|
specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -0,0 +1,245 @@
|
||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: asgiref
|
||||||
|
Version: 3.5.2
|
||||||
|
Summary: ASGI specs, helper code, and adapters
|
||||||
|
Home-page: https://github.com/django/asgiref/
|
||||||
|
Author: Django Software Foundation
|
||||||
|
Author-email: foundation@djangoproject.com
|
||||||
|
License: BSD
|
||||||
|
Project-URL: Documentation, https://asgi.readthedocs.io/
|
||||||
|
Project-URL: Further Documentation, https://docs.djangoproject.com/en/stable/topics/async/#async-adapter-functions
|
||||||
|
Project-URL: Changelog, https://github.com/django/asgiref/blob/master/CHANGELOG.txt
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Environment :: Web Environment
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: BSD License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP
|
||||||
|
Requires-Python: >=3.7
|
||||||
|
License-File: LICENSE
|
||||||
|
Requires-Dist: typing-extensions ; python_version < "3.8"
|
||||||
|
Provides-Extra: tests
|
||||||
|
Requires-Dist: pytest ; extra == 'tests'
|
||||||
|
Requires-Dist: pytest-asyncio ; extra == 'tests'
|
||||||
|
Requires-Dist: mypy (>=0.800) ; extra == 'tests'
|
||||||
|
|
||||||
|
asgiref
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. image:: https://api.travis-ci.org/django/asgiref.svg
|
||||||
|
:target: https://travis-ci.org/django/asgiref
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/pypi/v/asgiref.svg
|
||||||
|
:target: https://pypi.python.org/pypi/asgiref
|
||||||
|
|
||||||
|
ASGI is a standard for Python asynchronous web apps and servers to communicate
|
||||||
|
with each other, and positioned as an asynchronous successor to WSGI. You can
|
||||||
|
read more at https://asgi.readthedocs.io/en/latest/
|
||||||
|
|
||||||
|
This package includes ASGI base libraries, such as:
|
||||||
|
|
||||||
|
* Sync-to-async and async-to-sync function wrappers, ``asgiref.sync``
|
||||||
|
* Server base classes, ``asgiref.server``
|
||||||
|
* A WSGI-to-ASGI adapter, in ``asgiref.wsgi``
|
||||||
|
|
||||||
|
|
||||||
|
Function wrappers
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
These allow you to wrap or decorate async or sync functions to call them from
|
||||||
|
the other style (so you can call async functions from a synchronous thread,
|
||||||
|
or vice-versa).
|
||||||
|
|
||||||
|
In particular:
|
||||||
|
|
||||||
|
* AsyncToSync lets a synchronous subthread stop and wait while the async
|
||||||
|
function is called on the main thread's event loop, and then control is
|
||||||
|
returned to the thread when the async function is finished.
|
||||||
|
|
||||||
|
* SyncToAsync lets async code call a synchronous function, which is run in
|
||||||
|
a threadpool and control returned to the async coroutine when the synchronous
|
||||||
|
function completes.
|
||||||
|
|
||||||
|
The idea is to make it easier to call synchronous APIs from async code and
|
||||||
|
asynchronous APIs from synchronous code so it's easier to transition code from
|
||||||
|
one style to the other. In the case of Channels, we wrap the (synchronous)
|
||||||
|
Django view system with SyncToAsync to allow it to run inside the (asynchronous)
|
||||||
|
ASGI server.
|
||||||
|
|
||||||
|
Note that exactly what threads things run in is very specific, and aimed to
|
||||||
|
keep maximum compatibility with old synchronous code. See
|
||||||
|
"Synchronous code & Threads" below for a full explanation. By default,
|
||||||
|
``sync_to_async`` will run all synchronous code in the program in the same
|
||||||
|
thread for safety reasons; you can disable this for more performance with
|
||||||
|
``@sync_to_async(thread_sensitive=False)``, but make sure that your code does
|
||||||
|
not rely on anything bound to threads (like database connections) when you do.
|
||||||
|
|
||||||
|
|
||||||
|
Threadlocal replacement
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
This is a drop-in replacement for ``threading.local`` that works with both
|
||||||
|
threads and asyncio Tasks. Even better, it will proxy values through from a
|
||||||
|
task-local context to a thread-local context when you use ``sync_to_async``
|
||||||
|
to run things in a threadpool, and vice-versa for ``async_to_sync``.
|
||||||
|
|
||||||
|
If you instead want true thread- and task-safety, you can set
|
||||||
|
``thread_critical`` on the Local object to ensure this instead.
|
||||||
|
|
||||||
|
|
||||||
|
Server base classes
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
Includes a ``StatelessServer`` class which provides all the hard work of
|
||||||
|
writing a stateless server (as in, does not handle direct incoming sockets
|
||||||
|
but instead consumes external streams or sockets to work out what is happening).
|
||||||
|
|
||||||
|
An example of such a server would be a chatbot server that connects out to
|
||||||
|
a central chat server and provides a "connection scope" per user chatting to
|
||||||
|
it. There's only one actual connection, but the server has to separate things
|
||||||
|
into several scopes for easier writing of the code.
|
||||||
|
|
||||||
|
You can see an example of this being used in `frequensgi <https://github.com/andrewgodwin/frequensgi>`_.
|
||||||
|
|
||||||
|
|
||||||
|
WSGI-to-ASGI adapter
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
Allows you to wrap a WSGI application so it appears as a valid ASGI application.
|
||||||
|
|
||||||
|
Simply wrap it around your WSGI application like so::
|
||||||
|
|
||||||
|
asgi_application = WsgiToAsgi(wsgi_application)
|
||||||
|
|
||||||
|
The WSGI application will be run in a synchronous threadpool, and the wrapped
|
||||||
|
ASGI application will be one that accepts ``http`` class messages.
|
||||||
|
|
||||||
|
Please note that not all extended features of WSGI may be supported (such as
|
||||||
|
file handles for incoming POST bodies).
|
||||||
|
|
||||||
|
|
||||||
|
Dependencies
|
||||||
|
------------
|
||||||
|
|
||||||
|
``asgiref`` requires Python 3.7 or higher.
|
||||||
|
|
||||||
|
|
||||||
|
Contributing
|
||||||
|
------------
|
||||||
|
|
||||||
|
Please refer to the
|
||||||
|
`main Channels contributing docs <https://github.com/django/channels/blob/master/CONTRIBUTING.rst>`_.
|
||||||
|
|
||||||
|
|
||||||
|
Testing
|
||||||
|
'''''''
|
||||||
|
|
||||||
|
To run tests, make sure you have installed the ``tests`` extra with the package::
|
||||||
|
|
||||||
|
cd asgiref/
|
||||||
|
pip install -e .[tests]
|
||||||
|
pytest
|
||||||
|
|
||||||
|
|
||||||
|
Building the documentation
|
||||||
|
''''''''''''''''''''''''''
|
||||||
|
|
||||||
|
The documentation uses `Sphinx <http://www.sphinx-doc.org>`_::
|
||||||
|
|
||||||
|
cd asgiref/docs/
|
||||||
|
pip install sphinx
|
||||||
|
|
||||||
|
To build the docs, you can use the default tools::
|
||||||
|
|
||||||
|
sphinx-build -b html . _build/html # or `make html`, if you've got make set up
|
||||||
|
cd _build/html
|
||||||
|
python -m http.server
|
||||||
|
|
||||||
|
...or you can use ``sphinx-autobuild`` to run a server and rebuild/reload
|
||||||
|
your documentation changes automatically::
|
||||||
|
|
||||||
|
pip install sphinx-autobuild
|
||||||
|
sphinx-autobuild . _build/html
|
||||||
|
|
||||||
|
|
||||||
|
Releasing
|
||||||
|
'''''''''
|
||||||
|
|
||||||
|
To release, first add details to CHANGELOG.txt and update the version number in ``asgiref/__init__.py``.
|
||||||
|
|
||||||
|
Then, build and push the packages::
|
||||||
|
|
||||||
|
python -m build
|
||||||
|
twine upload dist/*
|
||||||
|
rm -r build/ dist/
|
||||||
|
|
||||||
|
|
||||||
|
Implementation Details
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Synchronous code & threads
|
||||||
|
''''''''''''''''''''''''''
|
||||||
|
|
||||||
|
The ``asgiref.sync`` module provides two wrappers that let you go between
|
||||||
|
asynchronous and synchronous code at will, while taking care of the rough edges
|
||||||
|
for you.
|
||||||
|
|
||||||
|
Unfortunately, the rough edges are numerous, and the code has to work especially
|
||||||
|
hard to keep things in the same thread as much as possible. Notably, the
|
||||||
|
restrictions we are working with are:
|
||||||
|
|
||||||
|
* All synchronous code called through ``SyncToAsync`` and marked with
|
||||||
|
``thread_sensitive`` should run in the same thread as each other (and if the
|
||||||
|
outer layer of the program is synchronous, the main thread)
|
||||||
|
|
||||||
|
* If a thread already has a running async loop, ``AsyncToSync`` can't run things
|
||||||
|
on that loop if it's blocked on synchronous code that is above you in the
|
||||||
|
call stack.
|
||||||
|
|
||||||
|
The first compromise you get to might be that ``thread_sensitive`` code should
|
||||||
|
just run in the same thread and not spawn in a sub-thread, fulfilling the first
|
||||||
|
restriction, but that immediately runs you into the second restriction.
|
||||||
|
|
||||||
|
The only real solution is to essentially have a variant of ThreadPoolExecutor
|
||||||
|
that executes any ``thread_sensitive`` code on the outermost synchronous
|
||||||
|
thread - either the main thread, or a single spawned subthread.
|
||||||
|
|
||||||
|
This means you now have two basic states:
|
||||||
|
|
||||||
|
* If the outermost layer of your program is synchronous, then all async code
|
||||||
|
run through ``AsyncToSync`` will run in a per-call event loop in arbitrary
|
||||||
|
sub-threads, while all ``thread_sensitive`` code will run in the main thread.
|
||||||
|
|
||||||
|
* If the outermost layer of your program is asynchronous, then all async code
|
||||||
|
runs on the main thread's event loop, and all ``thread_sensitive`` synchronous
|
||||||
|
code will run in a single shared sub-thread.
|
||||||
|
|
||||||
|
Crucially, this means that in both cases there is a thread which is a shared
|
||||||
|
resource that all ``thread_sensitive`` code must run on, and there is a chance
|
||||||
|
that this thread is currently blocked on its own ``AsyncToSync`` call. Thus,
|
||||||
|
``AsyncToSync`` needs to act as an executor for thread code while it's blocking.
|
||||||
|
|
||||||
|
The ``CurrentThreadExecutor`` class provides this functionality; rather than
|
||||||
|
simply waiting on a Future, you can call its ``run_until_future`` method and
|
||||||
|
it will run submitted code until that Future is done. This means that code
|
||||||
|
inside the call can then run code on your thread.
|
||||||
|
|
||||||
|
|
||||||
|
Maintenance and Security
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
To report security issues, please contact security@djangoproject.com. For GPG
|
||||||
|
signatures and more security process information, see
|
||||||
|
https://docs.djangoproject.com/en/dev/internals/security/.
|
||||||
|
|
||||||
|
To report bugs or request new features, please open a new GitHub issue.
|
||||||
|
|
||||||
|
This repository is part of the Channels project. For the shepherd and maintenance team, please see the
|
||||||
|
`main Channels readme <https://github.com/django/channels/blob/master/README.rst>`_.
|
|
@ -0,0 +1,27 @@
|
||||||
|
asgiref-3.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
asgiref-3.5.2.dist-info/LICENSE,sha256=uEZBXRtRTpwd_xSiLeuQbXlLxUbKYSn5UKGM0JHipmk,1552
|
||||||
|
asgiref-3.5.2.dist-info/METADATA,sha256=3JU5Zw-j9qCKPcuf3cJZ5dVispB_b7UXU0fnQVp9DDA,9143
|
||||||
|
asgiref-3.5.2.dist-info/RECORD,,
|
||||||
|
asgiref-3.5.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||||
|
asgiref-3.5.2.dist-info/top_level.txt,sha256=bokQjCzwwERhdBiPdvYEZa4cHxT4NCeAffQNUqJ8ssg,8
|
||||||
|
asgiref/__init__.py,sha256=LtYJ5AVwuiAlsrJUQwzHZMrGMIRn7cuIoIt4OznYy6c,22
|
||||||
|
asgiref/__pycache__/__init__.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/compatibility.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/current_thread_executor.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/local.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/server.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/sync.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/testing.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/timeout.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/typing.cpython-38.pyc,,
|
||||||
|
asgiref/__pycache__/wsgi.cpython-38.pyc,,
|
||||||
|
asgiref/compatibility.py,sha256=MVH2bEdiCMMVTLbE-1V6KiU7q4LwqzP7PIufeXa-njM,1598
|
||||||
|
asgiref/current_thread_executor.py,sha256=oeH8zv2tTmcbpxdUmOSMzbEXzeY5nJzIMFvzprE95gA,2801
|
||||||
|
asgiref/local.py,sha256=nx5RqVFLYgUJVaxzApuQUW7dd9y21sruMYdgISoRs1k,4854
|
||||||
|
asgiref/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
asgiref/server.py,sha256=egTQhZo1k4G0F7SSBQNp_VOekpGcjBJZU2kkCoiGC_M,6005
|
||||||
|
asgiref/sync.py,sha256=3P813NHl3EHPMtzPEjaBelmjV_JUw97zYbtx-MmLUiw,20185
|
||||||
|
asgiref/testing.py,sha256=3byNRV7Oto_Fg8Z-fErQJ3yGf7OQlcUexbN_cDQugzQ,3119
|
||||||
|
asgiref/timeout.py,sha256=5Ekbmn3X1HPR55qgx-hPJMPEu_-YoivHqNhFEitiSYE,3440
|
||||||
|
asgiref/typing.py,sha256=MZ7vbJY1F7EQqo9gL9pMSFRMw9b_SQrQQsnvlJQ2iP4,5603
|
||||||
|
asgiref/wsgi.py,sha256=-L0eo_uK_dq7EPjv1meW1BRGytURaO9NPESxnJc9CtA,6575
|
|
@ -0,0 +1,5 @@
|
||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.37.1)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
asgiref
|
|
@ -0,0 +1 @@
|
||||||
|
__version__ = "3.5.2"
|
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/compatibility.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/compatibility.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/current_thread_executor.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/asgiref/__pycache__/current_thread_executor.cpython-38.pyc
vendored
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,47 @@
|
||||||
|
import asyncio
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
|
||||||
|
def is_double_callable(application):
|
||||||
|
"""
|
||||||
|
Tests to see if an application is a legacy-style (double-callable) application.
|
||||||
|
"""
|
||||||
|
# Look for a hint on the object first
|
||||||
|
if getattr(application, "_asgi_single_callable", False):
|
||||||
|
return False
|
||||||
|
if getattr(application, "_asgi_double_callable", False):
|
||||||
|
return True
|
||||||
|
# Uninstanted classes are double-callable
|
||||||
|
if inspect.isclass(application):
|
||||||
|
return True
|
||||||
|
# Instanted classes depend on their __call__
|
||||||
|
if hasattr(application, "__call__"):
|
||||||
|
# We only check to see if its __call__ is a coroutine function -
|
||||||
|
# if it's not, it still might be a coroutine function itself.
|
||||||
|
if asyncio.iscoroutinefunction(application.__call__):
|
||||||
|
return False
|
||||||
|
# Non-classes we just check directly
|
||||||
|
return not asyncio.iscoroutinefunction(application)
|
||||||
|
|
||||||
|
|
||||||
|
def double_to_single_callable(application):
|
||||||
|
"""
|
||||||
|
Transforms a double-callable ASGI application into a single-callable one.
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def new_application(scope, receive, send):
|
||||||
|
instance = application(scope)
|
||||||
|
return await instance(receive, send)
|
||||||
|
|
||||||
|
return new_application
|
||||||
|
|
||||||
|
|
||||||
|
def guarantee_single_callable(application):
|
||||||
|
"""
|
||||||
|
Takes either a single- or double-callable application and always returns it
|
||||||
|
in single-callable style. Use this to add backwards compatibility for ASGI
|
||||||
|
2.0 applications to your server/test harness/etc.
|
||||||
|
"""
|
||||||
|
if is_double_callable(application):
|
||||||
|
application = double_to_single_callable(application)
|
||||||
|
return application
|
|
@ -0,0 +1,81 @@
|
||||||
|
import queue
|
||||||
|
import threading
|
||||||
|
from concurrent.futures import Executor, Future
|
||||||
|
|
||||||
|
|
||||||
|
class _WorkItem:
|
||||||
|
"""
|
||||||
|
Represents an item needing to be run in the executor.
|
||||||
|
Copied from ThreadPoolExecutor (but it's private, so we're not going to rely on importing it)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, future, fn, args, kwargs):
|
||||||
|
self.future = future
|
||||||
|
self.fn = fn
|
||||||
|
self.args = args
|
||||||
|
self.kwargs = kwargs
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
if not self.future.set_running_or_notify_cancel():
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
result = self.fn(*self.args, **self.kwargs)
|
||||||
|
except BaseException as exc:
|
||||||
|
self.future.set_exception(exc)
|
||||||
|
# Break a reference cycle with the exception 'exc'
|
||||||
|
self = None
|
||||||
|
else:
|
||||||
|
self.future.set_result(result)
|
||||||
|
|
||||||
|
|
||||||
|
class CurrentThreadExecutor(Executor):
|
||||||
|
"""
|
||||||
|
An Executor that actually runs code in the thread it is instantiated in.
|
||||||
|
Passed to other threads running async code, so they can run sync code in
|
||||||
|
the thread they came from.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._work_thread = threading.current_thread()
|
||||||
|
self._work_queue = queue.Queue()
|
||||||
|
self._broken = False
|
||||||
|
|
||||||
|
def run_until_future(self, future):
|
||||||
|
"""
|
||||||
|
Runs the code in the work queue until a result is available from the future.
|
||||||
|
Should be run from the thread the executor is initialised in.
|
||||||
|
"""
|
||||||
|
# Check we're in the right thread
|
||||||
|
if threading.current_thread() != self._work_thread:
|
||||||
|
raise RuntimeError(
|
||||||
|
"You cannot run CurrentThreadExecutor from a different thread"
|
||||||
|
)
|
||||||
|
future.add_done_callback(self._work_queue.put)
|
||||||
|
# Keep getting and running work items until we get the future we're waiting for
|
||||||
|
# back via the future's done callback.
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
# Get a work item and run it
|
||||||
|
work_item = self._work_queue.get()
|
||||||
|
if work_item is future:
|
||||||
|
return
|
||||||
|
work_item.run()
|
||||||
|
del work_item
|
||||||
|
finally:
|
||||||
|
self._broken = True
|
||||||
|
|
||||||
|
def submit(self, fn, *args, **kwargs):
|
||||||
|
# Check they're not submitting from the same thread
|
||||||
|
if threading.current_thread() == self._work_thread:
|
||||||
|
raise RuntimeError(
|
||||||
|
"You cannot submit onto CurrentThreadExecutor from its own thread"
|
||||||
|
)
|
||||||
|
# Check they're not too late or the executor errored
|
||||||
|
if self._broken:
|
||||||
|
raise RuntimeError("CurrentThreadExecutor already quit or is broken")
|
||||||
|
# Add to work queue
|
||||||
|
f = Future()
|
||||||
|
work_item = _WorkItem(f, fn, args, kwargs)
|
||||||
|
self._work_queue.put(work_item)
|
||||||
|
# Return the future
|
||||||
|
return f
|
|
@ -0,0 +1,120 @@
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
|
||||||
|
class Local:
|
||||||
|
"""
|
||||||
|
A drop-in replacement for threading.locals that also works with asyncio
|
||||||
|
Tasks (via the current_task asyncio method), and passes locals through
|
||||||
|
sync_to_async and async_to_sync.
|
||||||
|
|
||||||
|
Specifically:
|
||||||
|
- Locals work per-coroutine on any thread not spawned using asgiref
|
||||||
|
- Locals work per-thread on any thread not spawned using asgiref
|
||||||
|
- Locals are shared with the parent coroutine when using sync_to_async
|
||||||
|
- Locals are shared with the parent thread when using async_to_sync
|
||||||
|
(and if that thread was launched using sync_to_async, with its parent
|
||||||
|
coroutine as well, with this working for indefinite levels of nesting)
|
||||||
|
|
||||||
|
Set thread_critical to True to not allow locals to pass from an async Task
|
||||||
|
to a thread it spawns. This is needed for code that truly needs
|
||||||
|
thread-safety, as opposed to things used for helpful context (e.g. sqlite
|
||||||
|
does not like being called from a different thread to the one it is from).
|
||||||
|
Thread-critical code will still be differentiated per-Task within a thread
|
||||||
|
as it is expected it does not like concurrent access.
|
||||||
|
|
||||||
|
This doesn't use contextvars as it needs to support 3.6. Once it can support
|
||||||
|
3.7 only, we can then reimplement the storage more nicely.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, thread_critical: bool = False) -> None:
|
||||||
|
self._thread_critical = thread_critical
|
||||||
|
self._thread_lock = threading.RLock()
|
||||||
|
self._context_refs: "weakref.WeakSet[object]" = weakref.WeakSet()
|
||||||
|
# Random suffixes stop accidental reuse between different Locals,
|
||||||
|
# though we try to force deletion as well.
|
||||||
|
self._attr_name = "_asgiref_local_impl_{}_{}".format(
|
||||||
|
id(self),
|
||||||
|
"".join(random.choice(string.ascii_letters) for i in range(8)),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_context_id(self):
|
||||||
|
"""
|
||||||
|
Get the ID we should use for looking up variables
|
||||||
|
"""
|
||||||
|
# Prevent a circular reference
|
||||||
|
from .sync import AsyncToSync, SyncToAsync
|
||||||
|
|
||||||
|
# First, pull the current task if we can
|
||||||
|
context_id = SyncToAsync.get_current_task()
|
||||||
|
context_is_async = True
|
||||||
|
# OK, let's try for a thread ID
|
||||||
|
if context_id is None:
|
||||||
|
context_id = threading.current_thread()
|
||||||
|
context_is_async = False
|
||||||
|
# If we're thread-critical, we stop here, as we can't share contexts.
|
||||||
|
if self._thread_critical:
|
||||||
|
return context_id
|
||||||
|
# Now, take those and see if we can resolve them through the launch maps
|
||||||
|
for i in range(sys.getrecursionlimit()):
|
||||||
|
try:
|
||||||
|
if context_is_async:
|
||||||
|
# Tasks have a source thread in AsyncToSync
|
||||||
|
context_id = AsyncToSync.launch_map[context_id]
|
||||||
|
context_is_async = False
|
||||||
|
else:
|
||||||
|
# Threads have a source task in SyncToAsync
|
||||||
|
context_id = SyncToAsync.launch_map[context_id]
|
||||||
|
context_is_async = True
|
||||||
|
except KeyError:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# Catch infinite loops (they happen if you are screwing around
|
||||||
|
# with AsyncToSync implementations)
|
||||||
|
raise RuntimeError("Infinite launch_map loops")
|
||||||
|
return context_id
|
||||||
|
|
||||||
|
def _get_storage(self):
|
||||||
|
context_obj = self._get_context_id()
|
||||||
|
if not hasattr(context_obj, self._attr_name):
|
||||||
|
setattr(context_obj, self._attr_name, {})
|
||||||
|
self._context_refs.add(context_obj)
|
||||||
|
return getattr(context_obj, self._attr_name)
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
|
try:
|
||||||
|
for context_obj in self._context_refs:
|
||||||
|
try:
|
||||||
|
delattr(context_obj, self._attr_name)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
except TypeError:
|
||||||
|
# WeakSet.__iter__ can crash when interpreter is shutting down due
|
||||||
|
# to _IterationGuard being None.
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __getattr__(self, key):
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
if key in storage:
|
||||||
|
return storage[key]
|
||||||
|
else:
|
||||||
|
raise AttributeError(f"{self!r} object has no attribute {key!r}")
|
||||||
|
|
||||||
|
def __setattr__(self, key, value):
|
||||||
|
if key in ("_context_refs", "_thread_critical", "_thread_lock", "_attr_name"):
|
||||||
|
return super().__setattr__(key, value)
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
storage[key] = value
|
||||||
|
|
||||||
|
def __delattr__(self, key):
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
if key in storage:
|
||||||
|
del storage[key]
|
||||||
|
else:
|
||||||
|
raise AttributeError(f"{self!r} object has no attribute {key!r}")
|
|
@ -0,0 +1,157 @@
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from .compatibility import guarantee_single_callable
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class StatelessServer:
|
||||||
|
"""
|
||||||
|
Base server class that handles basic concepts like application instance
|
||||||
|
creation/pooling, exception handling, and similar, for stateless protocols
|
||||||
|
(i.e. ones without actual incoming connections to the process)
|
||||||
|
|
||||||
|
Your code should override the handle() method, doing whatever it needs to,
|
||||||
|
and calling get_or_create_application_instance with a unique `scope_id`
|
||||||
|
and `scope` for the scope it wants to get.
|
||||||
|
|
||||||
|
If an application instance is found with the same `scope_id`, you are
|
||||||
|
given its input queue, otherwise one is made for you with the scope provided
|
||||||
|
and you are given that fresh new input queue. Either way, you should do
|
||||||
|
something like:
|
||||||
|
|
||||||
|
input_queue = self.get_or_create_application_instance(
|
||||||
|
"user-123456",
|
||||||
|
{"type": "testprotocol", "user_id": "123456", "username": "andrew"},
|
||||||
|
)
|
||||||
|
input_queue.put_nowait(message)
|
||||||
|
|
||||||
|
If you try and create an application instance and there are already
|
||||||
|
`max_application` instances, the oldest/least recently used one will be
|
||||||
|
reclaimed and shut down to make space.
|
||||||
|
|
||||||
|
Application coroutines that error will be found periodically (every 100ms
|
||||||
|
by default) and have their exceptions printed to the console. Override
|
||||||
|
application_exception() if you want to do more when this happens.
|
||||||
|
|
||||||
|
If you override run(), make sure you handle things like launching the
|
||||||
|
application checker.
|
||||||
|
"""
|
||||||
|
|
||||||
|
application_checker_interval = 0.1
|
||||||
|
|
||||||
|
def __init__(self, application, max_applications=1000):
|
||||||
|
# Parameters
|
||||||
|
self.application = application
|
||||||
|
self.max_applications = max_applications
|
||||||
|
# Initialisation
|
||||||
|
self.application_instances = {}
|
||||||
|
|
||||||
|
### Mainloop and handling
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
"""
|
||||||
|
Runs the asyncio event loop with our handler loop.
|
||||||
|
"""
|
||||||
|
event_loop = asyncio.get_event_loop()
|
||||||
|
asyncio.ensure_future(self.application_checker())
|
||||||
|
try:
|
||||||
|
event_loop.run_until_complete(self.handle())
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
logger.info("Exiting due to Ctrl-C/interrupt")
|
||||||
|
|
||||||
|
async def handle(self):
|
||||||
|
raise NotImplementedError("You must implement handle()")
|
||||||
|
|
||||||
|
async def application_send(self, scope, message):
|
||||||
|
"""
|
||||||
|
Receives outbound sends from applications and handles them.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError("You must implement application_send()")
|
||||||
|
|
||||||
|
### Application instance management
|
||||||
|
|
||||||
|
def get_or_create_application_instance(self, scope_id, scope):
|
||||||
|
"""
|
||||||
|
Creates an application instance and returns its queue.
|
||||||
|
"""
|
||||||
|
if scope_id in self.application_instances:
|
||||||
|
self.application_instances[scope_id]["last_used"] = time.time()
|
||||||
|
return self.application_instances[scope_id]["input_queue"]
|
||||||
|
# See if we need to delete an old one
|
||||||
|
while len(self.application_instances) > self.max_applications:
|
||||||
|
self.delete_oldest_application_instance()
|
||||||
|
# Make an instance of the application
|
||||||
|
input_queue = asyncio.Queue()
|
||||||
|
application_instance = guarantee_single_callable(self.application)
|
||||||
|
# Run it, and stash the future for later checking
|
||||||
|
future = asyncio.ensure_future(
|
||||||
|
application_instance(
|
||||||
|
scope=scope,
|
||||||
|
receive=input_queue.get,
|
||||||
|
send=lambda message: self.application_send(scope, message),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
self.application_instances[scope_id] = {
|
||||||
|
"input_queue": input_queue,
|
||||||
|
"future": future,
|
||||||
|
"scope": scope,
|
||||||
|
"last_used": time.time(),
|
||||||
|
}
|
||||||
|
return input_queue
|
||||||
|
|
||||||
|
def delete_oldest_application_instance(self):
|
||||||
|
"""
|
||||||
|
Finds and deletes the oldest application instance
|
||||||
|
"""
|
||||||
|
oldest_time = min(
|
||||||
|
details["last_used"] for details in self.application_instances.values()
|
||||||
|
)
|
||||||
|
for scope_id, details in self.application_instances.items():
|
||||||
|
if details["last_used"] == oldest_time:
|
||||||
|
self.delete_application_instance(scope_id)
|
||||||
|
# Return to make sure we only delete one in case two have
|
||||||
|
# the same oldest time
|
||||||
|
return
|
||||||
|
|
||||||
|
def delete_application_instance(self, scope_id):
|
||||||
|
"""
|
||||||
|
Removes an application instance (makes sure its task is stopped,
|
||||||
|
then removes it from the current set)
|
||||||
|
"""
|
||||||
|
details = self.application_instances[scope_id]
|
||||||
|
del self.application_instances[scope_id]
|
||||||
|
if not details["future"].done():
|
||||||
|
details["future"].cancel()
|
||||||
|
|
||||||
|
async def application_checker(self):
|
||||||
|
"""
|
||||||
|
Goes through the set of current application instance Futures and cleans up
|
||||||
|
any that are done/prints exceptions for any that errored.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(self.application_checker_interval)
|
||||||
|
for scope_id, details in list(self.application_instances.items()):
|
||||||
|
if details["future"].done():
|
||||||
|
exception = details["future"].exception()
|
||||||
|
if exception:
|
||||||
|
await self.application_exception(exception, details)
|
||||||
|
try:
|
||||||
|
del self.application_instances[scope_id]
|
||||||
|
except KeyError:
|
||||||
|
# Exception handling might have already got here before us. That's fine.
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def application_exception(self, exception, application_details):
|
||||||
|
"""
|
||||||
|
Called whenever an application coroutine has an exception.
|
||||||
|
"""
|
||||||
|
logging.error(
|
||||||
|
"Exception inside application: %s\n%s%s",
|
||||||
|
exception,
|
||||||
|
"".join(traceback.format_tb(exception.__traceback__)),
|
||||||
|
f" {exception}",
|
||||||
|
)
|
|
@ -0,0 +1,532 @@
|
||||||
|
import asyncio
|
||||||
|
import asyncio.coroutines
|
||||||
|
import contextvars
|
||||||
|
import functools
|
||||||
|
import inspect
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import warnings
|
||||||
|
import weakref
|
||||||
|
from concurrent.futures import Future, ThreadPoolExecutor
|
||||||
|
from typing import Any, Callable, Dict, Optional, overload
|
||||||
|
|
||||||
|
from .current_thread_executor import CurrentThreadExecutor
|
||||||
|
from .local import Local
|
||||||
|
|
||||||
|
|
||||||
|
def _restore_context(context):
|
||||||
|
# Check for changes in contextvars, and set them to the current
|
||||||
|
# context for downstream consumers
|
||||||
|
for cvar in context:
|
||||||
|
try:
|
||||||
|
if cvar.get() != context.get(cvar):
|
||||||
|
cvar.set(context.get(cvar))
|
||||||
|
except LookupError:
|
||||||
|
cvar.set(context.get(cvar))
|
||||||
|
|
||||||
|
|
||||||
|
def _iscoroutinefunction_or_partial(func: Any) -> bool:
|
||||||
|
# Python < 3.8 does not correctly determine partially wrapped
|
||||||
|
# coroutine functions are coroutine functions, hence the need for
|
||||||
|
# this to exist. Code taken from CPython.
|
||||||
|
if sys.version_info >= (3, 8):
|
||||||
|
return asyncio.iscoroutinefunction(func)
|
||||||
|
else:
|
||||||
|
while inspect.ismethod(func):
|
||||||
|
func = func.__func__
|
||||||
|
while isinstance(func, functools.partial):
|
||||||
|
func = func.func
|
||||||
|
|
||||||
|
return asyncio.iscoroutinefunction(func)
|
||||||
|
|
||||||
|
|
||||||
|
class ThreadSensitiveContext:
|
||||||
|
"""Async context manager to manage context for thread sensitive mode
|
||||||
|
|
||||||
|
This context manager controls which thread pool executor is used when in
|
||||||
|
thread sensitive mode. By default, a single thread pool executor is shared
|
||||||
|
within a process.
|
||||||
|
|
||||||
|
In Python 3.7+, the ThreadSensitiveContext() context manager may be used to
|
||||||
|
specify a thread pool per context.
|
||||||
|
|
||||||
|
This context manager is re-entrant, so only the outer-most call to
|
||||||
|
ThreadSensitiveContext will set the context.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
>>> import time
|
||||||
|
>>> async with ThreadSensitiveContext():
|
||||||
|
... await sync_to_async(time.sleep, 1)()
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.token = None
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
try:
|
||||||
|
SyncToAsync.thread_sensitive_context.get()
|
||||||
|
except LookupError:
|
||||||
|
self.token = SyncToAsync.thread_sensitive_context.set(self)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc, value, tb):
|
||||||
|
if not self.token:
|
||||||
|
return
|
||||||
|
|
||||||
|
executor = SyncToAsync.context_to_thread_executor.pop(self, None)
|
||||||
|
if executor:
|
||||||
|
executor.shutdown()
|
||||||
|
SyncToAsync.thread_sensitive_context.reset(self.token)
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncToSync:
    """
    Utility class which turns an awaitable that only works on the thread with
    the event loop into a synchronous callable that works in a subthread.

    If the call stack contains an async loop, the code runs there.
    Otherwise, the code runs in a new loop in a new thread.

    Either way, this thread then pauses and waits to run any thread_sensitive
    code called from further down the call stack using SyncToAsync, before
    finally exiting once the async task returns.
    """

    # Maps launched Tasks to the threads that launched them (for locals impl)
    launch_map: "Dict[asyncio.Task[object], threading.Thread]" = {}

    # Keeps track of which CurrentThreadExecutor to use. This uses an asgiref
    # Local, not a threadlocal, so that tasks can work out what their parent used.
    executors = Local()

    # When we can't find a CurrentThreadExecutor from the context, such as
    # inside create_task, we'll look it up here from the running event loop.
    loop_thread_executors: "Dict[asyncio.AbstractEventLoop, CurrentThreadExecutor]" = {}

    def __init__(self, awaitable, force_new_loop=False):
        """
        Wrap ``awaitable`` (an async callable). If ``force_new_loop`` is true,
        always run in a fresh event loop rather than a known parent loop.
        """
        if not callable(awaitable) or (
            not _iscoroutinefunction_or_partial(awaitable)
            and not _iscoroutinefunction_or_partial(
                getattr(awaitable, "__call__", awaitable)
            )
        ):
            # Python does not have very reliable detection of async functions
            # (lots of false negatives) so this is just a warning.
            warnings.warn(
                "async_to_sync was passed a non-async-marked callable", stacklevel=2
            )
        self.awaitable = awaitable
        try:
            # Preserve bound-method-ness so __get__/update_wrapper behave.
            self.__self__ = self.awaitable.__self__
        except AttributeError:
            pass
        if force_new_loop:
            # They have asked that we always run in a new sub-loop.
            self.main_event_loop = None
        else:
            try:
                self.main_event_loop = asyncio.get_running_loop()
            except RuntimeError:
                # There's no event loop in this thread. Look for the threadlocal if
                # we're inside SyncToAsync
                main_event_loop_pid = getattr(
                    SyncToAsync.threadlocal, "main_event_loop_pid", None
                )
                # We make sure the parent loop is from the same process - if
                # they've forked, this is not going to be valid any more (#194)
                if main_event_loop_pid and main_event_loop_pid == os.getpid():
                    self.main_event_loop = getattr(
                        SyncToAsync.threadlocal, "main_event_loop", None
                    )
                else:
                    self.main_event_loop = None

    def __call__(self, *args, **kwargs):
        """
        Synchronously run the wrapped awaitable, idling this thread in a
        CurrentThreadExecutor so thread-sensitive work from deeper in the
        call stack can be posted back here while we wait.
        """
        # You can't call AsyncToSync from a thread with a running event loop
        try:
            event_loop = asyncio.get_running_loop()
        except RuntimeError:
            pass
        else:
            if event_loop.is_running():
                raise RuntimeError(
                    "You cannot use AsyncToSync in the same thread as an async event loop - "
                    "just await the async function directly."
                )

        # Wrapping context in list so it can be reassigned from within
        # `main_wrap`.
        context = [contextvars.copy_context()]

        # Make a future for the return information
        call_result = Future()
        # Get the source thread
        source_thread = threading.current_thread()
        # Make a CurrentThreadExecutor we'll use to idle in this thread - we
        # need one for every sync frame, even if there's one above us in the
        # same thread.
        if hasattr(self.executors, "current"):
            old_current_executor = self.executors.current
        else:
            old_current_executor = None
        current_executor = CurrentThreadExecutor()
        self.executors.current = current_executor
        loop = None
        # Use call_soon_threadsafe to schedule a synchronous callback on the
        # main event loop's thread if it's there, otherwise make a new loop
        # in this thread.
        try:
            awaitable = self.main_wrap(
                args, kwargs, call_result, source_thread, sys.exc_info(), context
            )

            if not (self.main_event_loop and self.main_event_loop.is_running()):
                # Make our own event loop - in a new thread - and run inside that.
                loop = asyncio.new_event_loop()
                self.loop_thread_executors[loop] = current_executor
                loop_executor = ThreadPoolExecutor(max_workers=1)
                loop_future = loop_executor.submit(
                    self._run_event_loop, loop, awaitable
                )
                if current_executor:
                    # Run the CurrentThreadExecutor until the future is done
                    current_executor.run_until_future(loop_future)
                # Wait for future and/or allow for exception propagation
                loop_future.result()
            else:
                # Call it inside the existing loop
                self.main_event_loop.call_soon_threadsafe(
                    self.main_event_loop.create_task, awaitable
                )
                if current_executor:
                    # Run the CurrentThreadExecutor until the future is done
                    current_executor.run_until_future(call_result)
        finally:
            # Clean up any executor we were running
            if loop is not None:
                del self.loop_thread_executors[loop]
            if hasattr(self.executors, "current"):
                del self.executors.current
            if old_current_executor:
                self.executors.current = old_current_executor
            _restore_context(context[0])

        # Wait for results from the future.
        return call_result.result()

    def _run_event_loop(self, loop, coro):
        """
        Runs the given event loop (designed to be called in a thread).
        """
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(coro)
        finally:
            try:
                # mimic asyncio.run() behavior
                # cancel unexhausted async generators
                tasks = asyncio.all_tasks(loop)
                for task in tasks:
                    task.cancel()

                async def gather():
                    await asyncio.gather(*tasks, return_exceptions=True)

                loop.run_until_complete(gather())
                # Report (not raise) exceptions from cancelled tasks, again
                # mirroring what asyncio.run() does on shutdown.
                for task in tasks:
                    if task.cancelled():
                        continue
                    if task.exception() is not None:
                        loop.call_exception_handler(
                            {
                                "message": "unhandled exception during loop shutdown",
                                "exception": task.exception(),
                                "task": task,
                            }
                        )
                if hasattr(loop, "shutdown_asyncgens"):
                    loop.run_until_complete(loop.shutdown_asyncgens())
            finally:
                loop.close()
                asyncio.set_event_loop(self.main_event_loop)

    def __get__(self, parent, objtype):
        """
        Include self for methods
        """
        func = functools.partial(self.__call__, parent)
        return functools.update_wrapper(func, self.awaitable)

    async def main_wrap(
        self, args, kwargs, call_result, source_thread, exc_info, context
    ):
        """
        Wraps the awaitable with something that puts the result into the
        result/exception future.
        """
        if context is not None:
            _restore_context(context[0])

        current_task = SyncToAsync.get_current_task()
        self.launch_map[current_task] = source_thread
        try:
            # If we have an exception, run the function inside the except block
            # after raising it so exc_info is correctly populated.
            if exc_info[1]:
                try:
                    raise exc_info[1]
                except BaseException:
                    result = await self.awaitable(*args, **kwargs)
            else:
                result = await self.awaitable(*args, **kwargs)
        except BaseException as e:
            call_result.set_exception(e)
        else:
            call_result.set_result(result)
        finally:
            del self.launch_map[current_task]

            # Hand any contextvar changes made inside the coroutine back to
            # the sync caller (restored in __call__'s finally block).
            context[0] = contextvars.copy_context()
|
||||||
|
|
||||||
|
|
||||||
|
class SyncToAsync:
    """
    Utility class which turns a synchronous callable into an awaitable that
    runs in a threadpool. It also sets a threadlocal inside the thread so
    calls to AsyncToSync can escape it.

    If thread_sensitive is passed, the code will run in the same thread as any
    outer code. This is needed for underlying Python code that is not
    threadsafe (for example, code which handles SQLite database connections).

    If the outermost program is async (i.e. SyncToAsync is outermost), then
    this will be a dedicated single sub-thread that all sync code runs in,
    one after the other. If the outermost program is sync (i.e. AsyncToSync is
    outermost), this will just be the main thread. This is achieved by idling
    with a CurrentThreadExecutor while AsyncToSync is blocking its sync parent,
    rather than just blocking.

    If executor is passed in, that will be used instead of the loop's default executor.
    In order to pass in an executor, thread_sensitive must be set to False, otherwise
    a TypeError will be raised.
    """

    # If they've set ASGI_THREADS, update the default asyncio executor for now
    if "ASGI_THREADS" in os.environ:
        # We use get_event_loop here - not get_running_loop - as this will
        # be run at import time, and we want to update the main thread's loop.
        loop = asyncio.get_event_loop()
        loop.set_default_executor(
            ThreadPoolExecutor(max_workers=int(os.environ["ASGI_THREADS"]))
        )

    # Maps launched threads to the coroutines that spawned them
    launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {}

    # Storage for main event loop references
    threadlocal = threading.local()

    # Single-thread executor for thread-sensitive code
    single_thread_executor = ThreadPoolExecutor(max_workers=1)

    # Maintain a contextvar for the current execution context. Optionally used
    # for thread sensitive mode.
    thread_sensitive_context: "contextvars.ContextVar[str]" = contextvars.ContextVar(
        "thread_sensitive_context"
    )

    # Contextvar that is used to detect if the single thread executor
    # would be awaited on while already being used in the same context
    deadlock_context: "contextvars.ContextVar[bool]" = contextvars.ContextVar(
        "deadlock_context"
    )

    # Maintaining a weak reference to the context ensures that thread pools are
    # erased once the context goes out of scope. This terminates the thread pool.
    context_to_thread_executor: "weakref.WeakKeyDictionary[object, ThreadPoolExecutor]" = (
        weakref.WeakKeyDictionary()
    )

    def __init__(
        self,
        func: Callable[..., Any],
        thread_sensitive: bool = True,
        executor: Optional["ThreadPoolExecutor"] = None,
    ) -> None:
        """
        Wrap the sync callable ``func``. Raises TypeError if ``func`` is
        already async, or if ``executor`` is combined with thread-sensitive
        mode (the two are mutually exclusive).
        """
        if (
            not callable(func)
            or _iscoroutinefunction_or_partial(func)
            or _iscoroutinefunction_or_partial(getattr(func, "__call__", func))
        ):
            raise TypeError("sync_to_async can only be applied to sync functions.")
        self.func = func
        functools.update_wrapper(self, func)
        self._thread_sensitive = thread_sensitive
        # Mark this wrapper so asyncio.iscoroutinefunction() reports True.
        self._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
        if thread_sensitive and executor is not None:
            raise TypeError("executor must not be set when thread_sensitive is True")
        self._executor = executor
        try:
            # Preserve bound-method-ness for __get__/update_wrapper.
            self.__self__ = func.__self__  # type: ignore
        except AttributeError:
            pass

    async def __call__(self, *args, **kwargs):
        """
        Run the wrapped sync callable in the appropriate executor and await
        its result, propagating contextvars in both directions.
        """
        loop = asyncio.get_running_loop()

        # Work out what thread to run the code in
        if self._thread_sensitive:
            if hasattr(AsyncToSync.executors, "current"):
                # If we have a parent sync thread above somewhere, use that
                executor = AsyncToSync.executors.current
            elif self.thread_sensitive_context and self.thread_sensitive_context.get(
                None
            ):
                # If we have a way of retrieving the current context, attempt
                # to use a per-context thread pool executor
                thread_sensitive_context = self.thread_sensitive_context.get()

                if thread_sensitive_context in self.context_to_thread_executor:
                    # Re-use thread executor in current context
                    executor = self.context_to_thread_executor[thread_sensitive_context]
                else:
                    # Create new thread executor in current context
                    executor = ThreadPoolExecutor(max_workers=1)
                    self.context_to_thread_executor[thread_sensitive_context] = executor
            elif loop in AsyncToSync.loop_thread_executors:
                # Re-use thread executor for running loop
                executor = AsyncToSync.loop_thread_executors[loop]
            elif self.deadlock_context and self.deadlock_context.get(False):
                raise RuntimeError(
                    "Single thread executor already being used, would deadlock"
                )
            else:
                # Otherwise, we run it in a fixed single thread
                executor = self.single_thread_executor
                if self.deadlock_context:
                    self.deadlock_context.set(True)
        else:
            # Use the passed in executor, or the loop's default if it is None
            executor = self._executor

        # Run the child callable under a copy of the current context so
        # contextvar reads inside it see the caller's values.
        context = contextvars.copy_context()
        child = functools.partial(self.func, *args, **kwargs)
        func = context.run
        args = (child,)
        kwargs = {}

        try:
            # Run the code in the right thread
            future = loop.run_in_executor(
                executor,
                functools.partial(
                    self.thread_handler,
                    loop,
                    self.get_current_task(),
                    sys.exc_info(),
                    func,
                    *args,
                    **kwargs,
                ),
            )
            ret = await asyncio.wait_for(future, timeout=None)

        finally:
            # Copy contextvar writes made in the worker thread back out.
            _restore_context(context)
            if self.deadlock_context:
                self.deadlock_context.set(False)

        return ret

    def __get__(self, parent, objtype):
        """
        Include self for methods
        """
        return functools.partial(self.__call__, parent)

    def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
        """
        Wraps the sync application with exception handling.
        """
        # Set the threadlocal for AsyncToSync
        self.threadlocal.main_event_loop = loop
        self.threadlocal.main_event_loop_pid = os.getpid()
        # Set the task mapping (used for the locals module)
        current_thread = threading.current_thread()
        if AsyncToSync.launch_map.get(source_task) == current_thread:
            # Our parent task was launched from this same thread, so don't make
            # a launch map entry - let it shortcut over us! (and stop infinite loops)
            parent_set = False
        else:
            self.launch_map[current_thread] = source_task
            parent_set = True
        # Run the function
        try:
            # If we have an exception, run the function inside the except block
            # after raising it so exc_info is correctly populated.
            if exc_info[1]:
                try:
                    raise exc_info[1]
                except BaseException:
                    return func(*args, **kwargs)
            else:
                return func(*args, **kwargs)
        finally:
            # Only delete the launch_map parent if we set it, otherwise it is
            # from someone else.
            if parent_set:
                del self.launch_map[current_thread]

    @staticmethod
    def get_current_task():
        """
        Implementation of asyncio.current_task()
        that returns None if there is no task.
        """
        try:
            return asyncio.current_task()
        except RuntimeError:
            return None
|
||||||
|
|
||||||
|
|
||||||
|
# Lowercase aliases (and decorator friendliness)
# AsyncToSync is usable directly as a decorator/wrapper, so the lowercase
# alias is simply the class itself (unlike sync_to_async below, which is a
# wrapper function to support keyword-argument decorator factories).
async_to_sync = AsyncToSync
|
||||||
|
|
||||||
|
|
||||||
|
@overload
def sync_to_async(
    func: None = None,
    thread_sensitive: bool = True,
    executor: Optional["ThreadPoolExecutor"] = None,
) -> Callable[[Callable[..., Any]], SyncToAsync]:
    ...


@overload
def sync_to_async(
    func: Callable[..., Any],
    thread_sensitive: bool = True,
    executor: Optional["ThreadPoolExecutor"] = None,
) -> SyncToAsync:
    ...


def sync_to_async(
    func=None,
    thread_sensitive=True,
    executor=None,
):
    """
    Wrap a sync callable as an awaitable.

    Works both as a direct wrapper (``sync_to_async(fn)``) and as a
    decorator factory (``@sync_to_async(thread_sensitive=False)``), in
    which case the actual wrapping is deferred until the function arrives.
    """
    if func is not None:
        # Direct form: wrap the callable immediately.
        return SyncToAsync(
            func,
            thread_sensitive=thread_sensitive,
            executor=executor,
        )

    # Decorator-factory form: return a decorator that closes over the
    # configuration arguments.
    def decorator(f):
        return SyncToAsync(
            f,
            thread_sensitive=thread_sensitive,
            executor=executor,
        )

    return decorator
|
|
@ -0,0 +1,97 @@
|
||||||
|
import asyncio
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .compatibility import guarantee_single_callable
|
||||||
|
from .timeout import timeout as async_timeout
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationCommunicator:
    """
    Runs an ASGI application in a test mode, allowing sending of
    messages to it and retrieval of messages it sends.
    """

    def __init__(self, application, scope):
        # Normalise ASGI 2 / ASGI 3 applications to a single callable of
        # (scope, receive, send).
        self.application = guarantee_single_callable(application)
        self.scope = scope
        # Messages flowing into the application (its "receive" side).
        self.input_queue = asyncio.Queue()
        # Messages the application emits (its "send" side).
        self.output_queue = asyncio.Queue()
        # Start the application as a background task; the queues' bound
        # get/put methods serve directly as the ASGI receive/send callables.
        self.future = asyncio.ensure_future(
            self.application(scope, self.input_queue.get, self.output_queue.put)
        )

    async def wait(self, timeout=1):
        """
        Waits for the application to stop itself and returns any exceptions.
        """
        try:
            async with async_timeout(timeout):
                try:
                    await self.future
                    # Re-raise any exception stored on the task.
                    self.future.result()
                except asyncio.CancelledError:
                    pass
        finally:
            # If it timed out (or errored), make sure the task is cancelled
            # and fully awaited before returning.
            if not self.future.done():
                self.future.cancel()
                try:
                    await self.future
                except asyncio.CancelledError:
                    pass

    def stop(self, exceptions=True):
        # Cancel the application task if it is still running; otherwise,
        # optionally surface any exception it finished with.
        if not self.future.done():
            self.future.cancel()
        elif exceptions:
            # Give a chance to raise any exceptions
            self.future.result()

    def __del__(self):
        # Clean up on deletion
        try:
            self.stop(exceptions=False)
        except RuntimeError:
            # Event loop already stopped
            pass

    async def send_input(self, message):
        """
        Sends a single message to the application
        """
        # Give it the message
        await self.input_queue.put(message)

    async def receive_output(self, timeout=1):
        """
        Receives a single message from the application, with optional timeout.
        """
        # Make sure there's not an exception to raise from the task
        if self.future.done():
            self.future.result()
        # Wait and receive the message
        try:
            async with async_timeout(timeout):
                return await self.output_queue.get()
        except asyncio.TimeoutError as e:
            # See if we have another error to raise inside
            if self.future.done():
                self.future.result()
            else:
                # Cancel and drain the task so the timeout propagates
                # cleanly instead of leaking a pending task.
                self.future.cancel()
                try:
                    await self.future
                except asyncio.CancelledError:
                    pass
            raise e

    async def receive_nothing(self, timeout=0.1, interval=0.01):
        """
        Checks that there is no message to receive in the given time.
        """
        # `interval` has precedence over `timeout`
        start = time.monotonic()
        while time.monotonic() - start < timeout:
            if not self.output_queue.empty():
                return False
            await asyncio.sleep(interval)
        return self.output_queue.empty()
|
|
@ -0,0 +1,112 @@
|
||||||
|
# This code is originally sourced from the aio-libs project "async_timeout",
|
||||||
|
# under the Apache 2.0 license. You may see the original project at
|
||||||
|
# https://github.com/aio-libs/async-timeout
|
||||||
|
|
||||||
|
# It is vendored here to reduce chain-dependencies on this library, and
|
||||||
|
# modified slightly to remove some features we don't use.
|
||||||
|
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import Any, Optional, Type
|
||||||
|
|
||||||
|
|
||||||
|
class timeout:
    """timeout context manager.

    Useful in cases when you want to apply timeout logic around block
    of code or in cases when asyncio.wait_for is not suitable. For example:

    >>> with timeout(0.001):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    timeout - value in seconds or None to disable timeout logic
    loop - asyncio compatible event loop
    """

    def __init__(
        self,
        timeout: Optional[float],
        *,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._timeout = timeout
        if loop is None:
            # NOTE(review): falls back to get_event_loop() at construction
            # time — presumably instances are created inside a running loop;
            # confirm behavior on newer Python where this is deprecated
            # outside a loop.
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._task = None  # type: Optional[asyncio.Task[Any]]
        # Set True once _cancel_task has fired, i.e. the deadline passed.
        self._cancelled = False
        self._cancel_handler = None  # type: Optional[asyncio.Handle]
        # Absolute loop-time deadline; None when no timeout is armed.
        self._cancel_at = None  # type: Optional[float]

    def __enter__(self) -> "timeout":
        return self._do_enter()

    def __exit__(
        self,
        exc_type: Type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> Optional[bool]:
        self._do_exit(exc_type)
        return None

    async def __aenter__(self) -> "timeout":
        return self._do_enter()

    async def __aexit__(
        self,
        exc_type: Type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> None:
        self._do_exit(exc_type)

    @property
    def expired(self) -> bool:
        # True if the timeout fired (the guarded block was cancelled).
        return self._cancelled

    @property
    def remaining(self) -> Optional[float]:
        # Seconds left until the deadline, or None if no timeout is armed.
        if self._cancel_at is not None:
            return max(self._cancel_at - self._loop.time(), 0.0)
        else:
            return None

    def _do_enter(self) -> "timeout":
        # Support Tornado 5- without timeout
        # Details: https://github.com/python/asyncio/issues/392
        if self._timeout is None:
            return self

        self._task = asyncio.current_task(self._loop)
        if self._task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        if self._timeout <= 0:
            # Non-positive timeout: cancel on the very next loop iteration.
            self._loop.call_soon(self._cancel_task)
            return self

        self._cancel_at = self._loop.time() + self._timeout
        self._cancel_handler = self._loop.call_at(self._cancel_at, self._cancel_task)
        return self

    def _do_exit(self, exc_type: Type[BaseException]) -> None:
        # Translate the CancelledError we induced into TimeoutError; a
        # CancelledError from elsewhere (self._cancelled False) passes through.
        if exc_type is asyncio.CancelledError and self._cancelled:
            self._cancel_handler = None
            self._task = None
            raise asyncio.TimeoutError
        # Block exited before the deadline: disarm the pending callback.
        if self._timeout is not None and self._cancel_handler is not None:
            self._cancel_handler.cancel()
            self._cancel_handler = None
        self._task = None
        return None

    def _cancel_task(self) -> None:
        # Deadline callback: cancel the guarded task and record that we
        # (not someone else) caused the cancellation.
        if self._task is not None:
            self._task.cancel()
            self._cancelled = True
|
|
@ -0,0 +1,242 @@
|
||||||
|
import sys
|
||||||
|
from typing import Awaitable, Callable, Dict, Iterable, Optional, Tuple, Type, Union
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 8):
|
||||||
|
from typing import Literal, Protocol, TypedDict
|
||||||
|
else:
|
||||||
|
from typing_extensions import Literal, Protocol, TypedDict
|
||||||
|
|
||||||
|
# Public re-export surface of this module: ASGI scope types, event types,
# and application/callable aliases for both ASGI 2 and ASGI 3.
__all__ = (
    "ASGIVersions",
    "HTTPScope",
    "WebSocketScope",
    "LifespanScope",
    "WWWScope",
    "Scope",
    "HTTPRequestEvent",
    "HTTPResponseStartEvent",
    "HTTPResponseBodyEvent",
    "HTTPServerPushEvent",
    "HTTPDisconnectEvent",
    "WebSocketConnectEvent",
    "WebSocketAcceptEvent",
    "WebSocketReceiveEvent",
    "WebSocketSendEvent",
    "WebSocketResponseStartEvent",
    "WebSocketResponseBodyEvent",
    "WebSocketDisconnectEvent",
    "WebSocketCloseEvent",
    "LifespanStartupEvent",
    "LifespanShutdownEvent",
    "LifespanStartupCompleteEvent",
    "LifespanStartupFailedEvent",
    "LifespanShutdownCompleteEvent",
    "LifespanShutdownFailedEvent",
    "ASGIReceiveEvent",
    "ASGISendEvent",
    "ASGIReceiveCallable",
    "ASGISendCallable",
    "ASGI2Protocol",
    "ASGI2Application",
    "ASGI3Application",
    "ASGIApplication",
)
|
||||||
|
|
||||||
|
|
||||||
|
class ASGIVersions(TypedDict):
    """The ``asgi`` dict present in every scope: spec and protocol versions."""

    spec_version: str
    version: Union[Literal["2.0"], Literal["3.0"]]


class HTTPScope(TypedDict):
    """Connection scope for a single HTTP request."""

    type: Literal["http"]
    asgi: ASGIVersions
    http_version: str
    method: str
    scheme: str
    path: str
    raw_path: bytes
    query_string: bytes
    root_path: str
    headers: Iterable[Tuple[bytes, bytes]]
    # (host, port) of the client, if known
    client: Optional[Tuple[str, int]]
    # (host, port) of the server; port may be None (e.g. unix sockets)
    server: Optional[Tuple[str, Optional[int]]]
    extensions: Optional[Dict[str, Dict[object, object]]]


class WebSocketScope(TypedDict):
    """Connection scope for a WebSocket connection."""

    type: Literal["websocket"]
    asgi: ASGIVersions
    http_version: str
    scheme: str
    path: str
    raw_path: bytes
    query_string: bytes
    root_path: str
    headers: Iterable[Tuple[bytes, bytes]]
    client: Optional[Tuple[str, int]]
    server: Optional[Tuple[str, Optional[int]]]
    # Subprotocols offered by the client in the handshake
    subprotocols: Iterable[str]
    extensions: Optional[Dict[str, Dict[object, object]]]


class LifespanScope(TypedDict):
    """Scope for the process-lifetime lifespan protocol."""

    type: Literal["lifespan"]
    asgi: ASGIVersions


# "WWW" scopes are the per-connection protocols (HTTP and WebSocket);
# Scope additionally admits the lifespan protocol.
WWWScope = Union[HTTPScope, WebSocketScope]
Scope = Union[HTTPScope, WebSocketScope, LifespanScope]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPRequestEvent(TypedDict):
    """Request body chunk sent from server to application."""

    type: Literal["http.request"]
    body: bytes
    more_body: bool


class HTTPResponseStartEvent(TypedDict):
    """Response status/headers; must precede any body events."""

    type: Literal["http.response.start"]
    status: int
    headers: Iterable[Tuple[bytes, bytes]]


class HTTPResponseBodyEvent(TypedDict):
    """Response body chunk; more_body=False marks the final chunk."""

    type: Literal["http.response.body"]
    body: bytes
    more_body: bool


class HTTPServerPushEvent(TypedDict):
    """HTTP/2 server push extension event."""

    type: Literal["http.response.push"]
    path: str
    headers: Iterable[Tuple[bytes, bytes]]


class HTTPDisconnectEvent(TypedDict):
    """Client or server closed the HTTP connection."""

    type: Literal["http.disconnect"]


class WebSocketConnectEvent(TypedDict):
    """Initial event delivered when a WebSocket client connects."""

    type: Literal["websocket.connect"]


class WebSocketAcceptEvent(TypedDict):
    """Application accepts the WebSocket handshake."""

    type: Literal["websocket.accept"]
    subprotocol: Optional[str]
    headers: Iterable[Tuple[bytes, bytes]]


class WebSocketReceiveEvent(TypedDict):
    """Incoming WebSocket frame; exactly one of bytes/text is expected."""

    type: Literal["websocket.receive"]
    bytes: Optional[bytes]
    text: Optional[str]


class WebSocketSendEvent(TypedDict):
    """Outgoing WebSocket frame; exactly one of bytes/text is expected."""

    type: Literal["websocket.send"]
    bytes: Optional[bytes]
    text: Optional[str]


class WebSocketResponseStartEvent(TypedDict):
    """Denial-response extension: HTTP response start for a rejected handshake."""

    type: Literal["websocket.http.response.start"]
    status: int
    headers: Iterable[Tuple[bytes, bytes]]


class WebSocketResponseBodyEvent(TypedDict):
    """Denial-response extension: HTTP response body for a rejected handshake."""

    type: Literal["websocket.http.response.body"]
    body: bytes
    more_body: bool


class WebSocketDisconnectEvent(TypedDict):
    """WebSocket connection closed by the client/server."""

    type: Literal["websocket.disconnect"]
    code: int


class WebSocketCloseEvent(TypedDict):
    """Application requests the WebSocket be closed."""

    type: Literal["websocket.close"]
    code: int
    reason: Optional[str]


class LifespanStartupEvent(TypedDict):
    """Server signals process startup."""

    type: Literal["lifespan.startup"]


class LifespanShutdownEvent(TypedDict):
    """Server signals process shutdown."""

    type: Literal["lifespan.shutdown"]


class LifespanStartupCompleteEvent(TypedDict):
    """Application reports successful startup."""

    type: Literal["lifespan.startup.complete"]


class LifespanStartupFailedEvent(TypedDict):
    """Application reports failed startup, with a message."""

    type: Literal["lifespan.startup.failed"]
    message: str


class LifespanShutdownCompleteEvent(TypedDict):
    """Application reports successful shutdown."""

    type: Literal["lifespan.shutdown.complete"]


class LifespanShutdownFailedEvent(TypedDict):
    """Application reports failed shutdown, with a message."""

    type: Literal["lifespan.shutdown.failed"]
    message: str
|
||||||
|
|
||||||
|
|
||||||
|
ASGIReceiveEvent = Union[
|
||||||
|
HTTPRequestEvent,
|
||||||
|
HTTPDisconnectEvent,
|
||||||
|
WebSocketConnectEvent,
|
||||||
|
WebSocketReceiveEvent,
|
||||||
|
WebSocketDisconnectEvent,
|
||||||
|
LifespanStartupEvent,
|
||||||
|
LifespanShutdownEvent,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
ASGISendEvent = Union[
|
||||||
|
HTTPResponseStartEvent,
|
||||||
|
HTTPResponseBodyEvent,
|
||||||
|
HTTPServerPushEvent,
|
||||||
|
HTTPDisconnectEvent,
|
||||||
|
WebSocketAcceptEvent,
|
||||||
|
WebSocketSendEvent,
|
||||||
|
WebSocketResponseStartEvent,
|
||||||
|
WebSocketResponseBodyEvent,
|
||||||
|
WebSocketCloseEvent,
|
||||||
|
LifespanStartupCompleteEvent,
|
||||||
|
LifespanStartupFailedEvent,
|
||||||
|
LifespanShutdownCompleteEvent,
|
||||||
|
LifespanShutdownFailedEvent,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
ASGIReceiveCallable = Callable[[], Awaitable[ASGIReceiveEvent]]
|
||||||
|
ASGISendCallable = Callable[[ASGISendEvent], Awaitable[None]]
|
||||||
|
|
||||||
|
|
||||||
|
class ASGI2Protocol(Protocol):
|
||||||
|
def __init__(self, scope: Scope) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def __call__(
|
||||||
|
self, receive: ASGIReceiveCallable, send: ASGISendCallable
|
||||||
|
) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
ASGI2Application = Type[ASGI2Protocol]
|
||||||
|
ASGI3Application = Callable[
|
||||||
|
[
|
||||||
|
Scope,
|
||||||
|
ASGIReceiveCallable,
|
||||||
|
ASGISendCallable,
|
||||||
|
],
|
||||||
|
Awaitable[None],
|
||||||
|
]
|
||||||
|
ASGIApplication = Union[ASGI2Application, ASGI3Application]
|
|
@ -0,0 +1,162 @@
|
||||||
|
from io import BytesIO
|
||||||
|
from tempfile import SpooledTemporaryFile
|
||||||
|
|
||||||
|
from asgiref.sync import AsyncToSync, sync_to_async
|
||||||
|
|
||||||
|
|
||||||
|
class WsgiToAsgi:
|
||||||
|
"""
|
||||||
|
Wraps a WSGI application to make it into an ASGI application.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, wsgi_application):
|
||||||
|
self.wsgi_application = wsgi_application
|
||||||
|
|
||||||
|
async def __call__(self, scope, receive, send):
|
||||||
|
"""
|
||||||
|
ASGI application instantiation point.
|
||||||
|
We return a new WsgiToAsgiInstance here with the WSGI app
|
||||||
|
and the scope, ready to respond when it is __call__ed.
|
||||||
|
"""
|
||||||
|
await WsgiToAsgiInstance(self.wsgi_application)(scope, receive, send)
|
||||||
|
|
||||||
|
|
||||||
|
class WsgiToAsgiInstance:
|
||||||
|
"""
|
||||||
|
Per-socket instance of a wrapped WSGI application
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, wsgi_application):
|
||||||
|
self.wsgi_application = wsgi_application
|
||||||
|
self.response_started = False
|
||||||
|
self.response_content_length = None
|
||||||
|
|
||||||
|
async def __call__(self, scope, receive, send):
|
||||||
|
if scope["type"] != "http":
|
||||||
|
raise ValueError("WSGI wrapper received a non-HTTP scope")
|
||||||
|
self.scope = scope
|
||||||
|
with SpooledTemporaryFile(max_size=65536) as body:
|
||||||
|
# Alright, wait for the http.request messages
|
||||||
|
while True:
|
||||||
|
message = await receive()
|
||||||
|
if message["type"] != "http.request":
|
||||||
|
raise ValueError("WSGI wrapper received a non-HTTP-request message")
|
||||||
|
body.write(message.get("body", b""))
|
||||||
|
if not message.get("more_body"):
|
||||||
|
break
|
||||||
|
body.seek(0)
|
||||||
|
# Wrap send so it can be called from the subthread
|
||||||
|
self.sync_send = AsyncToSync(send)
|
||||||
|
# Call the WSGI app
|
||||||
|
await self.run_wsgi_app(body)
|
||||||
|
|
||||||
|
def build_environ(self, scope, body):
|
||||||
|
"""
|
||||||
|
Builds a scope and request body into a WSGI environ object.
|
||||||
|
"""
|
||||||
|
environ = {
|
||||||
|
"REQUEST_METHOD": scope["method"],
|
||||||
|
"SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"),
|
||||||
|
"PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
|
||||||
|
"QUERY_STRING": scope["query_string"].decode("ascii"),
|
||||||
|
"SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
|
||||||
|
"wsgi.version": (1, 0),
|
||||||
|
"wsgi.url_scheme": scope.get("scheme", "http"),
|
||||||
|
"wsgi.input": body,
|
||||||
|
"wsgi.errors": BytesIO(),
|
||||||
|
"wsgi.multithread": True,
|
||||||
|
"wsgi.multiprocess": True,
|
||||||
|
"wsgi.run_once": False,
|
||||||
|
}
|
||||||
|
# Get server name and port - required in WSGI, not in ASGI
|
||||||
|
if "server" in scope:
|
||||||
|
environ["SERVER_NAME"] = scope["server"][0]
|
||||||
|
environ["SERVER_PORT"] = str(scope["server"][1])
|
||||||
|
else:
|
||||||
|
environ["SERVER_NAME"] = "localhost"
|
||||||
|
environ["SERVER_PORT"] = "80"
|
||||||
|
|
||||||
|
if "client" in scope:
|
||||||
|
environ["REMOTE_ADDR"] = scope["client"][0]
|
||||||
|
|
||||||
|
# Go through headers and make them into environ entries
|
||||||
|
for name, value in self.scope.get("headers", []):
|
||||||
|
name = name.decode("latin1")
|
||||||
|
if name == "content-length":
|
||||||
|
corrected_name = "CONTENT_LENGTH"
|
||||||
|
elif name == "content-type":
|
||||||
|
corrected_name = "CONTENT_TYPE"
|
||||||
|
else:
|
||||||
|
corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
|
||||||
|
# HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in case
|
||||||
|
value = value.decode("latin1")
|
||||||
|
if corrected_name in environ:
|
||||||
|
value = environ[corrected_name] + "," + value
|
||||||
|
environ[corrected_name] = value
|
||||||
|
return environ
|
||||||
|
|
||||||
|
def start_response(self, status, response_headers, exc_info=None):
|
||||||
|
"""
|
||||||
|
WSGI start_response callable.
|
||||||
|
"""
|
||||||
|
# Don't allow re-calling once response has begun
|
||||||
|
if self.response_started:
|
||||||
|
raise exc_info[1].with_traceback(exc_info[2])
|
||||||
|
# Don't allow re-calling without exc_info
|
||||||
|
if hasattr(self, "response_start") and exc_info is None:
|
||||||
|
raise ValueError(
|
||||||
|
"You cannot call start_response a second time without exc_info"
|
||||||
|
)
|
||||||
|
# Extract status code
|
||||||
|
status_code, _ = status.split(" ", 1)
|
||||||
|
status_code = int(status_code)
|
||||||
|
# Extract headers
|
||||||
|
headers = [
|
||||||
|
(name.lower().encode("ascii"), value.encode("ascii"))
|
||||||
|
for name, value in response_headers
|
||||||
|
]
|
||||||
|
# Extract content-length
|
||||||
|
self.response_content_length = None
|
||||||
|
for name, value in response_headers:
|
||||||
|
if name.lower() == "content-length":
|
||||||
|
self.response_content_length = int(value)
|
||||||
|
# Build and send response start message.
|
||||||
|
self.response_start = {
|
||||||
|
"type": "http.response.start",
|
||||||
|
"status": status_code,
|
||||||
|
"headers": headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
@sync_to_async
|
||||||
|
def run_wsgi_app(self, body):
|
||||||
|
"""
|
||||||
|
Called in a subthread to run the WSGI app. We encapsulate like
|
||||||
|
this so that the start_response callable is called in the same thread.
|
||||||
|
"""
|
||||||
|
# Translate the scope and incoming request body into a WSGI environ
|
||||||
|
environ = self.build_environ(self.scope, body)
|
||||||
|
# Run the WSGI app
|
||||||
|
bytes_sent = 0
|
||||||
|
for output in self.wsgi_application(environ, self.start_response):
|
||||||
|
# If this is the first response, include the response headers
|
||||||
|
if not self.response_started:
|
||||||
|
self.response_started = True
|
||||||
|
self.sync_send(self.response_start)
|
||||||
|
# If the application supplies a Content-Length header
|
||||||
|
if self.response_content_length is not None:
|
||||||
|
# The server should not transmit more bytes to the client than the header allows
|
||||||
|
bytes_allowed = self.response_content_length - bytes_sent
|
||||||
|
if len(output) > bytes_allowed:
|
||||||
|
output = output[:bytes_allowed]
|
||||||
|
self.sync_send(
|
||||||
|
{"type": "http.response.body", "body": output, "more_body": True}
|
||||||
|
)
|
||||||
|
bytes_sent += len(output)
|
||||||
|
# The server should stop iterating over the response when enough data has been sent
|
||||||
|
if bytes_sent == self.response_content_length:
|
||||||
|
break
|
||||||
|
# Close connection
|
||||||
|
if not self.response_started:
|
||||||
|
self.response_started = True
|
||||||
|
self.sync_send(self.response_start)
|
||||||
|
self.sync_send({"type": "http.response.body"})
|
|
@ -0,0 +1,48 @@
|
||||||
|
Main contributors
|
||||||
|
-----------------
|
||||||
|
- Hideo Hattori (https://github.com/hhatto)
|
||||||
|
- Steven Myint (https://github.com/myint)
|
||||||
|
- Bill Wendling (https://github.com/gwelymernans)
|
||||||
|
|
||||||
|
Patches
|
||||||
|
-------
|
||||||
|
- Fraser Tweedale (https://github.com/frasertweedale)
|
||||||
|
- clach04 (https://github.com/clach04)
|
||||||
|
- Marc Abramowitz (https://github.com/msabramo)
|
||||||
|
- dellis23 (https://github.com/dellis23)
|
||||||
|
- Sam Vilain (https://github.com/samv)
|
||||||
|
- Florent Xicluna (https://github.com/florentx)
|
||||||
|
- Andras Tim (https://github.com/andras-tim)
|
||||||
|
- tomscytale (https://github.com/tomscytale)
|
||||||
|
- Filip Noetzel (https://github.com/peritus)
|
||||||
|
- Erik Bray (https://github.com/iguananaut)
|
||||||
|
- Christopher Medrela (https://github.com/chrismedrela)
|
||||||
|
- 小明 (https://github.com/dongweiming)
|
||||||
|
- Andy Hayden (https://github.com/hayd)
|
||||||
|
- Fabio Zadrozny (https://github.com/fabioz)
|
||||||
|
- Alex Chernetz (https://github.com/achernet)
|
||||||
|
- Marc Schlaich (https://github.com/schlamar)
|
||||||
|
- E. M. Bray (https://github.com/embray)
|
||||||
|
- Thomas Hisch (https://github.com/thisch)
|
||||||
|
- Florian Best (https://github.com/spaceone)
|
||||||
|
- Ian Clark (https://github.com/evenicoulddoit)
|
||||||
|
- Khairi Hafsham (https://github.com/khairihafsham)
|
||||||
|
- Neil Halelamien (https://github.com/neilsh)
|
||||||
|
- Hashem Nasarat (https://github.com/Hnasar)
|
||||||
|
- Hugo van Kemenade (https://github.com/hugovk)
|
||||||
|
- gmbnomis (https://github.com/gmbnomis)
|
||||||
|
- Samuel Lelièvre (https://github.com/slel)
|
||||||
|
- bigredengineer (https://github.com/bigredengineer)
|
||||||
|
- Kai Chen (https://github.com/kx-chen)
|
||||||
|
- Anthony Sottile (https://github.com/asottile)
|
||||||
|
- 秋葉 (https://github.com/Hanaasagi)
|
||||||
|
- Christian Clauss (https://github.com/cclauss)
|
||||||
|
- tobixx (https://github.com/tobixx)
|
||||||
|
- bigredengineer (https://github.com/bigredengineer)
|
||||||
|
- Bastien Gérard (https://github.com/bagerard)
|
||||||
|
- nicolasbonifas (https://github.com/nicolasbonifas)
|
||||||
|
- Andrii Yurchuk (https://github.com/Ch00k)
|
||||||
|
- José M. Guisado (https://github.com/pvxe)
|
||||||
|
- Dai Truong (https://github.com/NovaDev94)
|
||||||
|
- jnozsc (https://github.com/jnozsc)
|
||||||
|
- Edwin Shepherd (https://github.com/shardros)
|
|
@ -0,0 +1 @@
|
||||||
|
pip
|
|
@ -0,0 +1,23 @@
|
||||||
|
Copyright (C) 2010-2011 Hideo Hattori
|
||||||
|
Copyright (C) 2011-2013 Hideo Hattori, Steven Myint
|
||||||
|
Copyright (C) 2013-2016 Hideo Hattori, Steven Myint, Bill Wendling
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||||
|
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
|
@ -0,0 +1,475 @@
|
||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: autopep8
|
||||||
|
Version: 1.7.0
|
||||||
|
Summary: A tool that automatically formats Python code to conform to the PEP 8 style guide
|
||||||
|
Home-page: https://github.com/hhatto/autopep8
|
||||||
|
Author: Hideo Hattori
|
||||||
|
Author-email: hhatto.jp@gmail.com
|
||||||
|
License: Expat License
|
||||||
|
Keywords: automation,pep8,format,pycodestyle
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Environment :: Console
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 2
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Classifier: Topic :: Software Development :: Quality Assurance
|
||||||
|
License-File: LICENSE
|
||||||
|
License-File: AUTHORS.rst
|
||||||
|
Requires-Dist: pycodestyle (>=2.9.1)
|
||||||
|
Requires-Dist: toml
|
||||||
|
|
||||||
|
========
|
||||||
|
autopep8
|
||||||
|
========
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/pypi/v/autopep8.svg
|
||||||
|
:target: https://pypi.org/project/autopep8/
|
||||||
|
:alt: PyPI Version
|
||||||
|
|
||||||
|
.. image:: https://github.com/hhatto/autopep8/workflows/Python%20package/badge.svg
|
||||||
|
:target: https://github.com/hhatto/autopep8/actions
|
||||||
|
:alt: Build status
|
||||||
|
|
||||||
|
.. image:: https://codecov.io/gh/hhatto/autopep8/branch/main/graph/badge.svg
|
||||||
|
:target: https://codecov.io/gh/hhatto/autopep8
|
||||||
|
:alt: Code Coverage
|
||||||
|
|
||||||
|
autopep8 automatically formats Python code to conform to the `PEP 8`_ style
|
||||||
|
guide. It uses the pycodestyle_ utility to determine what parts of the code
|
||||||
|
needs to be formatted. autopep8 is capable of fixing most of the formatting
|
||||||
|
issues_ that can be reported by pycodestyle.
|
||||||
|
|
||||||
|
.. _PEP 8: https://www.python.org/dev/peps/pep-0008/
|
||||||
|
.. _issues: https://pycodestyle.readthedocs.org/en/latest/intro.html#error-codes
|
||||||
|
|
||||||
|
.. contents::
|
||||||
|
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
From pip::
|
||||||
|
|
||||||
|
$ pip install --upgrade autopep8
|
||||||
|
|
||||||
|
Consider using the ``--user`` option_.
|
||||||
|
|
||||||
|
.. _option: https://pip.pypa.io/en/latest/user_guide/#user-installs
|
||||||
|
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
============
|
||||||
|
|
||||||
|
autopep8 requires pycodestyle_.
|
||||||
|
|
||||||
|
.. _pycodestyle: https://github.com/PyCQA/pycodestyle
|
||||||
|
|
||||||
|
|
||||||
|
Usage
|
||||||
|
=====
|
||||||
|
|
||||||
|
To modify a file in place (with aggressive level 2)::
|
||||||
|
|
||||||
|
$ autopep8 --in-place --aggressive --aggressive <filename>
|
||||||
|
|
||||||
|
Before running autopep8.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import math, sys;
|
||||||
|
|
||||||
|
def example1():
|
||||||
|
####This is a long comment. This should be wrapped to fit within 72 characters.
|
||||||
|
some_tuple=( 1,2, 3,'a' );
|
||||||
|
some_variable={'long':'Long code lines should be wrapped within 79 characters.',
|
||||||
|
'other':[math.pi, 100,200,300,9876543210,'This is a long string that goes on'],
|
||||||
|
'more':{'inner':'This whole logical line should be wrapped.',some_tuple:[1,
|
||||||
|
20,300,40000,500000000,60000000000000000]}}
|
||||||
|
return (some_tuple, some_variable)
|
||||||
|
def example2(): return {'has_key() is deprecated':True}.has_key({'f':2}.has_key(''));
|
||||||
|
class Example3( object ):
|
||||||
|
def __init__ ( self, bar ):
|
||||||
|
#Comments should have a space after the hash.
|
||||||
|
if bar : bar+=1; bar=bar* bar ; return bar
|
||||||
|
else:
|
||||||
|
some_string = """
|
||||||
|
Indentation in multiline strings should not be touched.
|
||||||
|
Only actual code should be reindented.
|
||||||
|
"""
|
||||||
|
return (sys.path, some_string)
|
||||||
|
|
||||||
|
After running autopep8.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def example1():
|
||||||
|
# This is a long comment. This should be wrapped to fit within 72
|
||||||
|
# characters.
|
||||||
|
some_tuple = (1, 2, 3, 'a')
|
||||||
|
some_variable = {
|
||||||
|
'long': 'Long code lines should be wrapped within 79 characters.',
|
||||||
|
'other': [
|
||||||
|
math.pi,
|
||||||
|
100,
|
||||||
|
200,
|
||||||
|
300,
|
||||||
|
9876543210,
|
||||||
|
'This is a long string that goes on'],
|
||||||
|
'more': {
|
||||||
|
'inner': 'This whole logical line should be wrapped.',
|
||||||
|
some_tuple: [
|
||||||
|
1,
|
||||||
|
20,
|
||||||
|
300,
|
||||||
|
40000,
|
||||||
|
500000000,
|
||||||
|
60000000000000000]}}
|
||||||
|
return (some_tuple, some_variable)
|
||||||
|
|
||||||
|
|
||||||
|
def example2(): return ('' in {'f': 2}) in {'has_key() is deprecated': True}
|
||||||
|
|
||||||
|
|
||||||
|
class Example3(object):
|
||||||
|
def __init__(self, bar):
|
||||||
|
# Comments should have a space after the hash.
|
||||||
|
if bar:
|
||||||
|
bar += 1
|
||||||
|
bar = bar * bar
|
||||||
|
return bar
|
||||||
|
else:
|
||||||
|
some_string = """
|
||||||
|
Indentation in multiline strings should not be touched.
|
||||||
|
Only actual code should be reindented.
|
||||||
|
"""
|
||||||
|
return (sys.path, some_string)
|
||||||
|
|
||||||
|
Options::
|
||||||
|
|
||||||
|
usage: autopep8 [-h] [--version] [-v] [-d] [-i] [--global-config filename]
|
||||||
|
[--ignore-local-config] [-r] [-j n] [-p n] [-a]
|
||||||
|
[--experimental] [--exclude globs] [--list-fixes]
|
||||||
|
[--ignore errors] [--select errors] [--max-line-length n]
|
||||||
|
[--line-range line line] [--hang-closing] [--exit-code]
|
||||||
|
[files [files ...]]
|
||||||
|
|
||||||
|
Automatically formats Python code to conform to the PEP 8 style guide.
|
||||||
|
|
||||||
|
positional arguments:
|
||||||
|
files files to format or '-' for standard in
|
||||||
|
|
||||||
|
optional arguments:
|
||||||
|
-h, --help show this help message and exit
|
||||||
|
--version show program's version number and exit
|
||||||
|
-v, --verbose print verbose messages; multiple -v result in more
|
||||||
|
verbose messages
|
||||||
|
-d, --diff print the diff for the fixed source
|
||||||
|
-i, --in-place make changes to files in place
|
||||||
|
--global-config filename
|
||||||
|
path to a global pep8 config file; if this file does
|
||||||
|
not exist then this is ignored (default:
|
||||||
|
~/.config/pep8)
|
||||||
|
--ignore-local-config
|
||||||
|
don't look for and apply local config files; if not
|
||||||
|
passed, defaults are updated with any config files in
|
||||||
|
the project's root directory
|
||||||
|
-r, --recursive run recursively over directories; must be used with
|
||||||
|
--in-place or --diff
|
||||||
|
-j n, --jobs n number of parallel jobs; match CPU count if value is
|
||||||
|
less than 1
|
||||||
|
-p n, --pep8-passes n
|
||||||
|
maximum number of additional pep8 passes (default:
|
||||||
|
infinite)
|
||||||
|
-a, --aggressive enable non-whitespace changes; multiple -a result in
|
||||||
|
more aggressive changes
|
||||||
|
--experimental enable experimental fixes
|
||||||
|
--exclude globs exclude file/directory names that match these comma-
|
||||||
|
separated globs
|
||||||
|
--list-fixes list codes for fixes; used by --ignore and --select
|
||||||
|
--ignore errors do not fix these errors/warnings (default:
|
||||||
|
E226,E24,W50,W690)
|
||||||
|
--select errors fix only these errors/warnings (e.g. E4,W)
|
||||||
|
--max-line-length n set maximum allowed line length (default: 79)
|
||||||
|
--line-range line line, --range line line
|
||||||
|
only fix errors found within this inclusive range of
|
||||||
|
line numbers (e.g. 1 99); line numbers are indexed at
|
||||||
|
1
|
||||||
|
--hang-closing hang-closing option passed to pycodestyle
|
||||||
|
--exit-code change to behavior of exit code. default behavior of
|
||||||
|
return value, 0 is no differences, 1 is error exit.
|
||||||
|
return 2 when add this option. 2 is exists
|
||||||
|
differences.
|
||||||
|
|
||||||
|
|
||||||
|
Features
|
||||||
|
========
|
||||||
|
|
||||||
|
autopep8 fixes the following issues_ reported by pycodestyle_::
|
||||||
|
|
||||||
|
E101 - Reindent all lines.
|
||||||
|
E11 - Fix indentation.
|
||||||
|
E121 - Fix indentation to be a multiple of four.
|
||||||
|
E122 - Add absent indentation for hanging indentation.
|
||||||
|
E123 - Align closing bracket to match opening bracket.
|
||||||
|
E124 - Align closing bracket to match visual indentation.
|
||||||
|
E125 - Indent to distinguish line from next logical line.
|
||||||
|
E126 - Fix over-indented hanging indentation.
|
||||||
|
E127 - Fix visual indentation.
|
||||||
|
E128 - Fix visual indentation.
|
||||||
|
E129 - Fix visual indentation.
|
||||||
|
E131 - Fix hanging indent for unaligned continuation line.
|
||||||
|
E133 - Fix missing indentation for closing bracket.
|
||||||
|
E20 - Remove extraneous whitespace.
|
||||||
|
E211 - Remove extraneous whitespace.
|
||||||
|
E22 - Fix extraneous whitespace around keywords.
|
||||||
|
E224 - Remove extraneous whitespace around operator.
|
||||||
|
E225 - Fix missing whitespace around operator.
|
||||||
|
E226 - Fix missing whitespace around arithmetic operator.
|
||||||
|
E227 - Fix missing whitespace around bitwise/shift operator.
|
||||||
|
E228 - Fix missing whitespace around modulo operator.
|
||||||
|
E231 - Add missing whitespace.
|
||||||
|
E241 - Fix extraneous whitespace around keywords.
|
||||||
|
E242 - Remove extraneous whitespace around operator.
|
||||||
|
E251 - Remove whitespace around parameter '=' sign.
|
||||||
|
E252 - Missing whitespace around parameter equals.
|
||||||
|
E26 - Fix spacing after comment hash for inline comments.
|
||||||
|
E265 - Fix spacing after comment hash for block comments.
|
||||||
|
E266 - Fix too many leading '#' for block comments.
|
||||||
|
E27 - Fix extraneous whitespace around keywords.
|
||||||
|
E301 - Add missing blank line.
|
||||||
|
E302 - Add missing 2 blank lines.
|
||||||
|
E303 - Remove extra blank lines.
|
||||||
|
E304 - Remove blank line following function decorator.
|
||||||
|
E305 - Expected 2 blank lines after end of function or class.
|
||||||
|
E306 - Expected 1 blank line before a nested definition.
|
||||||
|
E401 - Put imports on separate lines.
|
||||||
|
E402 - Fix module level import not at top of file
|
||||||
|
E501 - Try to make lines fit within --max-line-length characters.
|
||||||
|
E502 - Remove extraneous escape of newline.
|
||||||
|
E701 - Put colon-separated compound statement on separate lines.
|
||||||
|
E70 - Put semicolon-separated compound statement on separate lines.
|
||||||
|
E711 - Fix comparison with None.
|
||||||
|
E712 - Fix comparison with boolean.
|
||||||
|
E713 - Use 'not in' for test for membership.
|
||||||
|
E714 - Use 'is not' test for object identity.
|
||||||
|
E721 - Use "isinstance()" instead of comparing types directly.
|
||||||
|
E722 - Fix bare except.
|
||||||
|
E731 - Use a def when use do not assign a lambda expression.
|
||||||
|
W291 - Remove trailing whitespace.
|
||||||
|
W292 - Add a single newline at the end of the file.
|
||||||
|
W293 - Remove trailing whitespace on blank line.
|
||||||
|
W391 - Remove trailing blank lines.
|
||||||
|
W503 - Fix line break before binary operator.
|
||||||
|
W504 - Fix line break after binary operator.
|
||||||
|
W601 - Use "in" rather than "has_key()".
|
||||||
|
W602 - Fix deprecated form of raising exception.
|
||||||
|
W603 - Use "!=" instead of "<>"
|
||||||
|
W604 - Use "repr()" instead of backticks.
|
||||||
|
W605 - Fix invalid escape sequence 'x'.
|
||||||
|
W690 - Fix various deprecated code (via lib2to3).
|
||||||
|
|
||||||
|
autopep8 also fixes some issues not found by pycodestyle_.
|
||||||
|
|
||||||
|
- Correct deprecated or non-idiomatic Python code (via ``lib2to3``). Use this
|
||||||
|
for making Python 2.7 code more compatible with Python 3. (This is triggered
|
||||||
|
if ``W690`` is enabled.)
|
||||||
|
- Normalize files with mixed line endings.
|
||||||
|
- Put a blank line between a class docstring and its first method
|
||||||
|
declaration. (Enabled with ``E301``.)
|
||||||
|
- Remove blank lines between a function declaration and its docstring. (Enabled
|
||||||
|
with ``E303``.)
|
||||||
|
|
||||||
|
autopep8 avoids fixing some issues found by pycodestyle_.
|
||||||
|
|
||||||
|
- ``E112``/``E113`` for non comments are reports of bad indentation that break
|
||||||
|
syntax rules. These should not be modified at all.
|
||||||
|
- ``E265``, which refers to spacing after comment hash, is ignored if the
|
||||||
|
comment looks like code. autopep8 avoids modifying these since they are not
|
||||||
|
real comments. If you really want to get rid of the pycodestyle_ warning,
|
||||||
|
consider just removing the commented-out code. (This can be automated via
|
||||||
|
eradicate_.)
|
||||||
|
|
||||||
|
.. _eradicate: https://github.com/myint/eradicate
|
||||||
|
|
||||||
|
|
||||||
|
More advanced usage
|
||||||
|
===================
|
||||||
|
|
||||||
|
By default autopep8 only makes whitespace changes. Thus, by default, it does
|
||||||
|
not fix ``E711`` and ``E712``. (Changing ``x == None`` to ``x is None`` may
|
||||||
|
change the meaning of the program if ``x`` has its ``__eq__`` method
|
||||||
|
overridden.) Nor does it correct deprecated code ``W6``. To enable these
|
||||||
|
more aggressive fixes, use the ``--aggressive`` option::
|
||||||
|
|
||||||
|
$ autopep8 --aggressive <filename>
|
||||||
|
|
||||||
|
Use multiple ``--aggressive`` to increase the aggressiveness level. For
|
||||||
|
example, ``E712`` requires aggressiveness level 2 (since ``x == True`` could be
|
||||||
|
changed to either ``x`` or ``x is True``, but autopep8 chooses the former).
|
||||||
|
|
||||||
|
``--aggressive`` will also shorten lines more aggressively. It will also remove
|
||||||
|
trailing whitespace more aggressively. (Usually, we don't touch trailing
|
||||||
|
whitespace in docstrings and other multiline strings. And to do even more
|
||||||
|
aggressive changes to docstrings, use docformatter_.)
|
||||||
|
|
||||||
|
.. _docformatter: https://github.com/myint/docformatter
|
||||||
|
|
||||||
|
To enable only a subset of the fixes, use the ``--select`` option. For example,
|
||||||
|
to fix various types of indentation issues::
|
||||||
|
|
||||||
|
$ autopep8 --select=E1,W1 <filename>
|
||||||
|
|
||||||
|
Similarly, to just fix deprecated code::
|
||||||
|
|
||||||
|
$ autopep8 --aggressive --select=W6 <filename>
|
||||||
|
|
||||||
|
The above is useful when trying to port a single code base to work with both
|
||||||
|
Python 2 and Python 3 at the same time.
|
||||||
|
|
||||||
|
If the file being fixed is large, you may want to enable verbose progress
|
||||||
|
messages::
|
||||||
|
|
||||||
|
$ autopep8 -v <filename>
|
||||||
|
|
||||||
|
Passing in ``--experimental`` enables the following functionality:
|
||||||
|
|
||||||
|
- Shortens code lines by taking its length into account
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
$ autopep8 --experimental <filename>
|
||||||
|
|
||||||
|
Disabling line-by-line
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
It is possible to disable autopep8 untill it it turned back on again in the file, using ``autopep8: off`` and then renabling ``autopep8: on``.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# autopep8: off
|
||||||
|
[
|
||||||
|
[23, 23, 13, 43],
|
||||||
|
[32, 34, 34, 34],
|
||||||
|
[56, 34, 34, 11],
|
||||||
|
[10, 10, 10, 10],
|
||||||
|
]
|
||||||
|
# autopep8: on
|
||||||
|
|
||||||
|
``fmt: off`` and ``fmt: on`` are also valid.
|
||||||
|
|
||||||
|
Use as a module
|
||||||
|
===============
|
||||||
|
|
||||||
|
The simplest way of using autopep8 as a module is via the ``fix_code()``
|
||||||
|
function:
|
||||||
|
|
||||||
|
>>> import autopep8
|
||||||
|
>>> autopep8.fix_code('x= 123\n')
|
||||||
|
'x = 123\n'
|
||||||
|
|
||||||
|
Or with options:
|
||||||
|
|
||||||
|
>>> import autopep8
|
||||||
|
>>> autopep8.fix_code('x.has_key(y)\n',
|
||||||
|
... options={'aggressive': 1})
|
||||||
|
'y in x\n'
|
||||||
|
>>> autopep8.fix_code('print( 123 )\n',
|
||||||
|
... options={'ignore': ['E']})
|
||||||
|
'print( 123 )\n'
|
||||||
|
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
=============
|
||||||
|
|
||||||
|
By default, if ``$HOME/.config/pycodestyle`` (``~\.pycodestyle`` in Windows
|
||||||
|
environment) exists, it will be used as global configuration file.
|
||||||
|
Alternatively, you can specify the global configuration file with the
|
||||||
|
``--global-config`` option.
|
||||||
|
|
||||||
|
Also, if ``setup.cfg``, ``tox.ini``, ``.pep8`` and ``.flake8`` files exist
|
||||||
|
in the directory where the target file exists, it will be used as the
|
||||||
|
configuration file.
|
||||||
|
|
||||||
|
``pep8``, ``pycodestyle``, and ``flake8`` can be used as a section.
|
||||||
|
|
||||||
|
configuration file example::
|
||||||
|
|
||||||
|
[pycodestyle]
|
||||||
|
max_line_length = 120
|
||||||
|
ignore = E501
|
||||||
|
|
||||||
|
pyproject.toml
|
||||||
|
--------------
|
||||||
|
|
||||||
|
autopep8 can also use ``pyproject.toml``.
|
||||||
|
The section must be ``[tool.autopep8]``, and ``pyproject.toml`` takes precedence
|
||||||
|
over any other configuration files.
|
||||||
|
|
||||||
|
configuration file example::
|
||||||
|
|
||||||
|
[tool.autopep8]
|
||||||
|
max_line_length = 120
|
||||||
|
ignore = "E501,W6" # or ["E501", "W6"]
|
||||||
|
in-place = true
|
||||||
|
recursive = true
|
||||||
|
aggressive = 3
|
||||||
|
|
||||||
|
|
||||||
|
Testing
|
||||||
|
=======
|
||||||
|
|
||||||
|
Test cases are in ``test/test_autopep8.py``. They can be run directly via
|
||||||
|
``python test/test_autopep8.py`` or via tox_. The latter is useful for
|
||||||
|
testing against multiple Python interpreters. (We currently test against
|
||||||
|
CPython versions 2.7, 3.6 3.7 and 3.8. We also test against PyPy.)
|
||||||
|
|
||||||
|
.. _`tox`: https://pypi.org/project/tox/
|
||||||
|
|
||||||
|
Broad spectrum testing is available via ``test/acid.py``. This script runs
|
||||||
|
autopep8 against Python code and checks for correctness and completeness of the
|
||||||
|
code fixes. It can check that the bytecode remains identical.
|
||||||
|
``test/acid_pypi.py`` makes use of ``acid.py`` to test against the latest
|
||||||
|
released packages on PyPI.
|
||||||
|
|
||||||
|
|
||||||
|
Troubleshooting
|
||||||
|
===============
|
||||||
|
|
||||||
|
``pkg_resources.DistributionNotFound``
|
||||||
|
--------------------------------------
|
||||||
|
|
||||||
|
If you are using an ancient version of ``setuptools``, you might encounter
|
||||||
|
``pkg_resources.DistributionNotFound`` when trying to run ``autopep8``. Try
|
||||||
|
upgrading ``setuptools`` to workaround this ``setuptools`` problem::
|
||||||
|
|
||||||
|
$ pip install --upgrade setuptools
|
||||||
|
|
||||||
|
Use ``sudo`` if you are installing to the system.
|
||||||
|
|
||||||
|
|
||||||
|
Links
|
||||||
|
=====
|
||||||
|
|
||||||
|
* PyPI_
|
||||||
|
* GitHub_
|
||||||
|
* `Travis CI`_
|
||||||
|
* Coveralls_
|
||||||
|
|
||||||
|
.. _PyPI: https://pypi.org/project/autopep8/
|
||||||
|
.. _GitHub: https://github.com/hhatto/autopep8
|
||||||
|
.. _`Travis CI`: https://travis-ci.org/hhatto/autopep8
|
||||||
|
.. _`Coveralls`: https://coveralls.io/r/hhatto/autopep8
|
|
@ -0,0 +1,11 @@
|
||||||
|
../../../bin/autopep8,sha256=t1G3vaa203Z2FX0MpL9wajptBX3IMXUOxsS53ANwkAI,246
|
||||||
|
__pycache__/autopep8.cpython-38.pyc,,
|
||||||
|
autopep8-1.7.0.dist-info/AUTHORS.rst,sha256=tiTPsbzGl9dtXCMEWXbWSV1zan1M-BoWtiixs46GIWk,2003
|
||||||
|
autopep8-1.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
autopep8-1.7.0.dist-info/LICENSE,sha256=jR0COOSFQ0QZFMqwdB1N4-Bwobg2f3h69fIJr7YLCWo,1181
|
||||||
|
autopep8-1.7.0.dist-info/METADATA,sha256=uf9qENqUy_VnrVYXoyCkoLVjkcbTVut_FPcntXpbFQk,17302
|
||||||
|
autopep8-1.7.0.dist-info/RECORD,,
|
||||||
|
autopep8-1.7.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
|
||||||
|
autopep8-1.7.0.dist-info/entry_points.txt,sha256=zEduLXzN3YzTTZBwxjhEKW7PVLqSqVG8-ocCaCR3P4A,43
|
||||||
|
autopep8-1.7.0.dist-info/top_level.txt,sha256=s2x-di3QBwGxr7kd5xErt2pom8dsFRdINbmwsOEgLfU,9
|
||||||
|
autopep8.py,sha256=DS3qpM_YacgSCQWofj_6yRbkFr12T_IX1fS9HShhgYs,156300
|
|
@ -0,0 +1,6 @@
|
||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.34.2)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py2-none-any
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
|
@ -0,0 +1,2 @@
|
||||||
|
[console_scripts]
|
||||||
|
autopep8 = autopep8:main
|
|
@ -0,0 +1 @@
|
||||||
|
autopep8
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1 @@
|
||||||
|
pip
|
|
@ -0,0 +1,15 @@
|
||||||
|
Apache Software License 2.0
|
||||||
|
|
||||||
|
Copyright (c) 2020, Paul Ganssle (Google)
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
201
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/LICENSE_APACHE
vendored
Normal file
201
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/LICENSE_APACHE
vendored
Normal file
|
@ -0,0 +1,201 @@
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
114
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/METADATA
vendored
Normal file
114
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/METADATA
vendored
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: backports.zoneinfo
|
||||||
|
Version: 0.2.1
|
||||||
|
Summary: Backport of the standard library zoneinfo module
|
||||||
|
Home-page: https://github.com/pganssle/zoneinfo
|
||||||
|
Author: Python Software Foundation
|
||||||
|
Author-email: datetime-sig@python.org
|
||||||
|
License: Apache-2.0
|
||||||
|
Project-URL: Source, https://github.com/pganssle/zoneinfo
|
||||||
|
Project-URL: Documentation, https://zoneinfo.readthedocs.io/en/latest/
|
||||||
|
Project-URL: Bug Reports, https://github.com/pganssle/zoneinfo/issues
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Development Status :: 4 - Beta
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: Apache Software License
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Requires-Python: >=3.6
|
||||||
|
Description-Content-Type: text/markdown
|
||||||
|
Requires-Dist: importlib-resources ; python_version < "3.7"
|
||||||
|
Provides-Extra: tzdata
|
||||||
|
Requires-Dist: tzdata ; extra == 'tzdata'
|
||||||
|
|
||||||
|
# `backports.zoneinfo`: Backport of the standard library module `zoneinfo`
|
||||||
|
|
||||||
|
This package was originally the reference implementation for [PEP 615](https://www.python.org/dev/peps/pep-0615/), which proposes support for the IANA time zone database in the standard library, and now serves as a backport to Python 3.6+ (including PyPy).
|
||||||
|
|
||||||
|
This exposes the `backports.zoneinfo` module, which is a backport of the [`zoneinfo`](https://docs.python.org/3.9/library/zoneinfo.html#module-zoneinfo) module. The backport's documentation can be found [on readthedocs](https://zoneinfo.readthedocs.io/en/latest/).
|
||||||
|
|
||||||
|
The module uses the system time zone data if available, and falls back to the [`tzdata`](https://tzdata.readthedocs.io/en/latest/) package (available [on PyPI](https://pypi.org/project/tzdata/)) if installed.
|
||||||
|
|
||||||
|
## Installation and depending on this library
|
||||||
|
|
||||||
|
This module is called [`backports.zoneinfo`](https://pypi.org/project/backports.zoneinfo) on PyPI. To install it in your local environment, use:
|
||||||
|
|
||||||
|
```
|
||||||
|
pip install backports.zoneinfo
|
||||||
|
```
|
||||||
|
|
||||||
|
Or (particularly on Windows), you can also use the `tzdata` extra (which basically just declares a dependency on `tzdata`, so this doesn't actually save you any typing 😅):
|
||||||
|
|
||||||
|
```
|
||||||
|
pip install backports.zoneinfo[tzdata]
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want to use this in your application, it is best to use [PEP 508 environment markers](https://www.python.org/dev/peps/pep-0508/#environment-markers) to declare a dependency *conditional on the Python version*:
|
||||||
|
|
||||||
|
```
|
||||||
|
backports.zoneinfo;python_version<"3.9"
|
||||||
|
```
|
||||||
|
|
||||||
|
Support for `backports.zoneinfo` in Python 3.9+ is currently minimal, since it is expected that you would use the standard library `zoneinfo` module instead.
|
||||||
|
|
||||||
|
## Use
|
||||||
|
|
||||||
|
The `backports.zoneinfo` module should be a drop-in replacement for the Python 3.9 standard library module `zoneinfo`. If you do not support anything earlier than Python 3.9, **you do not need this library**; if you are supporting Python 3.6+, you may want to use this idiom to "fall back" to ``backports.zoneinfo``:
|
||||||
|
|
||||||
|
```python
|
||||||
|
try:
|
||||||
|
import zoneinfo
|
||||||
|
except ImportError:
|
||||||
|
from backports import zoneinfo
|
||||||
|
```
|
||||||
|
|
||||||
|
To get access to time zones with this module, construct a `ZoneInfo` object and attach it to your datetime:
|
||||||
|
|
||||||
|
```python
|
||||||
|
>>> from backports.zoneinfo import ZoneInfo
|
||||||
|
>>> from datetime import datetime, timedelta, timezone
|
||||||
|
>>> dt = datetime(1992, 3, 1, tzinfo=ZoneInfo("Europe/Minsk"))
|
||||||
|
>>> print(dt)
|
||||||
|
1992-03-01 00:00:00+02:00
|
||||||
|
>>> print(dt.utcoffset())
|
||||||
|
2:00:00
|
||||||
|
>>> print(dt.tzname())
|
||||||
|
EET
|
||||||
|
```
|
||||||
|
|
||||||
|
Arithmetic works as expected without the need for a "normalization" step:
|
||||||
|
|
||||||
|
```python
|
||||||
|
>>> dt += timedelta(days=90)
|
||||||
|
>>> print(dt)
|
||||||
|
1992-05-30 00:00:00+03:00
|
||||||
|
>>> dt.utcoffset()
|
||||||
|
datetime.timedelta(seconds=10800)
|
||||||
|
>>> dt.tzname()
|
||||||
|
'EEST'
|
||||||
|
```
|
||||||
|
|
||||||
|
Ambiguous and imaginary times are handled using the `fold` attribute added in [PEP 495](https://www.python.org/dev/peps/pep-0495/):
|
||||||
|
|
||||||
|
```python
|
||||||
|
>>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Chicago"))
|
||||||
|
>>> print(dt)
|
||||||
|
2020-11-01 01:00:00-05:00
|
||||||
|
>>> print(dt.replace(fold=1))
|
||||||
|
2020-11-01 01:00:00-06:00
|
||||||
|
|
||||||
|
>>> UTC = timezone.utc
|
||||||
|
>>> print(dt.astimezone(UTC))
|
||||||
|
2020-11-01 06:00:00+00:00
|
||||||
|
>>> print(dt.replace(fold=1).astimezone(UTC))
|
||||||
|
2020-11-01 07:00:00+00:00
|
||||||
|
```
|
||||||
|
|
||||||
|
# Contributing
|
||||||
|
|
||||||
|
Currently we are not accepting contributions to this repository because we have not put the CLA in place and we would like to avoid complicating the process of adoption into the standard library. Contributions to [CPython](https://github.com/python/cpython) will eventually be backported to this repository — see [the Python developer's guide](https://devguide.python.org/) for more information on how to contribute to CPython.
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/LICENSE,sha256=M-jlAC01EtP8wigrmV5rrZ0zR4G5xawxhD9ASQDh87Q,592
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/LICENSE_APACHE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/METADATA,sha256=KGF_lTJSMsl-lWaaSl4jC5m_ovX4PIwsEwNMeEaaEuU,4721
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/RECORD,,
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/WHEEL,sha256=0wGQBSV-BlYX9ESMZyEiLMpXIYwrZGj6McPYyDp_RjA,108
|
||||||
|
backports.zoneinfo-0.2.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
|
||||||
|
backports/__init__.py,sha256=KNscjLyptBUeU07KtwwRFdTJqAVURMf4GjM9CqXnxMI,227
|
||||||
|
backports/__pycache__/__init__.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/__init__.py,sha256=atCU_fMgkQIE-DCjSJOist9GAqaWQAGhrDA5bCXPQxU,1235
|
||||||
|
backports/zoneinfo/__init__.pyi,sha256=1_T7dB1-Fh1s7f2zNa1QrP9pO_aBHemeaIiJBPQz3Fs,1234
|
||||||
|
backports/zoneinfo/__pycache__/__init__.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/__pycache__/_common.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/__pycache__/_tzpath.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/__pycache__/_version.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/__pycache__/_zoneinfo.cpython-38.pyc,,
|
||||||
|
backports/zoneinfo/_common.py,sha256=gKuY_V-YKbaabT5VRw1MWYJxfuiDDLCg7UolYYI42rw,5469
|
||||||
|
backports/zoneinfo/_czoneinfo.cpython-38-x86_64-linux-gnu.so,sha256=h8QcJrT32tvEhZdGnJ0V6VE0Ea7ELE6m4U4W3WXQUU0,148747
|
||||||
|
backports/zoneinfo/_tzpath.py,sha256=yFkwr6qKvcC6wtuhrJRh487x57L39up8bpl9w-gOGX8,5860
|
||||||
|
backports/zoneinfo/_version.py,sha256=HfjVOrpTnmZ-xVFCYSVmX50EXaBQeJteUHG-PD6iQs8,22
|
||||||
|
backports/zoneinfo/_zoneinfo.py,sha256=B1W3KlPeUHjk1qpBybGHorPMvfcLwGF7dtYj-IFAxY4,24353
|
||||||
|
backports/zoneinfo/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@ -0,0 +1,5 @@
|
||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.34.2)
|
||||||
|
Root-Is-Purelib: false
|
||||||
|
Tag: cp38-cp38-manylinux1_x86_64
|
||||||
|
|
1
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/top_level.txt
vendored
Normal file
1
env/lib/python3.8/site-packages/backports.zoneinfo-0.2.1.dist-info/top_level.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
backports
|
|
@ -0,0 +1,5 @@
|
||||||
|
# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
|
||||||
|
# This always goes inside of a namespace package's __init__.py
|
||||||
|
from pkgutil import extend_path
|
||||||
|
|
||||||
|
__path__ = extend_path(__path__, __name__) # type: ignore
|
BIN
env/lib/python3.8/site-packages/backports/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
|
@ -0,0 +1,49 @@
|
||||||
|
__all__ = [
|
||||||
|
"ZoneInfo",
|
||||||
|
"reset_tzpath",
|
||||||
|
"available_timezones",
|
||||||
|
"TZPATH",
|
||||||
|
"ZoneInfoNotFoundError",
|
||||||
|
"InvalidTZPathWarning",
|
||||||
|
]
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from . import _tzpath
|
||||||
|
from ._common import ZoneInfoNotFoundError
|
||||||
|
from ._version import __version__
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ._czoneinfo import ZoneInfo
|
||||||
|
except ImportError: # pragma: nocover
|
||||||
|
from ._zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
reset_tzpath = _tzpath.reset_tzpath
|
||||||
|
available_timezones = _tzpath.available_timezones
|
||||||
|
InvalidTZPathWarning = _tzpath.InvalidTZPathWarning
|
||||||
|
|
||||||
|
if sys.version_info < (3, 7):
|
||||||
|
# Module-level __getattr__ was added in Python 3.7, so instead of lazily
|
||||||
|
# populating TZPATH on every access, we will register a callback with
|
||||||
|
# reset_tzpath to update the top-level tuple.
|
||||||
|
TZPATH = _tzpath.TZPATH
|
||||||
|
|
||||||
|
def _tzpath_callback(new_tzpath):
|
||||||
|
global TZPATH
|
||||||
|
TZPATH = new_tzpath
|
||||||
|
|
||||||
|
_tzpath.TZPATH_CALLBACKS.append(_tzpath_callback)
|
||||||
|
del _tzpath_callback
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def __getattr__(name):
|
||||||
|
if name == "TZPATH":
|
||||||
|
return _tzpath.TZPATH
|
||||||
|
else:
|
||||||
|
raise AttributeError(
|
||||||
|
f"module {__name__!r} has no attribute {name!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def __dir__():
|
||||||
|
return sorted(list(globals()) + ["TZPATH"])
|
|
@ -0,0 +1,45 @@
|
||||||
|
import os
|
||||||
|
import typing
|
||||||
|
from datetime import datetime, tzinfo
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Iterable,
|
||||||
|
Optional,
|
||||||
|
Protocol,
|
||||||
|
Sequence,
|
||||||
|
Set,
|
||||||
|
Type,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
_T = typing.TypeVar("_T", bound="ZoneInfo")
|
||||||
|
|
||||||
|
class _IOBytes(Protocol):
|
||||||
|
def read(self, __size: int) -> bytes: ...
|
||||||
|
def seek(self, __size: int, __whence: int = ...) -> Any: ...
|
||||||
|
|
||||||
|
class ZoneInfo(tzinfo):
|
||||||
|
@property
|
||||||
|
def key(self) -> str: ...
|
||||||
|
def __init__(self, key: str) -> None: ...
|
||||||
|
@classmethod
|
||||||
|
def no_cache(cls: Type[_T], key: str) -> _T: ...
|
||||||
|
@classmethod
|
||||||
|
def from_file(
|
||||||
|
cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ...
|
||||||
|
) -> _T: ...
|
||||||
|
@classmethod
|
||||||
|
def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ...
|
||||||
|
|
||||||
|
# Note: Both here and in clear_cache, the types allow the use of `str` where
|
||||||
|
# a sequence of strings is required. This should be remedied if a solution
|
||||||
|
# to this typing bug is found: https://github.com/python/typing/issues/256
|
||||||
|
def reset_tzpath(
|
||||||
|
to: Optional[Sequence[Union[os.PathLike, str]]] = ...
|
||||||
|
) -> None: ...
|
||||||
|
def available_timezones() -> Set[str]: ...
|
||||||
|
|
||||||
|
TZPATH: Sequence[str]
|
||||||
|
|
||||||
|
class ZoneInfoNotFoundError(KeyError): ...
|
||||||
|
class InvalidTZPathWarning(RuntimeWarning): ...
|
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_common.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_common.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_tzpath.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_tzpath.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_version.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_version.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_zoneinfo.cpython-38.pyc
vendored
Normal file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/__pycache__/_zoneinfo.cpython-38.pyc
vendored
Normal file
Binary file not shown.
|
@ -0,0 +1,171 @@
|
||||||
|
import struct
|
||||||
|
|
||||||
|
|
||||||
|
def load_tzdata(key):
|
||||||
|
try:
|
||||||
|
import importlib.resources as importlib_resources
|
||||||
|
except ImportError:
|
||||||
|
import importlib_resources
|
||||||
|
|
||||||
|
components = key.split("/")
|
||||||
|
package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
|
||||||
|
resource_name = components[-1]
|
||||||
|
|
||||||
|
try:
|
||||||
|
return importlib_resources.open_binary(package_name, resource_name)
|
||||||
|
except (ImportError, FileNotFoundError, UnicodeEncodeError):
|
||||||
|
# There are three types of exception that can be raised that all amount
|
||||||
|
# to "we cannot find this key":
|
||||||
|
#
|
||||||
|
# ImportError: If package_name doesn't exist (e.g. if tzdata is not
|
||||||
|
# installed, or if there's an error in the folder name like
|
||||||
|
# Amrica/New_York)
|
||||||
|
# FileNotFoundError: If resource_name doesn't exist in the package
|
||||||
|
# (e.g. Europe/Krasnoy)
|
||||||
|
# UnicodeEncodeError: If package_name or resource_name are not UTF-8,
|
||||||
|
# such as keys containing a surrogate character.
|
||||||
|
raise ZoneInfoNotFoundError(f"No time zone found with key {key}")
|
||||||
|
|
||||||
|
|
||||||
|
def load_data(fobj):
|
||||||
|
header = _TZifHeader.from_file(fobj)
|
||||||
|
|
||||||
|
if header.version == 1:
|
||||||
|
time_size = 4
|
||||||
|
time_type = "l"
|
||||||
|
else:
|
||||||
|
# Version 2+ has 64-bit integer transition times
|
||||||
|
time_size = 8
|
||||||
|
time_type = "q"
|
||||||
|
|
||||||
|
# Version 2+ also starts with a Version 1 header and data, which
|
||||||
|
# we need to skip now
|
||||||
|
skip_bytes = (
|
||||||
|
header.timecnt * 5 # Transition times and types
|
||||||
|
+ header.typecnt * 6 # Local time type records
|
||||||
|
+ header.charcnt # Time zone designations
|
||||||
|
+ header.leapcnt * 8 # Leap second records
|
||||||
|
+ header.isstdcnt # Standard/wall indicators
|
||||||
|
+ header.isutcnt # UT/local indicators
|
||||||
|
)
|
||||||
|
|
||||||
|
fobj.seek(skip_bytes, 1)
|
||||||
|
|
||||||
|
# Now we need to read the second header, which is not the same
|
||||||
|
# as the first
|
||||||
|
header = _TZifHeader.from_file(fobj)
|
||||||
|
|
||||||
|
typecnt = header.typecnt
|
||||||
|
timecnt = header.timecnt
|
||||||
|
charcnt = header.charcnt
|
||||||
|
|
||||||
|
# The data portion starts with timecnt transitions and indices
|
||||||
|
if timecnt:
|
||||||
|
trans_list_utc = struct.unpack(
|
||||||
|
f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
|
||||||
|
)
|
||||||
|
trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
|
||||||
|
else:
|
||||||
|
trans_list_utc = ()
|
||||||
|
trans_idx = ()
|
||||||
|
|
||||||
|
# Read the ttinfo struct, (utoff, isdst, abbrind)
|
||||||
|
if typecnt:
|
||||||
|
utcoff, isdst, abbrind = zip(
|
||||||
|
*(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
utcoff = ()
|
||||||
|
isdst = ()
|
||||||
|
abbrind = ()
|
||||||
|
|
||||||
|
# Now read the abbreviations. They are null-terminated strings, indexed
|
||||||
|
# not by position in the array but by position in the unsplit
|
||||||
|
# abbreviation string. I suppose this makes more sense in C, which uses
|
||||||
|
# null to terminate the strings, but it's inconvenient here...
|
||||||
|
abbr_vals = {}
|
||||||
|
abbr_chars = fobj.read(charcnt)
|
||||||
|
|
||||||
|
def get_abbr(idx):
|
||||||
|
# Gets a string starting at idx and running until the next \x00
|
||||||
|
#
|
||||||
|
# We cannot pre-populate abbr_vals by splitting on \x00 because there
|
||||||
|
# are some zones that use subsets of longer abbreviations, like so:
|
||||||
|
#
|
||||||
|
# LMT\x00AHST\x00HDT\x00
|
||||||
|
#
|
||||||
|
# Where the idx to abbr mapping should be:
|
||||||
|
#
|
||||||
|
# {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
|
||||||
|
if idx not in abbr_vals:
|
||||||
|
span_end = abbr_chars.find(b"\x00", idx)
|
||||||
|
abbr_vals[idx] = abbr_chars[idx:span_end].decode()
|
||||||
|
|
||||||
|
return abbr_vals[idx]
|
||||||
|
|
||||||
|
abbr = tuple(get_abbr(idx) for idx in abbrind)
|
||||||
|
|
||||||
|
# The remainder of the file consists of leap seconds (currently unused) and
|
||||||
|
# the standard/wall and ut/local indicators, which are metadata we don't need.
|
||||||
|
# In version 2 files, we need to skip the unnecessary data to get at the TZ string:
|
||||||
|
if header.version >= 2:
|
||||||
|
# Each leap second record has size (time_size + 4)
|
||||||
|
skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
|
||||||
|
fobj.seek(skip_bytes, 1)
|
||||||
|
|
||||||
|
c = fobj.read(1) # Should be \n
|
||||||
|
assert c == b"\n", c
|
||||||
|
|
||||||
|
tz_bytes = b""
|
||||||
|
while True:
|
||||||
|
c = fobj.read(1)
|
||||||
|
if c == b"\n":
|
||||||
|
break
|
||||||
|
tz_bytes += c
|
||||||
|
|
||||||
|
tz_str = tz_bytes
|
||||||
|
else:
|
||||||
|
tz_str = None
|
||||||
|
|
||||||
|
return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str
|
||||||
|
|
||||||
|
|
||||||
|
class _TZifHeader:
|
||||||
|
__slots__ = [
|
||||||
|
"version",
|
||||||
|
"isutcnt",
|
||||||
|
"isstdcnt",
|
||||||
|
"leapcnt",
|
||||||
|
"timecnt",
|
||||||
|
"typecnt",
|
||||||
|
"charcnt",
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, *args):
|
||||||
|
assert len(self.__slots__) == len(args)
|
||||||
|
for attr, val in zip(self.__slots__, args):
|
||||||
|
setattr(self, attr, val)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_file(cls, stream):
|
||||||
|
# The header starts with a 4-byte "magic" value
|
||||||
|
if stream.read(4) != b"TZif":
|
||||||
|
raise ValueError("Invalid TZif file: magic not found")
|
||||||
|
|
||||||
|
_version = stream.read(1)
|
||||||
|
if _version == b"\x00":
|
||||||
|
version = 1
|
||||||
|
else:
|
||||||
|
version = int(_version)
|
||||||
|
stream.read(15)
|
||||||
|
|
||||||
|
args = (version,)
|
||||||
|
|
||||||
|
# Slots are defined in the order that the bytes are arranged
|
||||||
|
args = args + struct.unpack(">6l", stream.read(24))
|
||||||
|
|
||||||
|
return cls(*args)
|
||||||
|
|
||||||
|
|
||||||
|
# Subclasses KeyError, so callers performing key-style lookups can catch
# either exception type.
class ZoneInfoNotFoundError(KeyError):
    """Exception raised when a ZoneInfo key is not found."""
|
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/_czoneinfo.cpython-38-x86_64-linux-gnu.so
vendored
Executable file
BIN
env/lib/python3.8/site-packages/backports/zoneinfo/_czoneinfo.cpython-38-x86_64-linux-gnu.so
vendored
Executable file
Binary file not shown.
|
@ -0,0 +1,207 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# True when running under Python < 3.7 (i.e. Python 3.6).
PY36 = sys.version_info < (3, 7)
|
||||||
|
|
||||||
|
|
||||||
|
def reset_tzpath(to=None):
    """Recompute the module-level TZPATH search path.

    Priority: the explicit `to` argument, then the PYTHONTZPATH environment
    variable, then (off Windows) a list of conventional zoneinfo locations.
    Every callback registered in TZPATH_CALLBACKS is notified of the new
    value afterwards.
    """
    global TZPATH

    candidates = to
    if candidates is not None:
        # A bare string would iterate per-character; reject it explicitly.
        if isinstance(candidates, (str, bytes)):
            raise TypeError(
                f"tzpaths must be a list or tuple, "
                + f"not {type(candidates)}: {candidates!r}"
            )

        if not all(os.path.isabs(p) for p in candidates):
            raise ValueError(_get_invalid_paths_message(candidates))
        base_tzpath = candidates
    else:
        env_var = os.environ.get("PYTHONTZPATH", None)
        if env_var is not None:
            base_tzpath = _parse_python_tzpath(env_var)
        elif sys.platform != "win32":
            base_tzpath = [
                "/usr/share/zoneinfo",
                "/usr/lib/zoneinfo",
                "/usr/share/lib/zoneinfo",
                "/etc/zoneinfo",
            ]
            # Prefer directories that actually exist; the sort is stable, so
            # the original ordering is kept within each group.
            base_tzpath.sort(key=lambda p: not os.path.exists(p))
        else:
            base_tzpath = ()

    TZPATH = tuple(base_tzpath)

    for callback in TZPATH_CALLBACKS:
        callback(TZPATH)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_python_tzpath(env_var):
|
||||||
|
if not env_var:
|
||||||
|
return ()
|
||||||
|
|
||||||
|
raw_tzpath = env_var.split(os.pathsep)
|
||||||
|
new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))
|
||||||
|
|
||||||
|
# If anything has been filtered out, we will warn about it
|
||||||
|
if len(new_tzpath) != len(raw_tzpath):
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
msg = _get_invalid_paths_message(raw_tzpath)
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
"Invalid paths specified in PYTHONTZPATH environment variable."
|
||||||
|
+ msg,
|
||||||
|
InvalidTZPathWarning,
|
||||||
|
)
|
||||||
|
|
||||||
|
return new_tzpath
|
||||||
|
|
||||||
|
|
||||||
|
def _get_invalid_paths_message(tzpaths):
|
||||||
|
invalid_paths = (path for path in tzpaths if not os.path.isabs(path))
|
||||||
|
|
||||||
|
prefix = "\n "
|
||||||
|
indented_str = prefix + prefix.join(invalid_paths)
|
||||||
|
|
||||||
|
return (
|
||||||
|
"Paths should be absolute but found the following relative paths:"
|
||||||
|
+ indented_str
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info < (3, 8):
|
||||||
|
|
||||||
|
def _isfile(path):
|
||||||
|
# bpo-33721: In Python 3.8 non-UTF8 paths return False rather than
|
||||||
|
# raising an error. See https://bugs.python.org/issue33721
|
||||||
|
try:
|
||||||
|
return os.path.isfile(path)
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
else:
|
||||||
|
_isfile = os.path.isfile
|
||||||
|
|
||||||
|
|
||||||
|
def find_tzfile(key):
    """Retrieve the path to a TZif file from a key."""
    # Reject absolute / non-normalized / escaping keys before touching disk.
    _validate_tzfile_path(key)
    return next(
        (
            os.path.join(root, key)
            for root in TZPATH
            if _isfile(os.path.join(root, key))
        ),
        None,
    )
|
||||||
|
|
||||||
|
|
||||||
|
_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_tzfile_path(path, _base=_TEST_PATH):
|
||||||
|
if os.path.isabs(path):
|
||||||
|
raise ValueError(
|
||||||
|
f"ZoneInfo keys may not be absolute paths, got: {path}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# We only care about the kinds of path normalizations that would change the
|
||||||
|
# length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
|
||||||
|
# normpath will also change from a/b to a\b, but that would still preserve
|
||||||
|
# the length.
|
||||||
|
new_path = os.path.normpath(path)
|
||||||
|
if len(new_path) != len(path):
|
||||||
|
raise ValueError(
|
||||||
|
f"ZoneInfo keys must be normalized relative paths, got: {path}"
|
||||||
|
)
|
||||||
|
|
||||||
|
resolved = os.path.normpath(os.path.join(_base, new_path))
|
||||||
|
if not resolved.startswith(_base):
|
||||||
|
raise ValueError(
|
||||||
|
f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
del _TEST_PATH
|
||||||
|
|
||||||
|
|
||||||
|
def available_timezones():
    """Returns a set containing all available time zones.

    .. caution::

        This may attempt to open a large number of files, since the best way to
        determine if a given file on the time zone search path is to open it
        and check for the "magic string" at the beginning.
    """
    # On older Pythons fall back to the importlib_resources backport package.
    try:
        from importlib import resources
    except ImportError:
        import importlib_resources as resources

    valid_zones = set()

    # Start with loading from the tzdata package if it exists: this has a
    # pre-assembled list of zones that only requires opening one file.
    try:
        with resources.open_text("tzdata", "zones") as f:
            for zone in f:
                zone = zone.strip()
                if zone:
                    valid_zones.add(zone)
    except (ImportError, FileNotFoundError):
        # tzdata not installed, or installed without its zones index.
        pass

    def valid_key(fpath):
        # A file is considered a time zone if it starts with the TZif magic.
        try:
            with open(fpath, "rb") as f:
                return f.read(4) == b"TZif"
        except Exception:  # pragma: nocover
            return False

    # Walk every root on the search path and collect TZif files by their
    # path relative to that root.
    for tz_root in TZPATH:
        if not os.path.exists(tz_root):
            continue

        for root, dirnames, files in os.walk(tz_root):
            if root == tz_root:
                # right/ and posix/ are special directories and shouldn't be
                # included in the output of available zones
                if "right" in dirnames:
                    dirnames.remove("right")
                if "posix" in dirnames:
                    dirnames.remove("posix")

            for file in files:
                fpath = os.path.join(root, file)

                key = os.path.relpath(fpath, start=tz_root)
                if os.sep != "/":  # pragma: nocover
                    # Keys always use forward slashes, regardless of platform.
                    key = key.replace(os.sep, "/")

                if not key or key in valid_zones:
                    continue

                if valid_key(fpath):
                    valid_zones.add(key)

    if "posixrules" in valid_zones:
        # posixrules is a special symlink-only time zone where it exists, it
        # should not be included in the output
        valid_zones.remove("posixrules")

    return valid_zones
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidTZPathWarning(RuntimeWarning):
    """Warning raised if an invalid path is specified in PYTHONTZPATH."""


# Module state: the active search path and the callbacks notified whenever it
# changes. reset_tzpath() populates TZPATH immediately at import time.
TZPATH = ()
TZPATH_CALLBACKS = []
reset_tzpath()
|
|
@ -0,0 +1 @@
|
||||||
|
# Version of the backports.zoneinfo distribution.
__version__ = "0.2.1"
|
|
@ -0,0 +1,754 @@
|
||||||
|
import bisect
|
||||||
|
import calendar
|
||||||
|
import collections
|
||||||
|
import functools
|
||||||
|
import re
|
||||||
|
import weakref
|
||||||
|
from datetime import datetime, timedelta, tzinfo
|
||||||
|
|
||||||
|
from . import _common, _tzpath
|
||||||
|
|
||||||
|
EPOCH = datetime(1970, 1, 1)
|
||||||
|
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()
|
||||||
|
|
||||||
|
# It is relatively expensive to construct new timedelta objects, and in most
|
||||||
|
# cases we're looking at the same deltas, like integer numbers of hours, etc.
|
||||||
|
# To improve speed and memory use, we'll keep a dictionary with references
|
||||||
|
# to the ones we've already used so far.
|
||||||
|
#
|
||||||
|
# Loading every time zone in the 2020a version of the time zone database
|
||||||
|
# requires 447 timedeltas, which requires approximately the amount of space
|
||||||
|
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
|
||||||
|
# set the cache size to 512 so that in the common case we always get cache
|
||||||
|
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
|
||||||
|
# of memory.
|
||||||
|
@functools.lru_cache(maxsize=512)
|
||||||
|
def _load_timedelta(seconds):
|
||||||
|
return timedelta(seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
|
class ZoneInfo(tzinfo):
    """IANA time zone backed by the data in a TZif file.

    Instances are normally constructed from a key (e.g. "America/New_York")
    and are cached: ``ZoneInfo(key)`` returns the same object for a given key
    until the caches are cleared.
    """

    # Number of most-recently-used instances kept alive unconditionally; all
    # other cached instances are held only by weak references.
    _strong_cache_size = 8
    _strong_cache = collections.OrderedDict()
    _weak_cache = weakref.WeakValueDictionary()
    __module__ = "backports.zoneinfo"

    def __init_subclass__(cls):
        # Give each subclass its own caches so subclass instances never mix
        # with (or evict) entries cached on the parent class.
        cls._strong_cache = collections.OrderedDict()
        cls._weak_cache = weakref.WeakValueDictionary()

    def __new__(cls, key):
        instance = cls._weak_cache.get(key, None)
        if instance is None:
            # setdefault: if some other caller populated the key since the
            # get() above, keep and return that instance instead of ours.
            instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
            instance._from_cache = True

        # Update the "strong" cache
        cls._strong_cache[key] = cls._strong_cache.pop(key, instance)

        # Evict the least-recently-used entry once over capacity.
        if len(cls._strong_cache) > cls._strong_cache_size:
            cls._strong_cache.popitem(last=False)

        return instance

    @classmethod
    def no_cache(cls, key):
        """Construct an instance for `key`, bypassing and not populating the
        caches."""
        obj = cls._new_instance(key)
        obj._from_cache = False

        return obj

    @classmethod
    def _new_instance(cls, key):
        """Load a fresh instance for `key` from TZPATH or, failing that, from
        the tzdata package."""
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = obj._find_tzfile(key)

        if obj._file_path is not None:
            file_obj = open(obj._file_path, "rb")
        else:
            file_obj = _common.load_tzdata(key)

        with file_obj as f:
            obj._load_file(f)

        return obj

    @classmethod
    def from_file(cls, fobj, key=None):
        """Construct an instance directly from an open binary TZif stream."""
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = None
        obj._load_file(fobj)
        obj._file_repr = repr(fobj)

        # Disable pickling for objects created from files
        obj.__reduce__ = obj._file_reduce

        return obj

    @classmethod
    def clear_cache(cls, *, only_keys=None):
        """Drop cached instances, either for `only_keys` or for all keys."""
        if only_keys is not None:
            for key in only_keys:
                cls._weak_cache.pop(key, None)
                cls._strong_cache.pop(key, None)

        else:
            cls._weak_cache.clear()
            cls._strong_cache.clear()

    @property
    def key(self):
        # The key this zone was constructed from (None for from_file zones).
        return self._key

    def utcoffset(self, dt):
        """Total offset from UTC in effect at `dt`."""
        return self._find_trans(dt).utcoff

    def dst(self, dt):
        """DST component of the offset in effect at `dt`."""
        return self._find_trans(dt).dstoff

    def tzname(self, dt):
        """Time zone abbreviation in effect at `dt`."""
        return self._find_trans(dt).tzname

    def fromutc(self, dt):
        """Convert from datetime in UTC to datetime in local time"""

        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        timestamp = self._get_local_timestamp(dt)
        num_trans = len(self._trans_utc)

        if num_trans >= 1 and timestamp < self._trans_utc[0]:
            # Before the first transition: the zone's initial offset applies.
            tti = self._tti_before
            fold = 0
        elif (
            num_trans == 0 or timestamp > self._trans_utc[-1]
        ) and not isinstance(self._tz_after, _ttinfo):
            # Past the last transition with a POSIX TZ rule as fallback:
            # delegate both the offset and the fold computation to the rule.
            tti, fold = self._tz_after.get_trans_info_fromutc(
                timestamp, dt.year
            )
        elif num_trans == 0:
            # No transitions at all and a fixed fallback offset.
            tti = self._tz_after
            fold = 0
        else:
            idx = bisect.bisect_right(self._trans_utc, timestamp)

            if num_trans > 1 and timestamp >= self._trans_utc[1]:
                tti_prev, tti = self._ttinfos[idx - 2 : idx]
            elif timestamp > self._trans_utc[-1]:
                tti_prev = self._ttinfos[-1]
                tti = self._tz_after
            else:
                tti_prev = self._tti_before
                tti = self._ttinfos[0]

            # Detect fold
            shift = tti_prev.utcoff - tti.utcoff
            fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
        dt += tti.utcoff
        if fold:
            return dt.replace(fold=1)
        else:
            return dt

    def _find_trans(self, dt):
        """Return the _ttinfo record in effect at local datetime `dt`.

        For dt=None (allowed by the tzinfo protocol), a fixed-offset zone
        returns its single record; otherwise a record of Nones is returned.
        """
        if dt is None:
            if self._fixed_offset:
                return self._tz_after
            else:
                return _NO_TTINFO

        ts = self._get_local_timestamp(dt)

        # Transition times in local seconds; one list per fold value.
        lt = self._trans_local[dt.fold]

        num_trans = len(lt)

        if num_trans and ts < lt[0]:
            return self._tti_before
        elif not num_trans or ts > lt[-1]:
            # Past the data: use the TZ-string rule if one exists, otherwise
            # the fixed fallback record.
            if isinstance(self._tz_after, _TZStr):
                return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
            else:
                return self._tz_after
        else:
            # idx is the transition that occurs after this timestamp, so we
            # subtract off 1 to get the current ttinfo
            idx = bisect.bisect_right(lt, ts) - 1
            assert idx >= 0
            return self._ttinfos[idx]

    def _get_local_timestamp(self, dt):
        # Naive "seconds since 1970-01-01" computed from dt's own wall-clock
        # fields (tzinfo and sub-second components are not consulted).
        return (
            (dt.toordinal() - EPOCHORDINAL) * 86400
            + dt.hour * 3600
            + dt.minute * 60
            + dt.second
        )

    def __str__(self):
        if self._key is not None:
            return f"{self._key}"
        else:
            return repr(self)

    def __repr__(self):
        if self._key is not None:
            return f"{self.__class__.__name__}(key={self._key!r})"
        else:
            return f"{self.__class__.__name__}.from_file({self._file_repr})"

    def __reduce__(self):
        return (self.__class__._unpickle, (self._key, self._from_cache))

    def _file_reduce(self):
        # Bound as the instance's __reduce__ in from_file().
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream."
        )

    @classmethod
    def _unpickle(cls, key, from_cache):
        # Unpickling goes back through the cache only when the pickled
        # instance originally came from the cache.
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        """Parse a TZif stream and populate this instance's lookup tables."""
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after

    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            # Stop early once every DST type has been assigned an offset.
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            # Compare against the preceding transition's type: if it is
            # standard time, the difference is the DST offset.
            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        # For each transition, list 0 gets the smaller (pre-gap) offset and
        # list 1 the larger one, giving the wall-clock time for each fold.
        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall
|
||||||
|
|
||||||
|
|
||||||
|
class _ttinfo:
|
||||||
|
__slots__ = ["utcoff", "dstoff", "tzname"]
|
||||||
|
|
||||||
|
def __init__(self, utcoff, dstoff, tzname):
|
||||||
|
self.utcoff = utcoff
|
||||||
|
self.dstoff = dstoff
|
||||||
|
self.tzname = tzname
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return (
|
||||||
|
self.utcoff == other.utcoff
|
||||||
|
and self.dstoff == other.dstoff
|
||||||
|
and self.tzname == other.tzname
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self): # pragma: nocover
|
||||||
|
return (
|
||||||
|
f"{self.__class__.__name__}"
|
||||||
|
+ f"({self.utcoff}, {self.dstoff}, {self.tzname})"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_NO_TTINFO = _ttinfo(None, None, None)
|
||||||
|
|
||||||
|
|
||||||
|
class _TZStr:
    """Fallback zone rules parsed from a POSIX TZ string.

    Holds a standard/DST record pair plus the yearly rules (`start`, `end`)
    for switching between them; used for instants after the last explicit
    transition in a TZif file.
    """

    __slots__ = (
        "std",
        "dst",
        "start",
        "end",
        "get_trans_info",
        "get_trans_info_fromutc",
        "dst_diff",
    )

    def __init__(
        self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
    ):
        # Offsets arrive as integer seconds; they are stored on the records
        # as cached timedeltas.
        self.dst_diff = dst_offset - std_offset
        std_offset = _load_timedelta(std_offset)
        self.std = _ttinfo(
            utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
        )

        self.start = start
        self.end = end

        dst_offset = _load_timedelta(dst_offset)
        delta = _load_timedelta(self.dst_diff)
        self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)

        # These are assertions because the constructor should only be called
        # by functions that would fail before passing start or end
        assert start is not None, "No transition start specified"
        assert end is not None, "No transition end specified"

        self.get_trans_info = self._get_trans_info
        self.get_trans_info_fromutc = self._get_trans_info_fromutc

    def transitions(self, year):
        # Epoch seconds (expressed in local time) of the DST start and end
        # rules for the given year.
        start = self.start.year_to_epoch(year)
        end = self.end.year_to_epoch(year)
        return start, end

    def _get_trans_info(self, ts, year, fold):
        """Get the information about the current transition - tti"""
        start, end = self.transitions(year)

        # With fold = 0, the period (denominated in local time) with the
        # smaller offset starts at the end of the gap and ends at the end of
        # the fold; with fold = 1, it runs from the start of the gap to the
        # beginning of the fold.
        #
        # So in order to determine the DST boundaries we need to know both
        # the fold and whether DST is positive or negative (rare), and it
        # turns out that this boils down to fold XOR is_positive.
        if fold == (self.dst_diff >= 0):
            end -= self.dst_diff
        else:
            start += self.dst_diff

        if start < end:
            isdst = start <= ts < end
        else:
            # The DST interval wraps around the year boundary.
            isdst = not (end <= ts < start)

        return self.dst if isdst else self.std

    def _get_trans_info_fromutc(self, ts, year):
        """Like _get_trans_info, but for a UTC timestamp; also returns the
        fold to use for the resulting local time."""
        start, end = self.transitions(year)
        # Shift the (local-time) rule boundaries into UTC.
        start -= self.std.utcoff.total_seconds()
        end -= self.dst.utcoff.total_seconds()

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        # For positive DST, the ambiguous period is one dst_diff after the end
        # of DST; for negative DST, the ambiguous period is one dst_diff before
        # the start of DST.
        if self.dst_diff > 0:
            ambig_start = end
            ambig_end = end + self.dst_diff
        else:
            ambig_start = start
            ambig_end = start - self.dst_diff

        fold = ambig_start <= ts < ambig_end

        return (self.dst if isdst else self.std, fold)
|
||||||
|
|
||||||
|
|
||||||
|
def _post_epoch_days_before_year(year):
    """Get the number of days between 1970-01-01 and YEAR-01-01"""
    prior = year - 1
    # Leap-day count for all years before `year` in the Gregorian calendar.
    leap_days = prior // 4 - prior // 100 + prior // 400
    return prior * 365 + leap_days - EPOCHORDINAL
|
||||||
|
|
||||||
|
|
||||||
|
class _DayOffset:
|
||||||
|
__slots__ = ["d", "julian", "hour", "minute", "second"]
|
||||||
|
|
||||||
|
def __init__(self, d, julian, hour=2, minute=0, second=0):
|
||||||
|
if not (0 + julian) <= d <= 365:
|
||||||
|
min_day = 0 + julian
|
||||||
|
raise ValueError(f"d must be in [{min_day}, 365], not: {d}")
|
||||||
|
|
||||||
|
self.d = d
|
||||||
|
self.julian = julian
|
||||||
|
self.hour = hour
|
||||||
|
self.minute = minute
|
||||||
|
self.second = second
|
||||||
|
|
||||||
|
def year_to_epoch(self, year):
|
||||||
|
days_before_year = _post_epoch_days_before_year(year)
|
||||||
|
|
||||||
|
d = self.d
|
||||||
|
if self.julian and d >= 59 and calendar.isleap(year):
|
||||||
|
d += 1
|
||||||
|
|
||||||
|
epoch = (days_before_year + d) * 86400
|
||||||
|
epoch += self.hour * 3600 + self.minute * 60 + self.second
|
||||||
|
|
||||||
|
return epoch
|
||||||
|
|
||||||
|
|
||||||
|
class _CalendarOffset:
    """A TZ-string transition rule given as (month, week, day-of-week),
    e.g. the POSIX "M3.2.0" form = the second Sunday in March."""

    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    # Cumulative day counts before each month for a non-leap year; entries
    # are indexed by 1-based month number (index 0 is a placeholder).
    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 0 < m <= 12:
            raise ValueError("m must be in (0, 12]")

        if not 0 < w <= 5:
            raise ValueError("w must be in (0, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        # Days since the epoch for (year, month, day), adding 1 for months
        # after February in leap years.
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_tz_str(tz_str):
|
||||||
|
# The tz string has the format:
|
||||||
|
#
|
||||||
|
# std[offset[dst[offset],start[/time],end[/time]]]
|
||||||
|
#
|
||||||
|
# std and dst must be 3 or more characters long and must not contain
|
||||||
|
# a leading colon, embedded digits, commas, nor a plus or minus signs;
|
||||||
|
# The spaces between "std" and "offset" are only for display and are
|
||||||
|
# not actually present in the string.
|
||||||
|
#
|
||||||
|
# The format of the offset is ``[+|-]hh[:mm[:ss]]``
|
||||||
|
|
||||||
|
offset_str, *start_end_str = tz_str.split(",", 1)
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
parser_re = re.compile(
|
||||||
|
r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
|
||||||
|
r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
|
||||||
|
r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
|
||||||
|
r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
|
||||||
|
r")?" + # dst
|
||||||
|
r")?$" # stdoff
|
||||||
|
)
|
||||||
|
# fmt: on
|
||||||
|
|
||||||
|
m = parser_re.match(offset_str)
|
||||||
|
|
||||||
|
if m is None:
|
||||||
|
raise ValueError(f"{tz_str} is not a valid TZ string")
|
||||||
|
|
||||||
|
std_abbr = m.group("std")
|
||||||
|
dst_abbr = m.group("dst")
|
||||||
|
dst_offset = None
|
||||||
|
|
||||||
|
std_abbr = std_abbr.strip("<>")
|
||||||
|
|
||||||
|
if dst_abbr:
|
||||||
|
dst_abbr = dst_abbr.strip("<>")
|
||||||
|
|
||||||
|
std_offset = m.group("stdoff")
|
||||||
|
if std_offset:
|
||||||
|
try:
|
||||||
|
std_offset = _parse_tz_delta(std_offset)
|
||||||
|
except ValueError as e:
|
||||||
|
raise ValueError(f"Invalid STD offset in {tz_str}") from e
|
||||||
|
else:
|
||||||
|
std_offset = 0
|
||||||
|
|
||||||
|
if dst_abbr is not None:
|
||||||
|
dst_offset = m.group("dstoff")
|
||||||
|
if dst_offset:
|
||||||
|
try:
|
||||||
|
dst_offset = _parse_tz_delta(dst_offset)
|
||||||
|
except ValueError as e:
|
||||||
|
raise ValueError(f"Invalid DST offset in {tz_str}") from e
|
||||||
|
else:
|
||||||
|
dst_offset = std_offset + 3600
|
||||||
|
|
||||||
|
if not start_end_str:
|
||||||
|
raise ValueError(f"Missing transition rules: {tz_str}")
|
||||||
|
|
||||||
|
start_end_strs = start_end_str[0].split(",", 1)
|
||||||
|
try:
|
||||||
|
start, end = (_parse_dst_start_end(x) for x in start_end_strs)
|
||||||
|
except ValueError as e:
|
||||||
|
raise ValueError(f"Invalid TZ string: {tz_str}") from e
|
||||||
|
|
||||||
|
return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
|
||||||
|
elif start_end_str:
|
||||||
|
raise ValueError(f"Transition rule present without DST: {tz_str}")
|
||||||
|
else:
|
||||||
|
# This is a static ttinfo, don't return _TZStr
|
||||||
|
return _ttinfo(
|
||||||
|
_load_timedelta(std_offset), _load_timedelta(0), std_abbr
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_dst_start_end(dststr):
|
||||||
|
date, *time = dststr.split("/")
|
||||||
|
if date[0] == "M":
|
||||||
|
n_is_julian = False
|
||||||
|
m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
|
||||||
|
if m is None:
|
||||||
|
raise ValueError(f"Invalid dst start/end date: {dststr}")
|
||||||
|
date_offset = tuple(map(int, m.groups()))
|
||||||
|
offset = _CalendarOffset(*date_offset)
|
||||||
|
else:
|
||||||
|
if date[0] == "J":
|
||||||
|
n_is_julian = True
|
||||||
|
date = date[1:]
|
||||||
|
else:
|
||||||
|
n_is_julian = False
|
||||||
|
|
||||||
|
doy = int(date)
|
||||||
|
offset = _DayOffset(doy, n_is_julian)
|
||||||
|
|
||||||
|
if time:
|
||||||
|
time_components = list(map(int, time[0].split(":")))
|
||||||
|
n_components = len(time_components)
|
||||||
|
if n_components < 3:
|
||||||
|
time_components.extend([0] * (3 - n_components))
|
||||||
|
offset.hour, offset.minute, offset.second = time_components
|
||||||
|
|
||||||
|
return offset
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_tz_delta(tz_delta):
|
||||||
|
match = re.match(
|
||||||
|
r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
|
||||||
|
tz_delta,
|
||||||
|
)
|
||||||
|
# Anything passed to this function should already have hit an equivalent
|
||||||
|
# regular expression to find the section to parse.
|
||||||
|
assert match is not None, tz_delta
|
||||||
|
|
||||||
|
h, m, s = (
|
||||||
|
int(v) if v is not None else 0
|
||||||
|
for v in map(match.group, ("h", "m", "s"))
|
||||||
|
)
|
||||||
|
|
||||||
|
total = h * 3600 + m * 60 + s
|
||||||
|
|
||||||
|
if not -86400 < total < 86400:
|
||||||
|
raise ValueError(
|
||||||
|
"Offset must be strictly between -24h and +24h:" + tz_delta
|
||||||
|
)
|
||||||
|
|
||||||
|
# Yes, +5 maps to an offset of -5h
|
||||||
|
if match.group("sign") != "-":
|
||||||
|
total *= -1
|
||||||
|
|
||||||
|
return total
|
|
@ -0,0 +1,24 @@
|
||||||
|
from django.utils.version import get_version

# Version tuple: (major, minor, micro, release level, serial).
VERSION = (4, 1, 0, "final", 0)

# Human-readable version string derived from VERSION — see
# django.utils.version.get_version for the exact formatting rules.
__version__ = get_version(VERSION)
def setup(set_prefix=True):
    """
    Configure the settings (this happens as a side effect of accessing the
    first setting), configure logging and populate the app registry.
    Set the thread-local urlresolvers script prefix if `set_prefix` is True.
    """
    # Imported lazily so that merely importing `django` stays cheap and
    # settings are only touched when setup() runs.
    from django.apps import apps
    from django.conf import settings
    from django.urls import set_script_prefix
    from django.utils.log import configure_logging

    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
    if set_prefix:
        script_prefix = settings.FORCE_SCRIPT_NAME
        if script_prefix is None:
            script_prefix = "/"
        set_script_prefix(script_prefix)
    apps.populate(settings.INSTALLED_APPS)
@ -0,0 +1,9 @@
|
||||||
|
"""
|
||||||
|
Invokes django-admin when the django module is run as a script.
|
||||||
|
|
||||||
|
Example: python -m django check
|
||||||
|
"""
|
||||||
|
from django.core import management
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
management.execute_from_command_line()
|
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue