📚 Initial ALETHEIA documentation

- MANUAL-PRODUTO.md: End-user manual
- MANUAL-VENDAS.md: Commercial and sales strategy
- MANUAL-TECNICO.md: Infrastructure and deployment
- README.md: Project overview
2026-02-10 15:08:15 -03:00
commit 20a26affaa
16617 changed files with 3202171 additions and 0 deletions


@@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (i.e. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
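# In short (an illustrative summary; the code below is authoritative): activating
# sets $env:VIRTUAL_ENV to the venv directory, sets $env:VIRTUAL_ENV_PROMPT to the
# prompt prefix (e.g. '(venv) '), prepends the venv's script directory to
# $env:PATH, and wraps the prompt function so the prefix is printed first.
# The `deactivate` function defined below restores the previous prompt, PATH and
# PYTHONHOME.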
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including
removing the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
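# Example (illustrative; this sample pyvenv.cfg content is hypothetical): a file
# containing the lines
#     home = /usr/bin
#     include-system-site-packages = false
#     version = 3.12.3
#     prompt = 'venv'
# parses into a hashtable of four entries. The surrounding quotes on the prompt
# value are stripped, so (Get-PyVenvConfig -ConfigDir $VenvDir)['prompt'] would
# evaluate to: venv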
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment; it might not be
# VenvExecDir if it was specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

70
backend/venv/bin/activate Normal file

@@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
# transform D:\path\to\venv to /d/path/to/venv on MSYS
# and to /cygdrive/d/path/to/venv on Cygwin
export VIRTUAL_ENV=$(cygpath /home/kernelpanic/projetos_jarvis/aletheia/backend/venv)
else
# use the path as-is
export VIRTUAL_ENV=/home/kernelpanic/projetos_jarvis/aletheia/backend/venv
fi
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(venv) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(venv) '
export VIRTUAL_ENV_PROMPT
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
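# Typical usage from an interactive bash session (illustrative):
#   $ source backend/venv/bin/activate
#   (venv) $ command -v python    # resolves to .../backend/venv/bin/python
#   (venv) $ deactivate           # restores PATH, PS1 and PYTHONHOME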


@@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV /home/kernelpanic/projetos_jarvis/aletheia/backend/venv
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(venv) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(venv) '
endif
alias pydoc python -m pydoc
rehash


@@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV /home/kernelpanic/projetos_jarvis/aletheia/backend/venv
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(venv) '
end

8
backend/venv/bin/distro Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from distro.distro import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

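Each of the generated console scripts below follows this same eight-line pattern: rewrite sys.argv[0], then call the package's CLI entry point. A minimal, self-contained sketch of what the argv[0] substitution does (the sample names are illustrative):

import re

# The pattern used by the generated entry points: it strips an optional Windows
# launcher suffix ("-script.pyw" or ".exe") from the end of the program name.
pattern = r'(-script\.pyw|\.exe)?$'

for name in ("distro", "distro.exe", "distro-script.pyw"):
    print(re.sub(pattern, '', name))
# All three print "distro", so usage and error messages show the plain command name.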
8
backend/venv/bin/dotenv Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

8
backend/venv/bin/fastapi Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from fastapi.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/httpx Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from httpx import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/openai Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from openai.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/pip Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/pip3 Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/pip3.12 Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/pyrsa-decrypt Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import decrypt
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(decrypt())

8
backend/venv/bin/pyrsa-encrypt Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import encrypt
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(encrypt())

8
backend/venv/bin/pyrsa-keygen Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import keygen
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(keygen())


@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.util import private_to_public
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(private_to_public())

8
backend/venv/bin/pyrsa-sign Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import sign
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(sign())

8
backend/venv/bin/pyrsa-verify Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import verify
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(verify())

1
backend/venv/bin/python Symbolic link

@@ -0,0 +1 @@
python3

1
backend/venv/bin/python3 Symbolic link
View File

@@ -0,0 +1 @@
/usr/bin/python3

1
backend/venv/bin/python3.12 Symbolic link

@@ -0,0 +1 @@
python3

8
backend/venv/bin/tqdm Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from tqdm.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/uvicorn Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from uvicorn.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
backend/venv/bin/watchfiles Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from watchfiles.cli import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

8
backend/venv/bin/websockets Executable file

@@ -0,0 +1,8 @@
#!/home/kernelpanic/projetos_jarvis/aletheia/backend/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from websockets.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())


@@ -0,0 +1,164 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif
typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11
#ifndef GREENLET_MODULE
/* This section is used by modules that use the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
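The comments above map each C macro onto the Python-level greenlet API (PyGreenlet_New ~ greenlet.greenlet(run, parent), PyGreenlet_GetCurrent ~ greenlet.getcurrent(), PyGreenlet_Switch ~ g.switch(...)). A minimal Python sketch of that API, assuming the greenlet package bundled in this venv is importable:

from greenlet import greenlet, getcurrent

def child():
    # getcurrent() is the running greenlet; .parent is the greenlet that started it
    print("child running; parent is", getcurrent().parent)
    getcurrent().parent.switch("done")  # hand control (and a value) back

g = greenlet(child)   # C equivalent: PyGreenlet_New(run, parent)
result = g.switch()   # start the child; returns the value switched back to us
print(result)         # -> done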


@@ -0,0 +1,19 @@
Copyright 2005-2024 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,242 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.35
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: typing-extensions >=4.6.0
Requires-Dist: greenlet !=0.4.17 ; python_version < "3.13" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
Provides-Extra: aioodbc
Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
Requires-Dist: aioodbc ; extra == 'aioodbc'
Provides-Extra: aiosqlite
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy >=0.910 ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: postgresql_psycopgbinary
Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Python Version
.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month
:target: https://pepy.tech/project/sqlalchemy
:alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-initiated
decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
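A minimal sketch of the Core-plus-ORM workflow described above, written against the 2.0-style declarative API (the model, table name, and in-memory SQLite URL are illustrative, not taken from this package). Note that nothing is persisted until commit() is called and that the filter value travels as a bound parameter rather than being rendered into the SQL string::

    from sqlalchemy import String, create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "users"
        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str] = mapped_column(String(50))

    engine = create_engine("sqlite+pysqlite:///:memory:")
    Base.metadata.create_all(engine)   # generate CREATE statements from metadata

    with Session(engine) as session:
        session.add(User(name="alice"))
        session.commit()               # nothing hits permanent storage before this
        stmt = select(User).where(User.name == "alice")   # bound parameter, not a literal
        print(session.scalars(stmt).one().id)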
Documentation
-------------
Latest documentation is at:
https://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.


@@ -0,0 +1,530 @@
SQLAlchemy-2.0.35.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-2.0.35.dist-info/LICENSE,sha256=PA9Zq4h9BB3mpOUv_j6e212VIt6Qn66abNettue-MpM,1100
SQLAlchemy-2.0.35.dist-info/METADATA,sha256=nz-ukujebAhwlZl0txMOxlkTwDyPwTn3tWuKTcw8OMU,9632
SQLAlchemy-2.0.35.dist-info/RECORD,,
SQLAlchemy-2.0.35.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-2.0.35.dist-info/WHEEL,sha256=7B4nnId14TToQHuAKpxbDLCJbNciqBsV-mvXE2hVLJc,151
SQLAlchemy-2.0.35.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=xHEMNoAWZaZfX2Zdm2gnVT8EWjaBfg6HpzG4ncSmPrk,13033
sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/__pycache__/events.cpython-312.pyc,,
sqlalchemy/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
sqlalchemy/__pycache__/log.cpython-312.pyc,,
sqlalchemy/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/__pycache__/types.cpython-312.pyc,,
sqlalchemy/connectors/__init__.py,sha256=PzXPqZqi3BzEnrs1eW0DcsR4lyknAzhhN9rWcQ97hb4,476
sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/connectors/aioodbc.py,sha256=GSTiNMO9h0qjPxgqaxDwWZ8HvhWMFNVR6MJQnN1oc40,5288
sqlalchemy/connectors/asyncio.py,sha256=Hq2bkXmG6-KO_RfCrwMqx4oGH-uH1Z1WWKqPWNjz8p4,6138
sqlalchemy/connectors/pyodbc.py,sha256=t7AjyxIOnaWg3CrlUEpBs4Y5l0HFdNt3P_cSSKhbi0Y,8501
sqlalchemy/cyextension/__init__.py,sha256=GzhhN8cjMnDTE0qerlUlpbrNmFPHQWCZ4Gk74OAxl04,244
sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=wiVAQHnu9pZiSNMZmfxQQs6cS19-erq0DXux92G4VSo,1932256
sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571
sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=rl6xddEXaRQGh9OlPP2LDlAmSP_8OrLrVv4gJbcPiCc,805632
sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291
sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535
sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=7JFk5KtZ2DQzB4TKTz7-Zb7EWTUHOVTw7-sfKHQm4MY,530680
sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792
sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=05hidqye-dCbUfWu9RYtdtftx0Fo9tTsBaO6Tp5sSQg,621328
sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725
sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=dfsFk5a4zF5Z9Afrxu6JB8GBoRcfVSp9KuSwFIgtmlU,950928
sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530
sqlalchemy/dialects/__init__.py,sha256=Kos9Gf5JZg1Vg6GWaCqEbD6e0r1jCwCmcnJIfcxDdcY,1770
sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/dialects/_typing.py,sha256=hyv0nKucX2gI8ispB1IsvaUgrEPn9zEcq9hS7kfstEw,888
sqlalchemy/dialects/mssql/__init__.py,sha256=r5t8wFRNtBQoiUWh0WfIEWzXZW6f3D0uDt6NZTW_7Cc,1880
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/aioodbc.py,sha256=UQd9ecSMIML713TDnLAviuBVJle7P7i1FtqGZZePk2Y,2022
sqlalchemy/dialects/mssql/base.py,sha256=TQekFOsor8Rc-cnDPmbq_JLQekKrHPtUdBdNsCYFw7w,132447
sqlalchemy/dialects/mssql/information_schema.py,sha256=HswjDc6y0mPXCf_x6VyylHlBdBa4PSY6Evxmmlch700,8084
sqlalchemy/dialects/mssql/json.py,sha256=evUACW2O62TAPq8B7QIPagz7jfc664ql9ms68JqiYzg,4816
sqlalchemy/dialects/mssql/provision.py,sha256=ZAtt6Div9NLIngMs8kyloxfphw0KDNMsnRCAVd7-esE,5593
sqlalchemy/dialects/mssql/pymssql.py,sha256=LAv43q4vBCB85OsAwHQItaQUYTYIO0QJ-jvzaBrswmY,4097
sqlalchemy/dialects/mssql/pyodbc.py,sha256=vwM-vBlmRwrqxOc73P0sFOrBSwn24wzc5IkEOpalbXQ,27056
sqlalchemy/dialects/mysql/__init__.py,sha256=bxbi4hkysUK2OOVvr1F49akUj1cky27kKb07tgFzI9U,2153
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=-oMZnCqNsSki8mlQRTWIwiQPT1OVdZIuANkb90q8LAs,9999
sqlalchemy/dialects/mysql/asyncmy.py,sha256=YpuuOh8VknEeqHqUXQGfQ3jhfO3Xb-vZv78Jq5cscJ0,10067
sqlalchemy/dialects/mysql/base.py,sha256=8shrZRSnVwDXtl6ybaO6cvul_RZ34zEWqvpYW5sNfOQ,120986
sqlalchemy/dialects/mysql/cymysql.py,sha256=eXT1ry0w_qRxjiO24M980c-8PZ9qSsbhqBHntjEiKB0,2300
sqlalchemy/dialects/mysql/dml.py,sha256=HXJMAvimJsqvhj3UZO4vW_6LkF5RqaKbHvklAjor7yU,7645
sqlalchemy/dialects/mysql/enumerated.py,sha256=ipEPPQqoXfFwcywNdcLlZCEzHBtnitHRah1Gn6nItcg,8448
sqlalchemy/dialects/mysql/expression.py,sha256=lsmQCHKwfPezUnt27d2kR6ohk4IRFCA64KBS16kx5dc,4097
sqlalchemy/dialects/mysql/json.py,sha256=l6MEZ0qp8FgiRrIQvOMhyEJq0q6OqiEnvDTx5Cbt9uQ,2269
sqlalchemy/dialects/mysql/mariadb.py,sha256=kTfBLioLKk4JFFst4TY_iWqPtnvvQXFHknLfm89H2N8,853
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=_S1aV93kyP52Nvj7HR9weThML4oUvSLsLqiVFdoLR2o,8623
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=oq3mtsNOMldUjs32JbJG2u3Hy3DObyVzUUMYfOkwkHg,5729
sqlalchemy/dialects/mysql/mysqldb.py,sha256=qUBbA6STeYGozutyTxHCo5p1W3p59QFFS2FwCgPrjBA,9503
sqlalchemy/dialects/mysql/provision.py,sha256=Jnk8UO9_Apd2odR2IQFLrscCfAmYxuBKcB8giS3bBog,3575
sqlalchemy/dialects/mysql/pymysql.py,sha256=GUnSHd2M2uKjmN46Hheymtm26g7phEgwYOXrX0zLY8M,4083
sqlalchemy/dialects/mysql/pyodbc.py,sha256=072crI4qVyPhajYvHnsfFeSrNjLFVPIjBQKo5uyz5yk,4297
sqlalchemy/dialects/mysql/reflection.py,sha256=3u34YwT1JJh3uThGZJZ3FKdnUcT7v08QB-tAl1r7VRk,22834
sqlalchemy/dialects/mysql/reserved_words.py,sha256=ucKX2p2c3UnMq2ayZuOHuf73eXhu7SKsOsTlIN1Q83I,9258
sqlalchemy/dialects/mysql/types.py,sha256=L5cTCsMT1pTedszNEM3jSxFNZEMcHQLprYCZ0vmfsnA,24343
sqlalchemy/dialects/oracle/__init__.py,sha256=p4-2gw7TT0bX_MoJXTGD4i8WHctYsK9kCRbkpzykBrc,1493
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=HykzMCkN1LSRteoEttWb_ln9Q0YmOJ5tLbJan7TuWZ4,119699
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=q8Nyj15UZCE2TWOmxuWp5ZsxiCiGMzqfd_9UkmjIja0,55235
sqlalchemy/dialects/oracle/dictionary.py,sha256=7WMrbPkqo8ZdGjaEZyQr-5f2pajSOF1OTGb8P97z8-g,19519
sqlalchemy/dialects/oracle/oracledb.py,sha256=fZRKGqNIwW9LG4i8yDOXABrucbfzn_yC86Od-BJ3PcM,13619
sqlalchemy/dialects/oracle/provision.py,sha256=O9ZpF4OG6Cx4mMzLRfZwhs8dZjrJETWR402n9c7726A,8304
sqlalchemy/dialects/oracle/types.py,sha256=QK3hJvWzKnnCe3oD3rItwEEIwcoBze8qGg7VFOvVlIk,8231
sqlalchemy/dialects/postgresql/__init__.py,sha256=wwnNAq4wDQzrlPRzDNB06ayuq3L2HNO99nzeEvq-YcU,3892
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=7TudtgsPiSB8O5kX8W8KxcNYR8t5h_UHb86b_ChL0P8,5696
sqlalchemy/dialects/postgresql/array.py,sha256=bWcame7ntmI_Kx6gmBX0-chwADFdLHeCvaDQ4iX8id8,13734
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=se1HXkikKQFcR2D-15jS6iKIaQs-M8Y8ke99ih0Y4NM,41132
sqlalchemy/dialects/postgresql/base.py,sha256=ed49Ode09deJF3jXr8V5sSHTwVoFVq0dlvNdP3mJaBQ,178989
sqlalchemy/dialects/postgresql/dml.py,sha256=Pc69Le6qzmUHHb1FT5zeUSD31dWm6SBgdCAGW89cs3s,11212
sqlalchemy/dialects/postgresql/ext.py,sha256=1bZ--iNh2O9ym7l2gXZX48yP3yMO4dqb9RpYro2Mj2Q,16262
sqlalchemy/dialects/postgresql/hstore.py,sha256=otAx-RTDfpi_tcXkMuQV0JOIXtYgevgnsikLKKOkI6U,11541
sqlalchemy/dialects/postgresql/json.py,sha256=73nmtG-7TN01DytjIJnG8CHa8Q7KwlRdN0bhx4fEAT0,11590
sqlalchemy/dialects/postgresql/named_types.py,sha256=3IV1ufo7zJjKmX4VtGDEnoXE6xEqLJAtGG82IiqHXwY,17594
sqlalchemy/dialects/postgresql/operators.py,sha256=NsAaWun_tL3d_be0fs9YL6T4LPKK6crnmFxxIJHgyeY,2808
sqlalchemy/dialects/postgresql/pg8000.py,sha256=3yoekiWSF-xnaWMqG76XrYPMqerg-42TdmfsW_ivK9E,18640
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=hY3NXEUHxTWD4umhd2aowNu3laC-61Q_qQ_pReyXTUM,9254
sqlalchemy/dialects/postgresql/provision.py,sha256=t6TZj0XaWG9zrpCjNr0oJRjAC_WQzaNdp3kaKJIbS8I,5770
sqlalchemy/dialects/postgresql/psycopg.py,sha256=ACkfuT87vUJEW4kJyUqDYTwlnl5u0FZFlDcvLzxNzFQ,23226
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=kwEnflz5bAqJcuO_20eYiCtha_a4m_tg5_lppdDnaeU,31998
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=M7wAYSL6Pvt-4nbfacAHGyyw4XMKJ_bQZ1tc1pBtIdg,1756
sqlalchemy/dialects/postgresql/ranges.py,sha256=6CgV7qkxEMJ9AQsiibo_XBLJYzGh-2ZxpG83sRaesVY,32949
sqlalchemy/dialects/postgresql/types.py,sha256=Jfxqw9JaKNOq29JRWBublywgb3lLMyzx8YZI7CXpS2s,7300
sqlalchemy/dialects/sqlite/__init__.py,sha256=lp9DIggNn349M-7IYhUA8et8--e8FRExWD2V_r1LJk4,1182
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=g3qGV6jmiXabWyb3282g_Nmxtj1jThxGSe9C9yalb-U,12345
sqlalchemy/dialects/sqlite/base.py,sha256=P15AcHoVWS20mMNQqnCfj94owUsTEX2ULtRn290AoF0,97837
sqlalchemy/dialects/sqlite/dml.py,sha256=9GE55WvwoktKy2fHeT-Wbc9xPHgsbh5oBfd_fckMH5Q,8443
sqlalchemy/dialects/sqlite/json.py,sha256=Eoplbb_4dYlfrtmQaI8Xddd2suAIHA-IdbDQYM-LIhs,2777
sqlalchemy/dialects/sqlite/provision.py,sha256=UCpmwxf4IWlrpb2eLHGbPTpCFVbdI_KAh2mKtjiLYao,5632
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=OL2S_05DK9kllZj6DOz7QtEl7jI7syxjW6woS725ii4,5356
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=aDp47n0J509kl2hDchoaBKXEQVZtkux54DwfKytUAe4,28068
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
sqlalchemy/engine/__init__.py,sha256=Stb2oV6l8w65JvqEo6J4qtKoApcmOpXy3AAxQud4C1o,2818
sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
sqlalchemy/engine/_py_processors.py,sha256=j9i_lcYYQOYJMcsDerPxI0sVFBIlX5sqoYMdMJlgWPI,3744
sqlalchemy/engine/_py_row.py,sha256=wSqoUFzLOJ1f89kgDb6sJm9LUrF5LMFpXPcK1vUsKcs,3787
sqlalchemy/engine/_py_util.py,sha256=f2DI3AN1kv6EplelowesCVpwS8hSXNufRkZoQmJtSH8,2484
sqlalchemy/engine/base.py,sha256=frWSMmt3dlentYH4QNN3cijdGzp8NbunColUZwWsWgI,122958
sqlalchemy/engine/characteristics.py,sha256=N3kbvw_ApMh86wb5yAGnxtPYD4YRhYMWion1H_aVZBI,4765
sqlalchemy/engine/create.py,sha256=mYJtOG2ZKM8sgyfjpGpamW15RDU7JXi5s6iibbJHMIs,33206
sqlalchemy/engine/cursor.py,sha256=cFq61yrw76k-QR_xNUBWuL-Zeyb14ltG-6jo2Q2iuuw,76392
sqlalchemy/engine/default.py,sha256=2wwKKdsagb3QTajRSEw8Hl-EnQ-LmRxy822xOGyenHc,84648
sqlalchemy/engine/events.py,sha256=c0unNFFiHzTAvkUtXoJaxzMFMDwurBkHiiUhuN8qluc,37381
sqlalchemy/engine/interfaces.py,sha256=fcVHOmnMo7JZLHzgSKoK3QsdVHH7kJ_AmrDvwW9Ka3k,112936
sqlalchemy/engine/mock.py,sha256=yvpxgFmRw5G4QsHeF-ZwQGHKES-HqQOucTxFtN1uzdk,4179
sqlalchemy/engine/processors.py,sha256=XyfINKbo-2fjN-mW55YybvFyQMOil50_kVqsunahkNs,2379
sqlalchemy/engine/reflection.py,sha256=gwGs8y7x6py5z-ZWx3hQqQrwpHepMCTJyQcFwWJjPlw,75364
sqlalchemy/engine/result.py,sha256=j6BI4Wj2bziQNQG5OlG_Cm4KcNWY9AoYvTXVlJUU-D8,77603
sqlalchemy/engine/row.py,sha256=9AAQo9zYDL88GcZ3bjcQTwMT-YIcuGTSMAyTfmBJ_yM,12032
sqlalchemy/engine/strategies.py,sha256=DqFSWaXJPL-29Omot9O0aOcuGL8KmCGyOvnPGDkAJoE,442
sqlalchemy/engine/url.py,sha256=8eWkUaIUyDExOcJ2D4xJXRcn4OY1GQJ3Q2duSX6UGAg,30784
sqlalchemy/engine/util.py,sha256=bNirO8k1S8yOW61uNH-a9QrWtAJ9VGFgbiR0lk1lUQU,5682
sqlalchemy/event/__init__.py,sha256=KBrp622xojnC3FFquxa2JsMamwAbfkvzfv6Op0NKiYc,997
sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
sqlalchemy/event/api.py,sha256=DtDVgjKSorOfp9MGJ7fgMWrj4seC_hkwF4D8CW1RFZU,8226
sqlalchemy/event/attr.py,sha256=X8QeHGK4ioSYht1vkhc11f606_mq_t91jMNIT314ubs,20751
sqlalchemy/event/base.py,sha256=270OShTD17-bSFUFnPtKdVnB0NFJZ2AouYPo1wT0aJw,15127
sqlalchemy/event/legacy.py,sha256=teMPs00fO-4g8a_z2omcVKkYce5wj_1uvJO2n2MIeuo,8227
sqlalchemy/event/registry.py,sha256=nfTSSyhjZZXc5wseWB4sXn-YibSc0LKX8mg17XlWmAo,10835
sqlalchemy/events.py,sha256=k-ZD38aSPD29LYhED7CBqttp5MDVVx_YSaWC2-cu9ec,525
sqlalchemy/exc.py,sha256=M_8-O1hd8i6gbyx-TapV400p_Lxq2QqTGMXUAO-YgCc,23976
sqlalchemy/ext/__init__.py,sha256=S1fGKAbycnQDV01gs-JWGaFQ9GCD4QHwKcU2wnugg_o,322
sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=5O5ANHARO8jytvqBQmOu-QjNVE4Hh3tfYquqKAj5ajs,65771
sqlalchemy/ext/asyncio/__init__.py,sha256=1OqSxEyIUn7RWLGyO12F-jAUIvk1I6DXlVy80-Gvkds,1317
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=fl7wxZD9KjgFiCtG3WXrYjHEvanamcsodCqq9pH9lOk,8905
sqlalchemy/ext/asyncio/engine.py,sha256=S_IRWX4QAjj2veLSu4Y3gKBIXkKQt7_2StJAK2_KUDY,48190
sqlalchemy/ext/asyncio/exc.py,sha256=8sII7VMXzs2TrhizhFQMzSfcroRtiesq8o3UwLfXSgQ,639
sqlalchemy/ext/asyncio/result.py,sha256=ID2eh-NHW-lnNFTxbKhje8fr-tnsucUsiw_jcpGcSPc,30409
sqlalchemy/ext/asyncio/scoping.py,sha256=UxHAFxtWKqA7TEozyN2h7MJyzSspTCrS-1SlgQLTExo,52608
sqlalchemy/ext/asyncio/session.py,sha256=mkFFC1C2mPuopz3BwkfSVTlp3vuIDc8hxiQx5ky5rvc,63103
sqlalchemy/ext/automap.py,sha256=r0mUSyogNyqdBL4m9AA1NXbLiTLQmtvyQymsssNEipo,61581
sqlalchemy/ext/baked.py,sha256=H6T1il7GY84BhzPFj49UECSpZh_eBuiHomA-QIsYOYQ,17807
sqlalchemy/ext/compiler.py,sha256=ONPoxoKD2yUS9R2-oOhmPsA7efm-Bs0BXo7HE1dGlsU,20391
sqlalchemy/ext/declarative/__init__.py,sha256=20psLdFQbbOWfpdXHZ0CTY6I1k4UqXvKemNVu1LvPOI,1818
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=uCjN1GisQt54AjqYnKYzJdUjnGd2pZBW47WWdPlS7FE,19547
sqlalchemy/ext/horizontal_shard.py,sha256=wuwAPnHymln0unSBnyx-cpX0AfESKSsypaSQTYCvzDk,16750
sqlalchemy/ext/hybrid.py,sha256=IYkCaPZ29gm2cPKPg0cWMkLCEqMykD8-JJTvgacGbmc,52458
sqlalchemy/ext/indexable.py,sha256=UkTelbydKCdKelzbv3HWFFavoET9WocKaGRPGEOVfN8,11032
sqlalchemy/ext/instrumentation.py,sha256=sg8ghDjdHSODFXh_jAmpgemnNX1rxCeeXEG3-PMdrNk,15707
sqlalchemy/ext/mutable.py,sha256=L5ZkHBGYhMaqO75Xtyrk2DBR44RDk0g6Rz2HzHH0F8Q,37355
sqlalchemy/ext/mypy/__init__.py,sha256=0WebDIZmqBD0OTq5JLtd_PmfF9JGxe4d4Qv3Ml3PKUg,241
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
sqlalchemy/ext/mypy/apply.py,sha256=Aek_-XA1eXihT4attxhfE43yBKtCgsxBSb--qgZKUqc,10550
sqlalchemy/ext/mypy/decl_class.py,sha256=1vVJRII2apnLTUbc5HkJS6Z2GueaUv_eKvhbqh7Wik4,17384
sqlalchemy/ext/mypy/infer.py,sha256=KVnmLFEVS33Al8pUKI7MJbJQu3KeveBUMl78EluBORw,19369
sqlalchemy/ext/mypy/names.py,sha256=Q3ef8XQBgVm9WUwlItqlYCXDNi_kbV5DdLEgbtEMEI8,10479
sqlalchemy/ext/mypy/plugin.py,sha256=74ML8LI9xar0V86oCxnPFv5FQGEEfUzK64vOay4BKFs,9750
sqlalchemy/ext/mypy/util.py,sha256=DKRaurkXHI2lAMAAcEO5GLXbX_m2Xqy7l_juh8Byf5U,9960
sqlalchemy/ext/orderinglist.py,sha256=TGYbsGH72wEZcFNQDYDsZg9OSPuzf__P8YX8_2HtYUo,14384
sqlalchemy/ext/serializer.py,sha256=D0g4jMZkRk0Gjr0L-FZe81SR63h0Zs-9JzuWtT_SD7k,6140
sqlalchemy/future/__init__.py,sha256=q2mw-gxk_xoxJLEvRoyMha3vO1xSRHrslcExOHZwmPA,512
sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/future/engine.py,sha256=AgIw6vMsef8W6tynOTkxsjd6o_OQDwGjLdbpoMD8ue8,495
sqlalchemy/inspection.py,sha256=MF-LE358wZDUEl1IH8-Uwt2HI65EsQpQW5o5udHkZwA,5063
sqlalchemy/log.py,sha256=8x9UR3nj0uFm6or6bQF-JWb4fYv2zOeQjG_w-0wOJFA,8607
sqlalchemy/orm/__init__.py,sha256=ZYys5nL3RFUDCMOLFDBrRI52F6er3S1U1OY9TeORuKs,8463
sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
sqlalchemy/orm/_orm_constructors.py,sha256=c4RMiYKIXzOTjuPIBbyRIzpizYjUBm2sd7k0n1vqb2o,100717
sqlalchemy/orm/_typing.py,sha256=DVBfpHmDVK4x1zxaGJPY2GoTrAsyR6uexv20Lzf1afc,4973
sqlalchemy/orm/attributes.py,sha256=lorOHBJvJJYndOuafWJhHBbQ1pR6FAyimhqz-mErBRQ,92534
sqlalchemy/orm/base.py,sha256=re6A8ooMxLyfCAeQfhBwwxeJQkiH7EtzzOZIVIiTPlw,27466
sqlalchemy/orm/bulk_persistence.py,sha256=gNp2gFsGvV-5NhOPBL51TqKgR3ZRrS43jSwqUF-SwgY,71430
sqlalchemy/orm/clsregistry.py,sha256=IjoDZwWpjG42ji59L4M1EZvjBEoXPZykzENDtKWxU8A,17974
sqlalchemy/orm/collections.py,sha256=WEKuUCRgLhDhJEIBhZ21UrE0pBOyRm2zxD20GvbgA9g,52243
sqlalchemy/orm/context.py,sha256=FMPyw07OA9OXWQ32RQx52AEa2xTLSkqdYgx9R_yN1x0,112955
sqlalchemy/orm/decl_api.py,sha256=_WPKQ_vSE5k2TLtNmkaxxYmvbhZvkRMrrvCeDxdqDQE,63998
sqlalchemy/orm/decl_base.py,sha256=FTHf3bCVlg52KR6JWVEwuG3XjOaGQ5KJMYFYXastGhw,82832
sqlalchemy/orm/dependency.py,sha256=hgjksUWhgbmgHK5GdJdiDCBgDAIGQXIrY-Tj79tbL2k,47631
sqlalchemy/orm/descriptor_props.py,sha256=dR_h4Gvdtpcdp4sj_ZOR4P5Nng2J2vhsvFHouRLlntc,37244
sqlalchemy/orm/dynamic.py,sha256=rWAZ-nfAkREuNjt8e_FRdqYrvHDdbODn1CcfyP8Y18k,9816
sqlalchemy/orm/evaluator.py,sha256=tRETz4dNZ71VsEA8nG0hpefByB-W0zBt02IxcSR5H2g,12353
sqlalchemy/orm/events.py,sha256=1PiGT7JMUWTDAb3X1T79P02BMVDmcWEpatz1FwpLqoA,127777
sqlalchemy/orm/exc.py,sha256=IP40P-wOeXhkYk0YizuTC3wqm6W9cPTaQU08f5MMaQ0,7413
sqlalchemy/orm/identity.py,sha256=jHdCxCpCyda_8mFOfGmN_Pr0XZdKiU-2hFZshlNxbHs,9249
sqlalchemy/orm/instrumentation.py,sha256=M-kZmkUvHUxtf-0mCA8RIM5QmMH1hWlYR_pKMwaidjA,24321
sqlalchemy/orm/interfaces.py,sha256=Hmf1BjbfOarZRgMlruqghR7cgH2xyugA9v5t0x-a-wU,48502
sqlalchemy/orm/loading.py,sha256=9RacpzFOWbuKgPRWHFmyIvD4fYCLAnkpwBFASyQ2CoI,58277
sqlalchemy/orm/mapped_collection.py,sha256=zK3d3iozORzDruBUrAmkVC0RR3Orj5szk-TSQ24xzIU,19682
sqlalchemy/orm/mapper.py,sha256=bfoRzNKKnjF-CDvr2Df7HZC9TepvtuQ49LRz_fW7DGQ,171088
sqlalchemy/orm/path_registry.py,sha256=sJZMv_WPqUpHfQtKWaX3WYFeKBcNJ8C3wOM2mkBGkTE,25920
sqlalchemy/orm/persistence.py,sha256=dzyB2JOXNwQgaCbN8kh0sEz00WFePr48qf8NWVCUZH8,61701
sqlalchemy/orm/properties.py,sha256=eDPFzxYUgdM3uWjHywnb1XW-i0tVKKyx7A2MCD31GQU,29306
sqlalchemy/orm/query.py,sha256=Cf0e94-u1XyoXJoOAmr4iFvtCwNY98kxUYyMPenaWTE,117708
sqlalchemy/orm/relationships.py,sha256=dS5SY0v1MiD7iCNnAQlHaI6prUQhL5EkXT7ijc8FR8E,128644
sqlalchemy/orm/scoping.py,sha256=rJVc7_Lic4V00HZ-UvYFWkVpXqdrMayRmIs4fIwH1UA,78688
sqlalchemy/orm/session.py,sha256=CZJTQ-wPwIy0c3AMFxgJnBgaft6eEf4JzcCLcaaCSjg,195979
sqlalchemy/orm/state.py,sha256=327-F4TG29s6mLC8oWRiO2PuvYIUZzY1MqUPjtUy7M4,37670
sqlalchemy/orm/state_changes.py,sha256=qKYg7NxwrDkuUY3EPygAztym6oAVUFcP2wXn7QD3Mz4,6815
sqlalchemy/orm/strategies.py,sha256=U5EL1FBXOmkIV5HsryIlTvfQfoajYGCnvTbuKBnH0pQ,116224
sqlalchemy/orm/strategy_options.py,sha256=oeDl_rMDNAC_90N7ytsni-psXWAeQMhABQFyKBSmai0,85353
sqlalchemy/orm/sync.py,sha256=g7iZfSge1HgxMk9SKRgUgtHEbpbZ1kP_CBqOIdTOXqc,5779
sqlalchemy/orm/unitofwork.py,sha256=fiVaqcymbDDHRa1NjS90N9Z466nd5pkJOEi1dHO6QLY,27033
sqlalchemy/orm/util.py,sha256=Q0JT2JydyGiNiYT8AVe9B6jOdBRVHSOVDIcWOsXXIUQ,80929
sqlalchemy/orm/writeonly.py,sha256=SYu2sAaHZONk2pW4PmtE871LG-O0P_bjidvKzY1H_zI,22305
sqlalchemy/pool/__init__.py,sha256=qiDdq4r4FFAoDrK6ncugF_i6usi_X1LeJt-CuBHey0s,1804
sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
sqlalchemy/pool/base.py,sha256=WF4az4ZKuzQGuKeSJeyexaYjmWZUvYdC6KIi8zTGodw,52236
sqlalchemy/pool/events.py,sha256=xGjkIUZl490ZDtCHqnQF9ZCwe2Jv93eGXmnQxftB11E,13147
sqlalchemy/pool/impl.py,sha256=JwpALSkH-pCoO_6oENbkHYY00Jx9nlttyoI61LivRNc,18944
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/schema.py,sha256=dKiWmgHYjcKQ4TiiD6vD0UMmIsD8u0Fsor1M9AAeGUs,3194
sqlalchemy/sql/__init__.py,sha256=UNa9EUiYWoPayf-FzNcwVgQvpsBdInPZfpJesAStN9o,5820
sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
sqlalchemy/sql/_dml_constructors.py,sha256=YdBJex0MCVACv4q2nl_ii3uhxzwU6aDB8zAsratX5UQ,3867
sqlalchemy/sql/_elements_constructors.py,sha256=1SX6o1ezeB8C9DAa2m0WxmfhM3ji3FeCprXFQkNerNY,63048
sqlalchemy/sql/_orm_types.py,sha256=T-vjcry4C1y0GToFKVxQCnmly_-Zsq4IO4SHN6bvUF4,625
sqlalchemy/sql/_py_util.py,sha256=hiM9ePbRSGs60bAMxPFuJCIC_p9SQ1VzqXGiPchiYwE,2173
sqlalchemy/sql/_selectable_constructors.py,sha256=wjE6HrLm9cR7bxvZXT8sFLUqT6t_J9G1XyQCnYmBDl0,18780
sqlalchemy/sql/_typing.py,sha256=oqwrYHVMtK-AuKGH9c4SgfiOEJUt5vjkzSEzzscMHkM,12771
sqlalchemy/sql/annotation.py,sha256=aqbbVz9kfbCT3_66CZ9GEirVN197Cukoqt8rq48FgkQ,18245
sqlalchemy/sql/base.py,sha256=M1b-Tg49ikUW2mnZv0aI38oASG6dgeo4jBNWDgJgAg8,73925
sqlalchemy/sql/cache_key.py,sha256=0Db8mR8IrpBgdzXs4TGTt98LOpL3c7KABd72MAPKUQQ,33668
sqlalchemy/sql/coercions.py,sha256=KjrVc2ks6cx_o6-_Zpu6uPuNGjJy2xvFzXy8z5oshQg,40628
sqlalchemy/sql/compiler.py,sha256=hrTptbOKIgVIHapywj4Lk5OMwpXvHS-KGg3odFwlo-I,274687
sqlalchemy/sql/crud.py,sha256=HBX4QPtW_PYYJmIKfNr-wE8IdEr963N24WXzFBUZOo0,56514
sqlalchemy/sql/ddl.py,sha256=CIqMilCKfuQnF0lrZsQdTxgrbXqcTauKr0Ojzj77PFQ,45602
sqlalchemy/sql/default_comparator.py,sha256=utXWsZVGEjflhFfCT4ywa6RnhORc1Rryo87Hga71Rps,16707
sqlalchemy/sql/dml.py,sha256=pn0Lm1ofC5qVZzwGWFW73lPCiNba8OsTeemurJgwRyg,65614
sqlalchemy/sql/elements.py,sha256=YfccXzQc9DlgF8q15kDf-zKBUY_vpIe0FGaVDBPoic4,176544
sqlalchemy/sql/events.py,sha256=iC_Q1Htm1Aobt5tOYxWfHHqNpoytrULORmUKcusH_-E,18290
sqlalchemy/sql/expression.py,sha256=VMX-dLpsZYnVRJpYNDozDUgaj7iQ0HuewUKVefD57PE,7586
sqlalchemy/sql/functions.py,sha256=kMMYplvuIHFAPwxBI03SizwaLcYEHzysecWk-R1V-JM,63762
sqlalchemy/sql/lambdas.py,sha256=DP0Qz7Ypo8QhzMwygGHYgRhwJMx-rNezO1euouH3iYU,49292
sqlalchemy/sql/naming.py,sha256=ZHs1qSV3ou8TYmZ92uvU3sfdklUQlIz4uhe330n05SU,6858
sqlalchemy/sql/operators.py,sha256=himArRqBzrljob3Zfhi_ZS-Jleg1u6YFp0g3d7Co6IM,76106
sqlalchemy/sql/roles.py,sha256=pOsVn_OZD7mF2gJByHf24Rjopt0_Hu3dUCEOK5t4KS8,7662
sqlalchemy/sql/schema.py,sha256=iFleWHkxi-3mKGiK_N1TzUqxnNwOpypB4bWDuAVQe8c,229717
sqlalchemy/sql/selectable.py,sha256=cgyV0AsPy4CXAFdhMiTCkbgaHiFilW9sclzxlHJKH3o,236460
sqlalchemy/sql/sqltypes.py,sha256=fajBVE_CCJykrQpOOnb8_HFOKCSoUF48AxljJymvGk4,127330
sqlalchemy/sql/traversals.py,sha256=3ScTC1fh1-y8Y478h_2Azmd2xdQdWPWkDve4YgrwMf8,33664
sqlalchemy/sql/type_api.py,sha256=q_FieFRALHiRkhy5Bj-cVgFHa7DXF1dP23FaRRYKeho,83717
sqlalchemy/sql/util.py,sha256=qGHQF-tPCj-m1FBerzT7weCanGcXU7dK5m-W7NHio-4,48077
sqlalchemy/sql/visitors.py,sha256=71wdVvhhZL4nJvVwFAs6ssaW-qZgNRSmKjpAcOzF_TA,36317
sqlalchemy/testing/__init__.py,sha256=zgitAYzsCWT_U48ZiifXHHLJFo8nZBYmI-5TueA4_lE,3160
sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
sqlalchemy/testing/assertions.py,sha256=gL0rA7CCZJbcVgvWOPV91tTZTRwQc1_Ta0-ykBn83Ew,31439
sqlalchemy/testing/assertsql.py,sha256=IgQG7l94WaiRP8nTbilJh1ZHZl125g7GPq-S5kmQZN0,16817
sqlalchemy/testing/asyncio.py,sha256=kM8uuOqDBagZF0r9xvGmsiirUVLUQ_KBzjUFU67W-b8,3830
sqlalchemy/testing/config.py,sha256=AqyH1qub_gDqX0BvlL-JBQe7N-t2wo8655FtwblUNOY,12090
sqlalchemy/testing/engines.py,sha256=HFJceEBD3Q_TTFQMTtIV5wGWO_a7oUgoKtUF_z636SM,13481
sqlalchemy/testing/entities.py,sha256=IphFegPKbff3Un47jY6bi7_MQXy6qkx_50jX2tHZJR4,3354
sqlalchemy/testing/exclusions.py,sha256=T8B01hmm8WVs-EKcUOQRzabahPqblWJfOidi6bHJ6GA,12460
sqlalchemy/testing/fixtures/__init__.py,sha256=dMClrIoxqlYIFpk2ia4RZpkbfxsS_3EBigr9QsPJ66g,1198
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
sqlalchemy/testing/fixtures/base.py,sha256=9r_J2ksiTzClpUxW0TczICHrWR7Ny8PV8IsBz6TsGFI,12256
sqlalchemy/testing/fixtures/mypy.py,sha256=gdxiwNFIzDlNGSOdvM3gbwDceVCC9t8oM5kKbwyhGBk,11973
sqlalchemy/testing/fixtures/orm.py,sha256=8EFbnaBbXX_Bf4FcCzBUaAHgyVpsLGBHX16SGLqE3Fg,6095
sqlalchemy/testing/fixtures/sql.py,sha256=KZMjco9_3dsuspmkew5Ejp88Wlr9PsSBB1qeJGFxQAk,15900
sqlalchemy/testing/pickleable.py,sha256=U9mIqk-zaxq9Xfy7HErP7UrKgTov-A3QFnhZh-NiOjI,2833
sqlalchemy/testing/plugin/__init__.py,sha256=79F--BIY_NTBzVRIlJGgAY5LNJJ3cD19XvrAo4X0W9A,247
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=oYScMbEW4pCnWlPEAq1insFruCXFQeEVBwo__i4McpU,1685
sqlalchemy/testing/plugin/plugin_base.py,sha256=BgNzWNEmgpK4CwhyblQQKnH-7FDKVi_Uul5vw8fFjBU,21578
sqlalchemy/testing/plugin/pytestplugin.py,sha256=6jkQHH2VQMD75k2As9CuWXmEy9jrscoFRhCNg6-PaTw,27656
sqlalchemy/testing/profiling.py,sha256=PbuPhRFbauFilUONeY3tV_Y_5lBkD7iCa8VVyH2Sk9Y,10148
sqlalchemy/testing/provision.py,sha256=3qFor_sN1FFlS7odUGkKqLUxGmQZC9XM67I9vQ_zeXo,14626
sqlalchemy/testing/requirements.py,sha256=Z__o-1Rj9B7dI8E_l3qsKTvsg0rK198vB0A1p7A5dcM,52832
sqlalchemy/testing/schema.py,sha256=lr4GkGrGwagaHMuSGzWdzkMaj3HnS7dgfLLWfxt__-U,6513
sqlalchemy/testing/suite/__init__.py,sha256=Y5DRNG0Yl1u3ypt9zVF0Z9suPZeuO_UQGLl-wRgvTjU,722
sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=6zBC3W2OwX1Xs-HedzchcKN2S7EaLNkgkvV_JSZ_Pq0,6451
sqlalchemy/testing/suite/test_ddl.py,sha256=1Npkf0C_4UNxphthAGjG078n0vPEgnSIHpDu5MfokxQ,12031
sqlalchemy/testing/suite/test_deprecations.py,sha256=BcJxZTcjYqeOAENVElCg3hVvU6fkGEW3KGBMfnW8bng,5337
sqlalchemy/testing/suite/test_dialect.py,sha256=EH4ZQWbnGdtjmx5amZtTyhYmrkXJCvW1SQoLahoE7uk,22923
sqlalchemy/testing/suite/test_insert.py,sha256=9azifj6-OCD7s8h_tAO1uPw100ibQv8YoKc_VA3hn3c,18824
sqlalchemy/testing/suite/test_reflection.py,sha256=7sML8-owubSQeEM7Ve6LbnB8uIVlNV00WWepKwII2a8,109648
sqlalchemy/testing/suite/test_results.py,sha256=X720GafdA4p75SOGS93j-dXkt6QDEnnJbU2bh18VCcg,16914
sqlalchemy/testing/suite/test_rowcount.py,sha256=3KDTlRgjpQ1OVfp__1cv8Hvq4CsDKzmrhJQ_WIJWoJg,7900
sqlalchemy/testing/suite/test_select.py,sha256=ulRZQJlzkwwcewEyisuBEXVWFR0Wshz9MEDxYYiYLwQ,61732
sqlalchemy/testing/suite/test_sequence.py,sha256=66bCoy4xo99GBSaX6Hxb88foANAykLGRz1YEKbvpfuA,9923
sqlalchemy/testing/suite/test_types.py,sha256=K4MGHvnTtgqeksoQOBCZRVQYC7HoYO6Z6rVt5vj2t9o,67805
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=c3_eIxLyORuSOhNDP0jWKxPyUf3SwMFpdalxtquwqlM,6141
sqlalchemy/testing/suite/test_update_delete.py,sha256=yTiM2unnfOK9rK8ZkqeTTU_MkT-RsKFLmdYliniZfAY,3994
sqlalchemy/testing/util.py,sha256=qldXKw8gRJ4I2x3uXsBssYMqwatmcMFMTOveRQCmfDU,14469
sqlalchemy/testing/warnings.py,sha256=fJ-QJUY2zY2PPxZJKv9medW-BKKbCNbA4Ns_V3YwFXM,1546
sqlalchemy/types.py,sha256=cQFM-hFRmaf1GErun1qqgEs6QxufvzMuwKqj9tuMPpE,3168
sqlalchemy/util/__init__.py,sha256=B3bedg-LSQEscwqgmYYU-VENUX8_zAE3q9vb7tkfJNY,8277
sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
sqlalchemy/util/_collections.py,sha256=aZoSAVOXnHBoYEsxDOi0O9odg9wqLbGb7PGjaWQKiyY,20078
sqlalchemy/util/_concurrency_py3k.py,sha256=zb0Bow2Y_QjTdaACEviBEEaFvqDuVvpJfmwCjaw8xNE,9170
sqlalchemy/util/_has_cy.py,sha256=wCQmeSjT3jaH_oxfCEtGk-1g0gbSpt5MCK5UcWdMWqk,1247
sqlalchemy/util/_py_collections.py,sha256=U6L5AoyLdgSv7cdqB4xxQbw1rpeJjyOZVXffgxgga8I,16714
sqlalchemy/util/compat.py,sha256=fJWqZVkW7qPY2l0DxK-7fHKjyp7b-wa-5wOlmuZlick,8724
sqlalchemy/util/concurrency.py,sha256=9lT_cMoO1fZNdY8QTUZ22oeSf-L5I-79Ke7chcBNPA0,3304
sqlalchemy/util/deprecations.py,sha256=YBwvvYhSB8LhasIZRKvg_-WNoVhPUcaYI1ZrnjDn868,11971
sqlalchemy/util/langhelpers.py,sha256=uIK3szZuq9aMnO-vEpSlNekNWv4I-E391e56bkTnUm0,65090
sqlalchemy/util/preloaded.py,sha256=az7NmLJLsqs0mtM9uBkIu10-841RYDq8wOyqJ7xXvqE,5904
sqlalchemy/util/queue.py,sha256=CaeSEaYZ57YwtmLdNdOIjT5PK_LCuwMFiO0mpp39ybM,10185
sqlalchemy/util/tool_support.py,sha256=9braZyidaiNrZVsWtGmkSmus50-byhuYrlAqvhjcmnA,6135
sqlalchemy/util/topological.py,sha256=N3M3Le7KzGHCmqPGg0ZBqixTDGwmFLhOZvBtc4rHL_g,3458
sqlalchemy/util/typing.py,sha256=lFcGo1dJbZIZ9drAnvef-PzP0cX4LMxMSwgk3lJBb0g,18182

View File

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: setuptools (75.1.0)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

View File

@@ -0,0 +1,33 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml
# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
from sys import version_info
exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
raise exc("No module named '_yaml'")
else:
from yaml._yaml import *
import warnings
warnings.warn(
'The _yaml extension module is now located at yaml._yaml'
' and its location is subject to change. To use the'
' LibYAML-based parser and emitter, import from `yaml`:'
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
DeprecationWarning
)
del warnings
# Don't `del yaml` here because yaml is actually an existing
# namespace member of _yaml.
__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 Amethyst Reese
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,122 @@
Metadata-Version: 2.1
Name: aiosqlite
Version: 0.20.0
Summary: asyncio bridge to the standard sqlite3 module
Author-email: Amethyst Reese <amy@n7.gg>
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: AsyncIO
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Libraries
Requires-Dist: typing_extensions >= 4.0
Requires-Dist: attribution==1.7.0 ; extra == "dev"
Requires-Dist: black==24.2.0 ; extra == "dev"
Requires-Dist: coverage[toml]==7.4.1 ; extra == "dev"
Requires-Dist: flake8==7.0.0 ; extra == "dev"
Requires-Dist: flake8-bugbear==24.2.6 ; extra == "dev"
Requires-Dist: flit==3.9.0 ; extra == "dev"
Requires-Dist: mypy==1.8.0 ; extra == "dev"
Requires-Dist: ufmt==2.3.0 ; extra == "dev"
Requires-Dist: usort==1.0.8.post1 ; extra == "dev"
Requires-Dist: sphinx==7.2.6 ; extra == "docs"
Requires-Dist: sphinx-mdinclude==0.5.3 ; extra == "docs"
Project-URL: Documentation, https://aiosqlite.omnilib.dev
Project-URL: Github, https://github.com/omnilib/aiosqlite
Provides-Extra: dev
Provides-Extra: docs
aiosqlite: Sqlite for AsyncIO
==============================
.. image:: https://readthedocs.org/projects/aiosqlite/badge/?version=latest
:target: https://aiosqlite.omnilib.dev/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://img.shields.io/pypi/v/aiosqlite.svg
:target: https://pypi.org/project/aiosqlite
:alt: PyPI Release
.. image:: https://img.shields.io/badge/change-log-blue
:target: https://github.com/omnilib/aiosqlite/blob/master/CHANGELOG.md
:alt: Changelog
.. image:: https://img.shields.io/pypi/l/aiosqlite.svg
:target: https://github.com/omnilib/aiosqlite/blob/master/LICENSE
:alt: MIT Licensed
aiosqlite provides a friendly, async interface to sqlite databases.
It replicates the standard ``sqlite3`` module, but with async versions
of all the standard connection and cursor methods, plus context managers for
automatically closing connections and cursors:
.. code-block:: python
async with aiosqlite.connect(...) as db:
await db.execute("INSERT INTO some_table ...")
await db.commit()
async with db.execute("SELECT * FROM some_table") as cursor:
async for row in cursor:
...
It can also be used in the traditional, procedural manner:
.. code-block:: python
db = await aiosqlite.connect(...)
cursor = await db.execute('SELECT * FROM some_table')
row = await cursor.fetchone()
rows = await cursor.fetchall()
await cursor.close()
await db.close()
aiosqlite also replicates most of the advanced features of ``sqlite3``:
.. code-block:: python
async with aiosqlite.connect(...) as db:
db.row_factory = aiosqlite.Row
async with db.execute('SELECT * FROM some_table') as cursor:
async for row in cursor:
value = row['column']
await db.execute('INSERT INTO some_table (column) VALUES (?)', ['value'])
assert db.total_changes > 0
Install
-------
aiosqlite is compatible with Python 3.8 and newer.
You can install it from PyPI:
.. code-block:: console
$ pip install aiosqlite
Details
-------
aiosqlite allows interaction with SQLite databases on the main AsyncIO event
loop without blocking execution of other coroutines while waiting for queries
or data fetches. It does this by using a single, shared thread per connection.
This thread executes all actions within a shared request queue to prevent
overlapping actions.
Connection objects are proxies to the real connections, contain the shared
execution thread, and provide context managers to handle automatically closing
connections. Cursors are similarly proxies to the real cursors, and provide
async iterators to query results.
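For example, a minimal sketch of this model (assuming only the API shown
above): a query awaited in one coroutine does not stop other coroutines from
making progress on the same event loop:
.. code-block:: python
    import asyncio
    import aiosqlite
    async def query(db):
        # runs on the connection's worker thread; the event loop stays free
        async with db.execute("SELECT 1") as cursor:
            return await cursor.fetchone()
    async def main():
        async with aiosqlite.connect(":memory:") as db:
            # other coroutines keep running while the query is in flight
            row, _ = await asyncio.gather(query(db), asyncio.sleep(0))
            print(row)  # -> (1,)
    asyncio.run(main())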
License
-------
aiosqlite is copyright `Amethyst Reese <https://noswap.com>`_, and licensed under the
MIT license. I am providing code in this repository to you under an open source
license. This is my personal repository; the license you receive to my code
is from me and not from my employer. See the `LICENSE`_ file for details.
.. _LICENSE: https://github.com/omnilib/aiosqlite/blob/master/LICENSE

View File

@@ -0,0 +1,27 @@
aiosqlite-0.20.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiosqlite-0.20.0.dist-info/LICENSE,sha256=qwwXHcPvi_MlqEu3fYVUIfJhEzXd9uCIFrKSLE7cD3Y,1071
aiosqlite-0.20.0.dist-info/METADATA,sha256=Ec4xgHlepuHQhJ8Ldg7EkveV2Aj8wdc8P6gaUzfNU28,4275
aiosqlite-0.20.0.dist-info/RECORD,,
aiosqlite-0.20.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiosqlite-0.20.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
aiosqlite/__init__.py,sha256=JMIfUTUQRPkz3IrNFp3MK3uIMfGkMauUKZ3TS1vnqqk,893
aiosqlite/__pycache__/__init__.cpython-312.pyc,,
aiosqlite/__pycache__/__version__.cpython-312.pyc,,
aiosqlite/__pycache__/context.cpython-312.pyc,,
aiosqlite/__pycache__/core.cpython-312.pyc,,
aiosqlite/__pycache__/cursor.cpython-312.pyc,,
aiosqlite/__version__.py,sha256=6eee2XXMX-6B4aIDDNSY-QHv9gvpf7vhSTbYTyDebNU,157
aiosqlite/context.py,sha256=d78NqvrUgj3UQFFbwpJM0S1TjNtJJ5VpQ77pq3j3OIk,1372
aiosqlite/core.py,sha256=37wORBXtJ4xhDdqOVvqppDNM4XeNYD6RHslVkzMCBYI,12226
aiosqlite/cursor.py,sha256=XzdHbUCy76Ot53r5cAHvepU6LyXxNSNR2Oh9L1a4M6s,3504
aiosqlite/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiosqlite/tests/__init__.py,sha256=nKLBhR8SLx2dkw4UL052QKD7EzC9ctagfSJ9MpsDPrY,95
aiosqlite/tests/__main__.py,sha256=V3sH21pONg0y2-WN7rkfvgjf8zO-lS1wFyINq5hTdpA,167
aiosqlite/tests/__pycache__/__init__.cpython-312.pyc,,
aiosqlite/tests/__pycache__/__main__.cpython-312.pyc,,
aiosqlite/tests/__pycache__/helpers.cpython-312.pyc,,
aiosqlite/tests/__pycache__/perf.cpython-312.pyc,,
aiosqlite/tests/__pycache__/smoke.cpython-312.pyc,,
aiosqlite/tests/helpers.py,sha256=ghmZrf_nEfjkuRpZkCH9Pmg2ZnC19c2Leiq9nXqVX_g,727
aiosqlite/tests/perf.py,sha256=pBCOKkTLh0j4cz7ZWe3WNxsVmdKTKAw0VWN_K-YwRmc,6593
aiosqlite/tests/smoke.py,sha256=9B-6Wo4rLP9rfdyRxG_QY2JAJtq7fDTJoT8gjeCkWi4,16580

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.9.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1,44 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
"""asyncio bridge to the standard sqlite3 module"""
from sqlite3 import ( # pylint: disable=redefined-builtin
DatabaseError,
Error,
IntegrityError,
NotSupportedError,
OperationalError,
paramstyle,
ProgrammingError,
register_adapter,
register_converter,
Row,
sqlite_version,
sqlite_version_info,
Warning,
)
__author__ = "Amethyst Reese"
from .__version__ import __version__
from .core import connect, Connection, Cursor
__all__ = [
"__version__",
"paramstyle",
"register_adapter",
"register_converter",
"sqlite_version",
"sqlite_version_info",
"connect",
"Connection",
"Cursor",
"Row",
"Warning",
"Error",
"DatabaseError",
"IntegrityError",
"ProgrammingError",
"OperationalError",
"NotSupportedError",
]

View File

@@ -0,0 +1,7 @@
"""
This file is automatically generated by attribution.
Do not edit manually. Get more info at https://attribution.omnilib.dev
"""
__version__ = "0.20.0"

View File

@@ -0,0 +1,54 @@
# Copyright 2018
# Licensed under the MIT license
from functools import wraps
from typing import Any, AsyncContextManager, Callable, Coroutine, Generator, TypeVar
from .cursor import Cursor
_T = TypeVar("_T")
class Result(AsyncContextManager[_T], Coroutine[Any, Any, _T]):
__slots__ = ("_coro", "_obj")
def __init__(self, coro: Coroutine[Any, Any, _T]):
self._coro = coro
self._obj: _T
def send(self, value) -> None:
return self._coro.send(value)
def throw(self, typ, val=None, tb=None) -> None:
if val is None:
return self._coro.throw(typ)
if tb is None:
return self._coro.throw(typ, val)
return self._coro.throw(typ, val, tb)
def close(self) -> None:
return self._coro.close()
def __await__(self) -> Generator[Any, None, _T]:
return self._coro.__await__()
async def __aenter__(self) -> _T:
self._obj = await self._coro
return self._obj
async def __aexit__(self, exc_type, exc, tb) -> None:
if isinstance(self._obj, Cursor):
await self._obj.close()
def contextmanager(
method: Callable[..., Coroutine[Any, Any, _T]]
) -> Callable[..., Result[_T]]:
@wraps(method)
def wrapper(self, *args, **kwargs) -> Result[_T]:
return Result(method(self, *args, **kwargs))
return wrapper

View File

@@ -0,0 +1,394 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
"""
Core implementation of aiosqlite proxies
"""
import asyncio
import logging
import sqlite3
from functools import partial
from pathlib import Path
from queue import Empty, Queue, SimpleQueue
from threading import Thread
from typing import (
Any,
AsyncIterator,
Callable,
Generator,
Iterable,
Literal,
Optional,
Tuple,
Type,
Union,
)
from warnings import warn
from .context import contextmanager
from .cursor import Cursor
__all__ = ["connect", "Connection", "Cursor"]
LOG = logging.getLogger("aiosqlite")
IsolationLevel = Optional[Literal["DEFERRED", "IMMEDIATE", "EXCLUSIVE"]]
def set_result(fut: asyncio.Future, result: Any) -> None:
"""Set the result of a future if it hasn't been set already."""
if not fut.done():
fut.set_result(result)
def set_exception(fut: asyncio.Future, e: BaseException) -> None:
"""Set the exception of a future if it hasn't been set already."""
if not fut.done():
fut.set_exception(e)
_STOP_RUNNING_SENTINEL = object()
class Connection(Thread):
def __init__(
self,
connector: Callable[[], sqlite3.Connection],
iter_chunk_size: int,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__()
self._running = True
self._connection: Optional[sqlite3.Connection] = None
self._connector = connector
self._tx: SimpleQueue[Tuple[asyncio.Future, Callable[[], Any]]] = SimpleQueue()
self._iter_chunk_size = iter_chunk_size
if loop is not None:
warn(
"aiosqlite.Connection no longer uses the `loop` parameter",
DeprecationWarning,
)
def _stop_running(self):
self._running = False
# PEP 661 is not accepted yet, so we cannot type a sentinel
self._tx.put_nowait(_STOP_RUNNING_SENTINEL) # type: ignore[arg-type]
@property
def _conn(self) -> sqlite3.Connection:
if self._connection is None:
raise ValueError("no active connection")
return self._connection
def _execute_insert(self, sql: str, parameters: Any) -> Optional[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
cursor.execute("SELECT last_insert_rowid()")
return cursor.fetchone()
def _execute_fetchall(self, sql: str, parameters: Any) -> Iterable[sqlite3.Row]:
cursor = self._conn.execute(sql, parameters)
return cursor.fetchall()
def run(self) -> None:
"""
Execute function calls on a separate thread.
:meta private:
"""
while True:
# Continues running until all queue items are processed,
# even after connection is closed (so we can finalize all
# futures)
tx_item = self._tx.get()
if tx_item is _STOP_RUNNING_SENTINEL:
break
future, function = tx_item
try:
LOG.debug("executing %s", function)
result = function()
LOG.debug("operation %s completed", function)
future.get_loop().call_soon_threadsafe(set_result, future, result)
except BaseException as e: # noqa B036
LOG.debug("returning exception %s", e)
future.get_loop().call_soon_threadsafe(set_exception, future, e)
async def _execute(self, fn, *args, **kwargs):
"""Queue a function with the given arguments for execution."""
if not self._running or not self._connection:
raise ValueError("Connection closed")
function = partial(fn, *args, **kwargs)
future = asyncio.get_event_loop().create_future()
self._tx.put_nowait((future, function))
return await future
async def _connect(self) -> "Connection":
"""Connect to the actual sqlite database."""
if self._connection is None:
try:
future = asyncio.get_event_loop().create_future()
self._tx.put_nowait((future, self._connector))
self._connection = await future
except Exception:
self._stop_running()
self._connection = None
raise
return self
def __await__(self) -> Generator[Any, None, "Connection"]:
self.start()
return self._connect().__await__()
async def __aenter__(self) -> "Connection":
return await self
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
await self.close()
@contextmanager
async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor))
async def commit(self) -> None:
"""Commit the current transaction."""
await self._execute(self._conn.commit)
async def rollback(self) -> None:
"""Roll back the current transaction."""
await self._execute(self._conn.rollback)
async def close(self) -> None:
"""Complete queued queries/cursors and close the connection."""
if self._connection is None:
return
try:
await self._execute(self._conn.close)
except Exception:
LOG.info("exception occurred while closing connection")
raise
finally:
self._stop_running()
self._connection = None
@contextmanager
async def execute(
self, sql: str, parameters: Optional[Iterable[Any]] = None
) -> Cursor:
"""Helper to create a cursor and execute the given query."""
if parameters is None:
parameters = []
cursor = await self._execute(self._conn.execute, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def execute_insert(
self, sql: str, parameters: Optional[Iterable[Any]] = None
) -> Optional[sqlite3.Row]:
"""Helper to insert and get the last_insert_rowid."""
if parameters is None:
parameters = []
return await self._execute(self._execute_insert, sql, parameters)
@contextmanager
async def execute_fetchall(
self, sql: str, parameters: Optional[Iterable[Any]] = None
) -> Iterable[sqlite3.Row]:
"""Helper to execute a query and return all the data."""
if parameters is None:
parameters = []
return await self._execute(self._execute_fetchall, sql, parameters)
@contextmanager
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> Cursor:
"""Helper to create a cursor and execute the given multiquery."""
cursor = await self._execute(self._conn.executemany, sql, parameters)
return Cursor(self, cursor)
@contextmanager
async def executescript(self, sql_script: str) -> Cursor:
"""Helper to create a cursor and execute a user script."""
cursor = await self._execute(self._conn.executescript, sql_script)
return Cursor(self, cursor)
async def interrupt(self) -> None:
"""Interrupt pending queries."""
return self._conn.interrupt()
async def create_function(
self, name: str, num_params: int, func: Callable, deterministic: bool = False
) -> None:
"""
Create a user-defined function that can later be used
within SQL statements. It must be registered on the same thread
where query execution takes place, so instead of executing directly
against the connection, we defer the call to the ``run`` function.
If ``deterministic`` is true, the created function is marked as deterministic,
which allows SQLite to perform additional optimizations. This flag is supported
by SQLite 3.8.3 or higher; ``NotSupportedError`` will be raised if it is used
with older versions.
"""
await self._execute(
self._conn.create_function,
name,
num_params,
func,
deterministic=deterministic,
)
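# Usage sketch (illustrative comment only, mirroring the smoke tests): once a
# function is registered it can be called from SQL, e.g.
#   await db.create_function("double", 1, lambda x: x * 2, deterministic=True)
#   async with db.execute("SELECT double(21)") as cursor:
#       row = await cursor.fetchone()  # -> (42,)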
@property
def in_transaction(self) -> bool:
return self._conn.in_transaction
@property
def isolation_level(self) -> Optional[str]:
return self._conn.isolation_level
@isolation_level.setter
def isolation_level(self, value: IsolationLevel) -> None:
self._conn.isolation_level = value
@property
def row_factory(self) -> Optional[Type]:
return self._conn.row_factory
@row_factory.setter
def row_factory(self, factory: Optional[Type]) -> None:
self._conn.row_factory = factory
@property
def text_factory(self) -> Callable[[bytes], Any]:
return self._conn.text_factory
@text_factory.setter
def text_factory(self, factory: Callable[[bytes], Any]) -> None:
self._conn.text_factory = factory
@property
def total_changes(self) -> int:
return self._conn.total_changes
async def enable_load_extension(self, value: bool) -> None:
await self._execute(self._conn.enable_load_extension, value) # type: ignore
async def load_extension(self, path: str):
await self._execute(self._conn.load_extension, path) # type: ignore
async def set_progress_handler(
self, handler: Callable[[], Optional[int]], n: int
) -> None:
await self._execute(self._conn.set_progress_handler, handler, n)
async def set_trace_callback(self, handler: Callable) -> None:
await self._execute(self._conn.set_trace_callback, handler)
async def iterdump(self) -> AsyncIterator[str]:
"""
Return an async iterator to dump the database in SQL text format.
Example::
async for line in db.iterdump():
...
"""
dump_queue: Queue = Queue()
def dumper():
try:
for line in self._conn.iterdump():
dump_queue.put_nowait(line)
dump_queue.put_nowait(None)
except Exception:
LOG.exception("exception while dumping db")
dump_queue.put_nowait(None)
raise
fut = self._execute(dumper)
task = asyncio.ensure_future(fut)
while True:
try:
line: Optional[str] = dump_queue.get_nowait()
if line is None:
break
yield line
except Empty:
if task.done():
LOG.warning("iterdump completed unexpectedly")
break
await asyncio.sleep(0.01)
await task
async def backup(
self,
target: Union["Connection", sqlite3.Connection],
*,
pages: int = 0,
progress: Optional[Callable[[int, int, int], None]] = None,
name: str = "main",
sleep: float = 0.250,
) -> None:
"""
Make a backup of the current database to the target database.
Takes either a standard sqlite3 or aiosqlite Connection object as the target.
"""
if isinstance(target, Connection):
target = target._conn
await self._execute(
self._conn.backup,
target,
pages=pages,
progress=progress,
name=name,
sleep=sleep,
)
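# Usage sketch (illustrative comment only): copying an in-memory database to a
# file-backed one through the proxy API, e.g.
#   async with aiosqlite.connect(":memory:") as src, aiosqlite.connect("copy.db") as dst:
#       await src.backup(dst)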
def connect(
database: Union[str, Path],
*,
iter_chunk_size=64,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any,
) -> Connection:
"""Create and return a connection proxy to the sqlite database."""
if loop is not None:
warn(
"aiosqlite.connect() no longer uses the `loop` parameter",
DeprecationWarning,
)
def connector() -> sqlite3.Connection:
if isinstance(database, str):
loc = database
elif isinstance(database, bytes):
loc = database.decode("utf-8")
else:
loc = str(database)
return sqlite3.connect(loc, **kwargs)
return Connection(connector, iter_chunk_size)

View File

@@ -0,0 +1,118 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
import sqlite3
from typing import (
Any,
AsyncIterator,
Callable,
Iterable,
Optional,
Tuple,
Type,
TYPE_CHECKING,
)
if TYPE_CHECKING:
from .core import Connection
class Cursor:
def __init__(self, conn: "Connection", cursor: sqlite3.Cursor) -> None:
self.iter_chunk_size = conn._iter_chunk_size
self._conn = conn
self._cursor = cursor
def __aiter__(self) -> AsyncIterator[sqlite3.Row]:
"""The cursor proxy is also an async iterator."""
return self._fetch_chunked()
async def _fetch_chunked(self):
while True:
rows = await self.fetchmany(self.iter_chunk_size)
if not rows:
return
for row in rows:
yield row
async def _execute(self, fn, *args, **kwargs):
"""Execute the given function on the shared connection's thread."""
return await self._conn._execute(fn, *args, **kwargs)
async def execute(
self, sql: str, parameters: Optional[Iterable[Any]] = None
) -> "Cursor":
"""Execute the given query."""
if parameters is None:
parameters = []
await self._execute(self._cursor.execute, sql, parameters)
return self
async def executemany(
self, sql: str, parameters: Iterable[Iterable[Any]]
) -> "Cursor":
"""Execute the given multiquery."""
await self._execute(self._cursor.executemany, sql, parameters)
return self
async def executescript(self, sql_script: str) -> "Cursor":
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script)
return self
async def fetchone(self) -> Optional[sqlite3.Row]:
"""Fetch a single row."""
return await self._execute(self._cursor.fetchone)
async def fetchmany(self, size: Optional[int] = None) -> Iterable[sqlite3.Row]:
"""Fetch up to `cursor.arraysize` number of rows."""
args: Tuple[int, ...] = ()
if size is not None:
args = (size,)
return await self._execute(self._cursor.fetchmany, *args)
async def fetchall(self) -> Iterable[sqlite3.Row]:
"""Fetch all remaining rows."""
return await self._execute(self._cursor.fetchall)
async def close(self) -> None:
"""Close the cursor."""
await self._execute(self._cursor.close)
@property
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def lastrowid(self) -> Optional[int]:
return self._cursor.lastrowid
@property
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def description(self) -> Tuple[Tuple[str, None, None, None, None, None, None], ...]:
return self._cursor.description
@property
def row_factory(self) -> Optional[Callable[[sqlite3.Cursor, sqlite3.Row], object]]:
return self._cursor.row_factory
@row_factory.setter
def row_factory(self, factory: Optional[Type]) -> None:
self._cursor.row_factory = factory
@property
def connection(self) -> sqlite3.Connection:
return self._cursor.connection
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()

View File

@@ -0,0 +1,4 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
from .smoke import SmokeTest

View File

@@ -0,0 +1,7 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
import unittest
if __name__ == "__main__":
unittest.main(module="aiosqlite.tests", verbosity=2)

View File

@@ -0,0 +1,29 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
import logging
import sys
def setup_logger():
log = logging.getLogger("")
log.setLevel(logging.INFO)
logging.addLevelName(logging.ERROR, "E")
logging.addLevelName(logging.WARNING, "W")
logging.addLevelName(logging.INFO, "I")
logging.addLevelName(logging.DEBUG, "V")
date_fmt = r"%H:%M:%S"
verbose_fmt = (
"%(asctime)s,%(msecs)d %(levelname)s "
"%(module)s:%(funcName)s():%(lineno)d "
"%(message)s"
)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter(verbose_fmt, date_fmt))
log.addHandler(handler)
return log

View File

@@ -0,0 +1,203 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
"""
Simple perf tests for aiosqlite and the asyncio run loop.
"""
import string
import tempfile
import time
from unittest import IsolatedAsyncioTestCase as TestCase
import aiosqlite
from .smoke import setup_logger
TEST_DB = ":memory:"
TARGET = 2.0
RESULTS = {}
def timed(fn, name=None):
"""
Decorator for perf testing a block of async code.
Expects the wrapped function to return an async generator.
The generator should do setup, then yield when ready to start perf testing.
The decorator will then pump the generator repeatedly until the target
time has been reached, then close the generator and print perf results.
"""
name = name or fn.__name__
async def wrapper(*args, **kwargs):
gen = fn(*args, **kwargs)
await gen.asend(None)
count = 0
before = time.time()
while True:
count += 1
value = time.time() - before < TARGET
try:
if value:
await gen.asend(value)
else:
await gen.aclose()
break
except StopAsyncIteration:
break
except Exception as e:
print(f"exception occurred: {e}")
return
duration = time.time() - before
RESULTS[name] = (count, duration)
return wrapper
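# Usage sketch (illustrative comment mirroring the tests below): a perf case is
# an async generator decorated with @timed; setup runs before the first yield,
# and every later yield marks one timed iteration, e.g.
#   @timed
#   async def test_example(self):
#       async with aiosqlite.connect(TEST_DB) as db:  # setup
#           while True:
#               yield                                  # one timed iteration
#               await db.execute("select 1")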
class PerfTest(TestCase):
@classmethod
def setUpClass(cls):
print(f"Running perf tests for at least {TARGET:.1f}s each...")
setup_logger()
@classmethod
def tearDownClass(cls):
print(f"\n{'Perf Test':<25} Iterations Duration {'Rate':>11}")
for name in sorted(RESULTS):
count, duration = RESULTS[name]
rate = count / duration
name = name.replace("test_", "")
print(f"{name:<25} {count:>10} {duration:>7.1f}s {rate:>9.1f}/s")
@timed
async def test_connection_memory(self):
while True:
yield
async with aiosqlite.connect(TEST_DB):
pass
@timed
async def test_connection_file(self):
with tempfile.NamedTemporaryFile(delete=False) as tf:
path = tf.name
tf.close()
async with aiosqlite.connect(path) as db:
await db.execute(
"create table perf (i integer primary key asc, k integer)"
)
await db.execute("insert into perf (k) values (2), (3)")
await db.commit()
while True:
yield
async with aiosqlite.connect(path):
pass
@timed
async def test_atomics(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
await db.execute("insert into perf (k) values (2), (3)")
await db.commit()
while True:
yield
async with db.execute("select last_insert_rowid()") as cursor:
await cursor.fetchone()
@timed
async def test_inserts(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
await db.commit()
while True:
yield
await db.execute("insert into perf (k) values (1), (2), (3)")
await db.commit()
@timed
async def test_insert_ids(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
await db.commit()
while True:
yield
cursor = await db.execute("insert into perf (k) values (1)")
await cursor.execute("select last_insert_rowid()")
await cursor.fetchone()
await db.commit()
@timed
async def test_insert_macro_ids(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
await db.commit()
while True:
yield
await db.execute_insert("insert into perf (k) values (1)")
await db.commit()
@timed
async def test_select(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
for i in range(100):
await db.execute("insert into perf (k) values (%d)" % (i,))
await db.commit()
while True:
yield
cursor = await db.execute("select i, k from perf")
assert len(await cursor.fetchall()) == 100
@timed
async def test_select_macro(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table perf (i integer primary key asc, k integer)")
for i in range(100):
await db.execute("insert into perf (k) values (%d)" % (i,))
await db.commit()
while True:
yield
assert len(await db.execute_fetchall("select i, k from perf")) == 100
async def test_iterable_cursor_perf(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute(
"create table ic_perf ("
"i integer primary key asc, k integer, a integer, b integer, c char(16))"
)
for batch in range(128): # add 128k rows
r_start = batch * 1024
await db.executemany(
"insert into ic_perf (k, a, b, c) values(?, 1, 2, ?)",
[
*[
(i, string.ascii_lowercase)
for i in range(r_start, r_start + 1024)
]
],
)
await db.commit()
async def test_perf(chunk_size: int):
while True:
async with db.execute("SELECT * FROM ic_perf") as cursor:
cursor.iter_chunk_size = chunk_size
async for _ in cursor:
yield
for chunk_size in [2**i for i in range(4, 11)]:
await timed(test_perf, f"iterable_cursor @ {chunk_size}")(chunk_size)

View File

@@ -0,0 +1,452 @@
# Copyright 2022 Amethyst Reese
# Licensed under the MIT license
import asyncio
import sqlite3
from pathlib import Path
from sqlite3 import OperationalError
from threading import Thread
from unittest import IsolatedAsyncioTestCase as TestCase, SkipTest
import aiosqlite
from .helpers import setup_logger
TEST_DB = Path("test.db")
# pypy uses non-standard text factory for low-level sqlite implementation
try:
from _sqlite3 import _unicode_text_factory as default_text_factory
except ImportError:
default_text_factory = str
class SmokeTest(TestCase):
@classmethod
def setUpClass(cls):
setup_logger()
def setUp(self):
if TEST_DB.exists():
TEST_DB.unlink()
def tearDown(self):
if TEST_DB.exists():
TEST_DB.unlink()
async def test_connection_await(self):
db = await aiosqlite.connect(TEST_DB)
self.assertIsInstance(db, aiosqlite.Connection)
async with db.execute("select 1, 2") as cursor:
rows = await cursor.fetchall()
self.assertEqual(rows, [(1, 2)])
await db.close()
async def test_connection_context(self):
async with aiosqlite.connect(TEST_DB) as db:
self.assertIsInstance(db, aiosqlite.Connection)
async with db.execute("select 1, 2") as cursor:
rows = await cursor.fetchall()
self.assertEqual(rows, [(1, 2)])
async def test_connection_locations(self):
class Fake: # pylint: disable=too-few-public-methods
def __str__(self):
return str(TEST_DB)
locs = ("test.db", b"test.db", Path("test.db"), Fake())
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("create table foo (i integer, k integer)")
await db.execute("insert into foo (i, k) values (1, 5)")
await db.commit()
cursor = await db.execute("select * from foo")
rows = await cursor.fetchall()
for loc in locs:
async with aiosqlite.connect(loc) as db:
cursor = await db.execute("select * from foo")
self.assertEqual(await cursor.fetchall(), rows)
async def test_multiple_connections(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute(
"create table multiple_connections "
"(i integer primary key asc, k integer)"
)
async def do_one_conn(i):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute("insert into multiple_connections (k) values (?)", [i])
await db.commit()
await asyncio.gather(*[do_one_conn(i) for i in range(10)])
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.execute("select * from multiple_connections")
rows = await cursor.fetchall()
assert len(rows) == 10
async def test_multiple_queries(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute(
"create table multiple_queries "
"(i integer primary key asc, k integer)"
)
await asyncio.gather(
*[
db.execute("insert into multiple_queries (k) values (?)", [i])
for i in range(10)
]
)
await db.commit()
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.execute("select * from multiple_queries")
rows = await cursor.fetchall()
assert len(rows) == 10
async def test_iterable_cursor(self):
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.cursor()
await cursor.execute(
"create table iterable_cursor " "(i integer primary key asc, k integer)"
)
await cursor.executemany(
"insert into iterable_cursor (k) values (?)", [[i] for i in range(10)]
)
await db.commit()
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.execute("select * from iterable_cursor")
rows = []
async for row in cursor:
rows.append(row)
assert len(rows) == 10
async def test_multi_loop_usage(self):
results = {}
def runner(k, conn):
async def query():
async with conn.execute("select * from foo") as cursor:
rows = await cursor.fetchall()
self.assertEqual(len(rows), 2)
return rows
with self.subTest(k):
loop = asyncio.new_event_loop()
rows = loop.run_until_complete(query())
loop.close()
results[k] = rows
async with aiosqlite.connect(":memory:") as db:
await db.execute("create table foo (id int, name varchar)")
await db.execute(
"insert into foo values (?, ?), (?, ?)", (1, "Sally", 2, "Janet")
)
await db.commit()
threads = [Thread(target=runner, args=(k, db)) for k in range(4)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertEqual(len(results), 4)
for rows in results.values():
self.assertEqual(len(rows), 2)
async def test_context_cursor(self):
async with aiosqlite.connect(TEST_DB) as db:
async with db.cursor() as cursor:
await cursor.execute(
"create table context_cursor "
"(i integer primary key asc, k integer)"
)
await cursor.executemany(
"insert into context_cursor (k) values (?)",
[[i] for i in range(10)],
)
await db.commit()
async with aiosqlite.connect(TEST_DB) as db:
async with db.execute("select * from context_cursor") as cursor:
rows = []
async for row in cursor:
rows.append(row)
assert len(rows) == 10
async def test_cursor_return_self(self):
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.cursor()
result = await cursor.execute(
"create table test_cursor_return_self (i integer, k integer)"
)
self.assertEqual(result, cursor, "cursor execute returns itself")
result = await cursor.executemany(
"insert into test_cursor_return_self values (?, ?)", [(1, 1), (2, 2)]
)
self.assertEqual(result, cursor)
result = await cursor.executescript(
"insert into test_cursor_return_self values (3, 3);"
"insert into test_cursor_return_self values (4, 4);"
"insert into test_cursor_return_self values (5, 5);"
)
self.assertEqual(result, cursor)
async def test_connection_properties(self):
async with aiosqlite.connect(TEST_DB) as db:
self.assertEqual(db.total_changes, 0)
async with db.cursor() as cursor:
self.assertFalse(db.in_transaction)
await cursor.execute(
"create table test_properties "
"(i integer primary key asc, k integer, d text)"
)
await cursor.execute(
"insert into test_properties (k, d) values (1, 'hi')"
)
self.assertTrue(db.in_transaction)
await db.commit()
self.assertFalse(db.in_transaction)
self.assertEqual(db.total_changes, 1)
self.assertIsNone(db.row_factory)
self.assertEqual(db.text_factory, default_text_factory)
async with db.cursor() as cursor:
await cursor.execute("select * from test_properties")
row = await cursor.fetchone()
self.assertIsInstance(row, tuple)
self.assertEqual(row, (1, 1, "hi"))
with self.assertRaises(TypeError):
_ = row["k"]
async with db.cursor() as cursor:
cursor.row_factory = aiosqlite.Row
self.assertEqual(cursor.row_factory, aiosqlite.Row)
await cursor.execute("select * from test_properties")
row = await cursor.fetchone()
self.assertIsInstance(row, aiosqlite.Row)
self.assertEqual(row[1], 1)
self.assertEqual(row[2], "hi")
self.assertEqual(row["k"], 1)
self.assertEqual(row["d"], "hi")
db.row_factory = aiosqlite.Row
db.text_factory = bytes
self.assertEqual(db.row_factory, aiosqlite.Row)
self.assertEqual(db.text_factory, bytes)
async with db.cursor() as cursor:
await cursor.execute("select * from test_properties")
row = await cursor.fetchone()
self.assertIsInstance(row, aiosqlite.Row)
self.assertEqual(row[1], 1)
self.assertEqual(row[2], b"hi")
self.assertEqual(row["k"], 1)
self.assertEqual(row["d"], b"hi")
async def test_fetch_all(self):
async with aiosqlite.connect(TEST_DB) as db:
await db.execute(
"create table test_fetch_all (i integer primary key asc, k integer)"
)
await db.execute(
"insert into test_fetch_all (k) values (10), (24), (16), (32)"
)
await db.commit()
async with aiosqlite.connect(TEST_DB) as db:
cursor = await db.execute("select k from test_fetch_all where k < 30")
rows = await cursor.fetchall()
self.assertEqual(rows, [(10,), (24,), (16,)])
async def test_enable_load_extension(self):
"""Assert that after enabling extension loading, they can be loaded"""
async with aiosqlite.connect(TEST_DB) as db:
try:
await db.enable_load_extension(True)
await db.load_extension("test")
except OperationalError as e:
assert "not authorized" not in e.args
except AttributeError as e:
raise SkipTest(
"python was not compiled with sqlite3 "
"extension support, so we can't test it"
) from e
async def test_set_progress_handler(self):
"""
Assert that after setting a progress handler returning 1, DB operations are aborted
"""
async with aiosqlite.connect(TEST_DB) as db:
await db.set_progress_handler(lambda: 1, 1)
with self.assertRaises(OperationalError):
await db.execute(
"create table test_progress_handler (i integer primary key asc, k integer)"
)
async def test_create_function(self):
"""Assert that after creating a custom function, it can be used"""
def no_arg():
return "no arg"
def one_arg(num):
return num * 2
async with aiosqlite.connect(TEST_DB) as db:
await db.create_function("no_arg", 0, no_arg)
await db.create_function("one_arg", 1, one_arg)
async with db.execute("SELECT no_arg();") as res:
row = await res.fetchone()
self.assertEqual(row[0], "no arg")
async with db.execute("SELECT one_arg(10);") as res:
row = await res.fetchone()
self.assertEqual(row[0], 20)
async def test_create_function_deterministic(self):
"""Assert that after creating a deterministic custom function, it can be used.
https://sqlite.org/deterministic.html
"""
def one_arg(num):
return num * 2
async with aiosqlite.connect(TEST_DB) as db:
await db.create_function("one_arg", 1, one_arg, deterministic=True)
await db.execute("create table foo (id int, bar int)")
# Non-deterministic functions cannot be used in indexes
await db.execute("create index t on foo(one_arg(bar))")
async def test_set_trace_callback(self):
statements = []
def callback(statement: str):
statements.append(statement)
async with aiosqlite.connect(TEST_DB) as db:
await db.set_trace_callback(callback)
await db.execute("select 10")
self.assertIn("select 10", statements)
async def test_connect_error(self):
bad_db = Path("/something/that/shouldnt/exist.db")
with self.assertRaisesRegex(OperationalError, "unable to open database"):
async with aiosqlite.connect(bad_db) as db:
self.assertIsNone(db) # should never be reached
with self.assertRaisesRegex(OperationalError, "unable to open database"):
await aiosqlite.connect(bad_db)
async def test_iterdump(self):
async with aiosqlite.connect(":memory:") as db:
await db.execute("create table foo (i integer, k charvar(250))")
await db.executemany(
"insert into foo values (?, ?)", [(1, "hello"), (2, "world")]
)
lines = [line async for line in db.iterdump()]
self.assertEqual(
lines,
[
"BEGIN TRANSACTION;",
"CREATE TABLE foo (i integer, k charvar(250));",
"INSERT INTO \"foo\" VALUES(1,'hello');",
"INSERT INTO \"foo\" VALUES(2,'world');",
"COMMIT;",
],
)
async def test_cursor_on_closed_connection(self):
db = await aiosqlite.connect(TEST_DB)
cursor = await db.execute("select 1, 2")
await db.close()
with self.assertRaisesRegex(ValueError, "Connection closed"):
await cursor.fetchall()
with self.assertRaisesRegex(ValueError, "Connection closed"):
await cursor.fetchall()
async def test_cursor_on_closed_connection_loop(self):
db = await aiosqlite.connect(TEST_DB)
cursor = await db.execute("select 1, 2")
tasks = []
for i in range(100):
if i == 50:
tasks.append(asyncio.ensure_future(db.close()))
tasks.append(asyncio.ensure_future(cursor.fetchall()))
for task in tasks:
try:
await task
except sqlite3.ProgrammingError:
pass
async def test_close_twice(self):
db = await aiosqlite.connect(TEST_DB)
await db.close()
# no error
await db.close()
async def test_backup_aiosqlite(self):
def progress(a, b, c):
print(a, b, c)
async with aiosqlite.connect(":memory:") as db1, aiosqlite.connect(
":memory:"
) as db2:
await db1.execute("create table foo (i integer, k charvar(250))")
await db1.executemany(
"insert into foo values (?, ?)", [(1, "hello"), (2, "world")]
)
await db1.commit()
with self.assertRaisesRegex(OperationalError, "no such table: foo"):
await db2.execute("select * from foo")
await db1.backup(db2, progress=progress)
async with db2.execute("select * from foo") as cursor:
rows = await cursor.fetchall()
self.assertEqual(rows, [(1, "hello"), (2, "world")])
async def test_backup_sqlite(self):
async with aiosqlite.connect(":memory:") as db1:
with sqlite3.connect(":memory:") as db2:
await db1.execute("create table foo (i integer, k charvar(250))")
await db1.executemany(
"insert into foo values (?, ?)", [(1, "hello"), (2, "world")]
)
await db1.commit()
with self.assertRaisesRegex(OperationalError, "no such table: foo"):
db2.execute("select * from foo")
await db1.backup(db2)
cursor = db2.execute("select * from foo")
rows = cursor.fetchall()
self.assertEqual(rows, [(1, "hello"), (2, "world")])


@@ -0,0 +1,295 @@
Metadata-Version: 2.3
Name: annotated-types
Version: 0.7.0
Summary: Reusable constraint types to use with typing.Annotated
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
Project-URL: Source, https://github.com/annotated-types/annotated-types
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Environment :: MacOS X
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Typing :: Typed
Requires-Python: >=3.8
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
Description-Content-Type: text/markdown
# annotated-types
[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
adding context-specific metadata to existing types, and specifies that
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
logic for `x`.
This package provides metadata objects which can be used to represent common
constraints such as upper and lower bounds on scalar values and collection sizes,
a `Predicate` marker for runtime checks, and
descriptions of how we intend these metadata to be interpreted. In some cases,
we also note alternative representations which do not require this package.
## Install
```bash
pip install annotated-types
```
## Examples
```python
from typing import Annotated
from annotated_types import Gt, Len, Predicate
class MyClass:
age: Annotated[int, Gt(18)] # Valid: 19, 20, ...
# Invalid: 17, 18, "19", 19.0, ...
factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ...
# Invalid: 4, 8, -2, 5.0, "prime", ...
my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50]
# Invalid: (1, 2), ["abc"], [0] * 20
```
## Documentation
_While `annotated-types` avoids runtime checks for performance, users should not
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
Downstream implementors may choose to raise an error, emit a warning, silently ignore
a metadata item, etc., if the metadata objects described below are used with an
incompatible type - or for any other reason!_
### Gt, Ge, Lt, Le
Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
dates, times, strings, sets, etc. Note that the boundary value need not be of the
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)`
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
the `annotated-types` package.
To be explicit, these types have the following meanings:
* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
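A minimal sketch (not part of the upstream package) of how a downstream validator might apply these bound objects at runtime; the helper name `check_bounds` and the `Age` alias are hypothetical, everything else is the documented public API:
```python
from typing import Annotated, get_args, get_origin

from annotated_types import Ge, Gt, Le, Lt

Age = Annotated[int, Ge(0), Lt(150)]

def check_bounds(tp, value) -> bool:
    """Naive checker: apply every Gt/Ge/Lt/Le constraint found in the annotation."""
    if get_origin(tp) is not Annotated:
        return True
    ok = True
    for meta in get_args(tp)[1:]:
        if isinstance(meta, Gt):
            ok = ok and value > meta.gt
        elif isinstance(meta, Ge):
            ok = ok and value >= meta.ge
        elif isinstance(meta, Lt):
            ok = ok and value < meta.lt
        elif isinstance(meta, Le):
            ok = ok and value <= meta.le
    return ok

assert check_bounds(Age, 30)
assert not check_bounds(Age, -1)
```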
### Interval
`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
metadata object. `None` attributes should be ignored, and non-`None` attributes
treated as per the single bounds above.
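A short illustration: because `Interval` is a `GroupedMetadata` (see below), iterating over it yields the equivalent single-bound constraints.
```python
from typing import Annotated

from annotated_types import Ge, Interval, Lt

Percentage = Annotated[float, Interval(ge=0, lt=100)]

# Unpacking an Interval yields the equivalent single-bound constraints:
assert list(Interval(ge=0, lt=100)) == [Ge(0), Lt(100)]
```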
### MultipleOf
`MultipleOf(multiple_of=x)` might be interpreted in two ways:
1. Python semantics, implying `value % multiple_of == 0`, or
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
where `int(value / multiple_of) == value / multiple_of`.
We encourage users to be aware of these two common interpretations and their
distinct behaviours, especially since very large or non-integer numbers make
it easy to cause silent data corruption due to floating-point imprecision.
We encourage libraries to carefully document which interpretation they implement.
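A small sketch, not from the upstream docs, contrasting the two interpretations on a value where floating-point rounding makes them disagree (the two helper functions are hypothetical):
```python
from annotated_types import MultipleOf

def is_multiple_python(value, constraint: MultipleOf) -> bool:
    # 1. Python semantics: the modulo must be exactly zero
    return value % constraint.multiple_of == 0

def is_multiple_jsonschema(value, constraint: MultipleOf) -> bool:
    # 2. JSON Schema semantics: the quotient must be integral
    ratio = value / constraint.multiple_of
    return int(ratio) == ratio

step = MultipleOf(multiple_of=0.1)
print(is_multiple_python(0.5, step))      # False on CPython: 0.5 % 0.1 leaves a tiny remainder
print(is_multiple_jsonschema(0.5, step))  # True: 0.5 / 0.1 rounds to exactly 5.0
```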
### MinLen, MaxLen, Len
`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
As well as `Len()` which can optionally include upper and lower bounds, we also
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
and `Len(max_length=y)` respectively.
`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
Examples of usage:
* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
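As a quick illustration using the package as shipped, `Len` itself is a `GroupedMetadata` that unpacks into the single-bound length constraints:
```python
from annotated_types import Len, MaxLen, MinLen

# Len unpacks into the equivalent single-bound length constraints:
assert list(Len(1, 5)) == [MinLen(1), MaxLen(5)]
assert list(Len(0, 5)) == [MaxLen(5)]  # a zero lower bound is omitted
assert list(Len(3)) == [MinLen(3)]     # no upper bound by default
```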
#### Changed in v0.4.0
* `min_inclusive` has been renamed to `min_length`, no change in meaning
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
meaning of the upper bound in slices vs. `Len`
See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
### Timezone
`Timezone` can be used with a `datetime` or a `time` to express which timezones
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
expresses that any timezone-aware datetime is allowed. You may also pass a specific
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
allow a specific timezone, though we note that this is often a symptom of fragile design.
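A brief sketch (not part of the upstream README) of the three flavours, plus the awareness check a validator would typically apply for `Timezone(...)`; the alias names and `is_aware` helper are hypothetical:
```python
from datetime import datetime, timezone
from typing import Annotated

from annotated_types import Timezone

NaiveStamp = Annotated[datetime, Timezone(None)]         # must be naive
AwareStamp = Annotated[datetime, Timezone(...)]          # any tz-aware datetime
UtcStamp = Annotated[datetime, Timezone(timezone.utc)]   # only UTC allowed

def is_aware(dt: datetime) -> bool:
    # the standard "aware datetime" check from the Python docs
    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None

assert not is_aware(datetime(2024, 1, 1))
assert is_aware(datetime(2024, 1, 1, tzinfo=timezone.utc))
```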
#### Changed in v0.x.x
* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
### Unit
`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
would be a float representing a velocity in meters per second.
Please note that `annotated_types` itself makes no attempt to parse or validate
the unit string in any way. That is left entirely to downstream libraries,
such as [`pint`](https://pint.readthedocs.io) or
[`astropy.units`](https://docs.astropy.org/en/stable/units/).
An example of how a library might use this metadata:
```python
from annotated_types import Unit
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
# given a type annotated with a unit:
Meters = Annotated[float, Unit("m")]
# you can cast the annotation to a specific unit type with any
# callable that accepts a string and returns the desired type
T = TypeVar("T")
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
if get_origin(tp) is Annotated:
for arg in get_args(tp):
if isinstance(arg, Unit):
return unit_cls(arg.unit)
return None
# using `pint`
import pint
pint_unit = cast_unit(Meters, pint.Unit)
# using `astropy.units`
import astropy.units as u
astropy_unit = cast_unit(Meters, u.Unit)
```
### Predicate
`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
Users should prefer the statically inspectable metadata above, but if you need
the full power and flexibility of arbitrary runtime predicates... here it is.
For some common constraints, we provide generic types:
* `IsLower = Annotated[T, Predicate(str.islower)]`
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
* `IsNan = Annotated[T, Predicate(math.isnan)]`
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
Some libraries might have special logic to handle known or understandable predicates,
for example by checking for `str.isdigit` and using its presence to both call custom
logic to enforce digit-only strings, and customise some generated external schema.
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
To enable basic negation of commonly used predicates like `math.isnan` without introducing indirection that would make it impossible for implementers to introspect the predicate, we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this manner.
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
exception. We encourage libraries to document the behaviour they choose.
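A small example, assuming only the public `Predicate` and `Not` objects described above, of writing a predicate-based constraint and its introspectable negation:
```python
import math
from typing import Annotated

from annotated_types import Not, Predicate

# Equivalent to the IsFinite alias, written out explicitly:
Finite = Annotated[float, Predicate(math.isfinite)]

# Not() negates a predicate while keeping the wrapped callable introspectable:
not_nan = Not(math.isnan)
NotNan = Annotated[float, Predicate(not_nan)]

assert not_nan(1.0) is True
assert not_nan(math.nan) is False
assert not_nan.func is math.isnan  # implementers can still see what is being negated
```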
### Doc
`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
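A minimal sketch of how a tool might read the `DocInfo` back out of an annotation at runtime; the `greet` function is hypothetical, the `documentation` attribute is as described above:
```python
from typing import Annotated, get_args

from annotated_types import doc

def greet(name: Annotated[str, doc("The name to include in the greeting")]) -> str:
    return f"hello {name}"

# Tools can recover the documentation string from the DocInfo object:
info = get_args(greet.__annotations__["name"])[1]
print(info.documentation)  # The name to include in the greeting
```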
### Integrating downstream types with `GroupedMetadata`
Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
This can help reduce verbosity and cognitive overhead for users.
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
```python
from dataclasses import dataclass
from typing import Iterator
from annotated_types import GroupedMetadata, Ge
@dataclass
class Field(GroupedMetadata):
ge: int | None = None
description: str | None = None
def __iter__(self) -> Iterator[object]:
# Iterating over a GroupedMetadata object should yield annotated-types
# constraint metadata objects which describe it as fully as possible,
# and may include other unknown objects too.
if self.ge is not None:
yield Ge(self.ge)
if self.description is not None:
yield Description(self.description)
```
Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
### Consuming metadata
We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations, see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
It is up to the implementer to determine how this metadata is used.
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
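For instance, a consumer might flatten the metadata of an `Annotated` type, unpacking any `GroupedMetadata` and passing everything else through untouched (a rough sketch, not the `test_main.py` implementation linked above; `collect_metadata` is a hypothetical name):
```python
from typing import Annotated, get_args, get_origin

import annotated_types as at

def collect_metadata(tp):
    """Flatten Annotated metadata, unpacking any GroupedMetadata along the way."""
    if get_origin(tp) is not Annotated:
        return []
    flattened = []
    for meta in get_args(tp)[1:]:
        if isinstance(meta, at.GroupedMetadata):
            flattened.extend(meta)  # e.g. Interval -> Ge, Lt
        else:
            flattened.append(meta)  # unknown metadata is passed through
    return flattened

tp = Annotated[int, at.Interval(ge=0, lt=10), "unrelated marker"]
print(collect_metadata(tp))  # [Ge(ge=0), Lt(lt=10), 'unrelated marker']
```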
## Design & History
This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
and Hypothesis, with the goal of making it as easy as possible for end-users to
provide more informative annotations for use by runtime libraries.
It is deliberately minimal, and following PEP-593 allows considerable downstream
discretion in what (if anything!) they choose to support. Nonetheless, we expect
that staying simple and covering _only_ the most common use-cases will give users
and maintainers the best experience we can. If you'd like more constraints for your
types - follow our lead, by defining them and documenting them downstream!


@@ -0,0 +1,10 @@
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
annotated_types-0.7.0.dist-info/RECORD,,
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
annotated_types/__pycache__/__init__.cpython-312.pyc,,
annotated_types/__pycache__/test_cases.cpython-312.pyc,,
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421


@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.24.2
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2022 the contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,432 @@
import math
import sys
import types
from dataclasses import dataclass
from datetime import tzinfo
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
if sys.version_info < (3, 8):
from typing_extensions import Protocol, runtime_checkable
else:
from typing import Protocol, runtime_checkable
if sys.version_info < (3, 9):
from typing_extensions import Annotated, Literal
else:
from typing import Annotated, Literal
if sys.version_info < (3, 10):
EllipsisType = type(Ellipsis)
KW_ONLY = {}
SLOTS = {}
else:
from types import EllipsisType
KW_ONLY = {"kw_only": True}
SLOTS = {"slots": True}
__all__ = (
'BaseMetadata',
'GroupedMetadata',
'Gt',
'Ge',
'Lt',
'Le',
'Interval',
'MultipleOf',
'MinLen',
'MaxLen',
'Len',
'Timezone',
'Predicate',
'LowerCase',
'UpperCase',
'IsDigits',
'IsFinite',
'IsNotFinite',
'IsNan',
'IsNotNan',
'IsInfinite',
'IsNotInfinite',
'doc',
'DocInfo',
'__version__',
)
__version__ = '0.7.0'
T = TypeVar('T')
# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments
class SupportsGt(Protocol):
def __gt__(self: T, __other: T) -> bool:
...
class SupportsGe(Protocol):
def __ge__(self: T, __other: T) -> bool:
...
class SupportsLt(Protocol):
def __lt__(self: T, __other: T) -> bool:
...
class SupportsLe(Protocol):
def __le__(self: T, __other: T) -> bool:
...
class SupportsMod(Protocol):
def __mod__(self: T, __other: T) -> T:
...
class SupportsDiv(Protocol):
def __div__(self: T, __other: T) -> T:
...
class BaseMetadata:
"""Base class for all metadata.
This exists mainly so that implementers
can do `isinstance(..., BaseMetadata)` while traversing field annotations.
"""
__slots__ = ()
@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
"""Gt(gt=x) implies that the value must be greater than x.
It can be used with any type that supports the ``>`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
gt: SupportsGt
@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
"""Ge(ge=x) implies that the value must be greater than or equal to x.
It can be used with any type that supports the ``>=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
ge: SupportsGe
@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
"""Lt(lt=x) implies that the value must be less than x.
It can be used with any type that supports the ``<`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
lt: SupportsLt
@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
"""Le(le=x) implies that the value must be less than or equal to x.
It can be used with any type that supports the ``<=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
le: SupportsLe
@runtime_checkable
class GroupedMetadata(Protocol):
"""A grouping of multiple objects, like typing.Unpack.
`GroupedMetadata` on its own is not metadata and has no meaning.
All of the constraints and metadata should be fully expressible
in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
Concrete implementations should override `GroupedMetadata.__iter__()`
to add their own metadata.
For example:
>>> @dataclass
>>> class Field(GroupedMetadata):
>>> gt: float | None = None
>>> description: str | None = None
...
>>> def __iter__(self) -> Iterable[object]:
>>> if self.gt is not None:
>>> yield Gt(self.gt)
>>> if self.description is not None:
>>> yield Description(self.gt)
Also see the implementation of `Interval` below for an example.
Parsers should recognize this and unpack it so that it can be used
both with and without unpacking:
- `Annotated[int, Field(...)]` (parser must unpack Field)
- `Annotated[int, *Field(...)]` (PEP-646)
""" # noqa: trailing-whitespace
@property
def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
return True
def __iter__(self) -> Iterator[object]:
...
if not TYPE_CHECKING:
__slots__ = () # allow subclasses to use slots
def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
# Basic ABC like functionality without the complexity of an ABC
super().__init_subclass__(*args, **kwargs)
if cls.__iter__ is GroupedMetadata.__iter__:
raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
def __iter__(self) -> Iterator[object]: # noqa: F811
raise NotImplementedError # more helpful than "None has no attribute..." type errors
@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
"""Interval can express inclusive or exclusive bounds with a single object.
It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
are interpreted the same way as the single-bound constraints.
"""
gt: Union[SupportsGt, None] = None
ge: Union[SupportsGe, None] = None
lt: Union[SupportsLt, None] = None
le: Union[SupportsLe, None] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack an Interval into zero or more single-bounds."""
if self.gt is not None:
yield Gt(self.gt)
if self.ge is not None:
yield Ge(self.ge)
if self.lt is not None:
yield Lt(self.lt)
if self.le is not None:
yield Le(self.le)
@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
"""MultipleOf(multiple_of=x) might be interpreted in two ways:
1. Python semantics, implying ``value % multiple_of == 0``, or
2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
We encourage users to be aware of these two common interpretations,
and libraries to carefully document which they implement.
"""
multiple_of: Union[SupportsDiv, SupportsMod]
@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
"""
MinLen() implies minimum inclusive length,
e.g. ``len(value) >= min_length``.
"""
min_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
"""
MaxLen() implies maximum inclusive length,
e.g. ``len(value) <= max_length``.
"""
max_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
"""
Len() implies that ``min_length <= len(value) <= max_length``.
Upper bound may be omitted or ``None`` to indicate no upper length bound.
"""
min_length: Annotated[int, Ge(0)] = 0
max_length: Optional[Annotated[int, Ge(0)]] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack a Len into zone or more single-bounds."""
if self.min_length > 0:
yield MinLen(self.min_length)
if self.max_length is not None:
yield MaxLen(self.max_length)
@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
"""Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
tz-aware but any timezone is allowed.
You may also pass a specific timezone string or tzinfo object such as
``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
you only allow a specific timezone, though we note that this is often
a symptom of poor design.
"""
tz: Union[str, tzinfo, EllipsisType, None]
@dataclass(frozen=True, **SLOTS)
class Unit(BaseMetadata):
"""Indicates that the value is a physical quantity with the specified unit.
It is intended for usage with numeric types, where the value represents the
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
or ``speed: Annotated[float, Unit('m/s')]``.
Interpretation of the unit string is left to the discretion of the consumer.
It is suggested to follow conventions established by python libraries that work
with physical quantities, such as
- ``pint`` : <https://pint.readthedocs.io/en/stable/>
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
For indicating a quantity with a certain dimensionality but without a specific unit
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
Note, however, ``annotated_types`` itself makes no use of the unit string.
"""
unit: str
@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
Users should prefer statically inspectable metadata, but if you need the full
power and flexibility of arbitrary runtime predicates... here it is.
We provide a few predefined predicates for common string constraints:
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
Some libraries might have special logic to handle certain predicates, e.g. by
checking for `str.isdigit` and using its presence to both call custom logic to
enforce digit-only strings, and customise some generated external schema.
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting exception.
"""
func: Callable[[Any], bool]
def __repr__(self) -> str:
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
return f"{self.__class__.__name__}({self.func!r})"
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
namespace := getattr(self.func.__self__, "__name__", None)
):
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
if isinstance(self.func, type(str.isascii)): # method descriptor
return f"{self.__class__.__name__}({self.func.__qualname__})"
return f"{self.__class__.__name__}({self.func.__name__})"
@dataclass
class Not:
func: Callable[[Any], bool]
def __call__(self, __v: Any) -> bool:
return not self.func(__v)
_StrType = TypeVar("_StrType", bound=str)
LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.
A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
""" # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.
A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
""" # noqa: E501
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
"""
Return True if the string is a digit string, False otherwise.
A string is a digit string if all characters in the string are digits and there is at least one character in the string.
""" # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.
ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""
_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""
try:
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
except ImportError:
@dataclass(frozen=True, **SLOTS)
class DocInfo: # type: ignore [no-redef]
""" "
The return value of doc(), mainly to be used by tools that want to extract the
Annotated documentation at runtime.
"""
documentation: str
"""The documentation string passed to doc()."""
def doc(
documentation: str,
) -> DocInfo:
"""
Add documentation to a type annotation inside of Annotated.
For example:
>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
"""
return DocInfo(documentation)


@@ -0,0 +1,151 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
if sys.version_info < (3, 9):
from typing_extensions import Annotated
else:
from typing import Annotated
import annotated_types as at
class Case(NamedTuple):
"""
A test case for `annotated_types`.
"""
annotation: Any
valid_cases: Iterable[Any]
invalid_cases: Iterable[Any]
def cases() -> Iterable[Case]:
# Gt, Ge, Lt, Le
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(date(2000, 1, 1))],
[date(2000, 1, 2), date(2000, 1, 3)],
[date(2000, 1, 1), date(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(Decimal('1.123'))],
[Decimal('1.1231'), Decimal('123')],
[Decimal('1.123'), Decimal('0')],
)
yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
)
yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
# Interval
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
yield Case(
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
)
yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
# lengths
yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
# Timezone
yield Case(
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
)
yield Case(
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
)
yield Case(
Annotated[datetime, at.Timezone(timezone.utc)],
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
yield Case(
Annotated[datetime, at.Timezone('Europe/London')],
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
# Quantity
yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
# predicate types
yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
# check stacked predicates
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
# doc
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
# custom GroupedMetadata
class MyCustomGroupedMetadata(at.GroupedMetadata):
def __iter__(self) -> Iterator[at.Predicate]:
yield at.Predicate(lambda x: float(x).is_integer())
yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])


@@ -0,0 +1,96 @@
Metadata-Version: 2.4
Name: anyio
Version: 4.12.1
Summary: High-level concurrency and networking framework on top of asyncio or Trio
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License-Expression: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
Requires-Dist: idna>=2.8
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
Provides-Extra: trio
Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio"
Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio"
Dynamic: license-file
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/anyio?branch=master
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
:alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
:target: https://gitter.im/python-trio/AnyIO
:alt: Gitter chat
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
with the native SC of Trio itself.
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
Trio_. AnyIO can also be adopted into a library or application incrementally bit by bit, no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.
To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it
`here <https://anyio.readthedocs.io/en/stable/why.html>`_.
Documentation
-------------
View full documentation at: https://anyio.readthedocs.io/
Features
--------
AnyIO offers the following functionality:
* Task groups (nurseries_ in trio terminology)
* High-level networking (TCP, UDP and UNIX sockets)
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
3.8)
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
Protocols)
* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
streams)
* Worker threads
* Subprocesses
* Subinterpreter support for code parallelization (on Python 3.13 and later)
* Asynchronous file I/O (using worker threads)
* Signal handling
* Asynchronous version of the functools_ module
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
It even works with the popular Hypothesis_ library.
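A minimal sketch of the task-group API (not part of this README; it assumes only the documented ``anyio.run``, ``create_task_group`` and ``sleep`` entry points)::

    import anyio

    async def greet(name: str) -> None:
        await anyio.sleep(0.1)
        print("hello,", name)

    async def main() -> None:
        # both tasks run concurrently inside one structured scope
        async with anyio.create_task_group() as tg:
            tg.start_soon(greet, "asyncio")
            tg.start_soon(greet, "trio")

    anyio.run(main)
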
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _Trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _functools: https://docs.python.org/3/library/functools.html
.. _Hypothesis: https://hypothesis.works/


@@ -0,0 +1,92 @@
anyio-4.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-4.12.1.dist-info/METADATA,sha256=DfiDab9Tmmcfy802lOLTMEHJQShkOSbopCwqCYbLuJk,4277
anyio-4.12.1.dist-info/RECORD,,
anyio-4.12.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
anyio-4.12.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-4.12.1.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-4.12.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170
anyio/__pycache__/__init__.cpython-312.pyc,,
anyio/__pycache__/from_thread.cpython-312.pyc,,
anyio/__pycache__/functools.cpython-312.pyc,,
anyio/__pycache__/lowlevel.cpython-312.pyc,,
anyio/__pycache__/pytest_plugin.cpython-312.pyc,,
anyio/__pycache__/to_interpreter.cpython-312.pyc,,
anyio/__pycache__/to_process.cpython-312.pyc,,
anyio/__pycache__/to_thread.cpython-312.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-312.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,,
anyio/_backends/__pycache__/_trio.cpython-312.pyc,,
anyio/_backends/_asyncio.py,sha256=xG6qv60mgGnL0mK82dxjH2b8hlkMlJ-x2BqIq3qv70Y,98863
anyio/_backends/_trio.py,sha256=30Rctb7lm8g63ZHljVPVnj5aH-uK6oQvphjwUBoAzuI,41456
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-312.pyc,,
anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc,,
anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-312.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-312.pyc,,
anyio/_core/__pycache__/_fileio.cpython-312.pyc,,
anyio/_core/__pycache__/_resources.cpython-312.pyc,,
anyio/_core/__pycache__/_signals.cpython-312.pyc,,
anyio/_core/__pycache__/_sockets.cpython-312.pyc,,
anyio/_core/__pycache__/_streams.cpython-312.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-312.pyc,,
anyio/_core/__pycache__/_tasks.cpython-312.pyc,,
anyio/_core/__pycache__/_tempfile.cpython-312.pyc,,
anyio/_core/__pycache__/_testing.cpython-312.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-312.pyc,,
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215
anyio/_core/_eventloop.py,sha256=c2EdcBX-xnKwxPcC4Pjn3_qG9I-x4IWFO2R9RqCGjM4,6448
anyio/_core/_exceptions.py,sha256=Y3aq-Wxd7Q2HqwSg7nZPvRsHEuGazv_qeet6gqEBdPk,4407
anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
anyio/_core/_signals.py,sha256=mjTBB2hTKNPRlU0IhnijeQedpWOGERDiMjSlJQsFrug,1016
anyio/_core/_sockets.py,sha256=RBXHcUqZt5gg_-OOfgHVv8uq2FSKk1uVUzTdpjBoI1o,34977
anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
anyio/_core/_synchronization.py,sha256=MgVVqFzvt580tHC31LiOcq1G6aryut--xRG4Ff8KwxQ,20869
anyio/_core/_tasks.py,sha256=pVB7K6AAulzUM8YgXAeqNZG44nSyZ1bYJjH8GznC00I,5435
anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697
anyio/_core/_testing.py,sha256=u7MPqGXwpTxqI7hclSdNA30z2GH1Nw258uwKvy_RfBg,2340
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869
anyio/abc/__pycache__/__init__.cpython-312.pyc,,
anyio/abc/__pycache__/_eventloop.cpython-312.pyc,,
anyio/abc/__pycache__/_resources.cpython-312.pyc,,
anyio/abc/__pycache__/_sockets.cpython-312.pyc,,
anyio/abc/__pycache__/_streams.cpython-312.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,,
anyio/abc/__pycache__/_tasks.cpython-312.pyc,,
anyio/abc/__pycache__/_testing.cpython-312.pyc,,
anyio/abc/_eventloop.py,sha256=GlzgB3UJGgG6Kr7olpjOZ-o00PghecXuofVDQ_5611Q,10749
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258
anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
anyio/from_thread.py,sha256=L-0w1HxJ6BSb-KuVi57k5Tkc3yzQrx3QK5tAxMPcY-0,19141
anyio/functools.py,sha256=HWj7GBEmc0Z-mZg3uok7Z7ZJn0rEC_0Pzbt0nYUDaTQ,10973
anyio/lowlevel.py,sha256=AyKLVK3LaWSoK39LkCKxE4_GDMLKZBNqTrLUgk63y80,5158
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-312.pyc,,
anyio/streams/__pycache__/buffered.cpython-312.pyc,,
anyio/streams/__pycache__/file.cpython-312.pyc,,
anyio/streams/__pycache__/memory.cpython-312.pyc,,
anyio/streams/__pycache__/stapled.cpython-312.pyc,,
anyio/streams/__pycache__/text.cpython-312.pyc,,
anyio/streams/__pycache__/tls.cpython-312.pyc,,
anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263
anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470
anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740
anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390
anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765
anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368
anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100
anyio/to_process.py,sha256=J7gAA_YOuoHqnpDAf5fm1Qu6kOmTzdFbiDNvnV755vk,9798
anyio/to_thread.py,sha256=menEgXYmUV7Fjg_9WqCV95P9MAtQS8BzPGGcWB_QnfQ,2687


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (80.9.0)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,2 @@
[pytest11]
anyio = anyio.pytest_plugin


@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2018 Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,111 @@
from __future__ import annotations
from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_available_backends as get_available_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
from ._core._eventloop import run as run
from ._core._eventloop import sleep as sleep
from ._core._eventloop import sleep_forever as sleep_forever
from ._core._eventloop import sleep_until as sleep_until
from ._core._exceptions import BrokenResourceError as BrokenResourceError
from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
from ._core._exceptions import BusyResourceError as BusyResourceError
from ._core._exceptions import ClosedResourceError as ClosedResourceError
from ._core._exceptions import ConnectionFailed as ConnectionFailed
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
from ._core._exceptions import EndOfStream as EndOfStream
from ._core._exceptions import IncompleteRead as IncompleteRead
from ._core._exceptions import NoEventLoopError as NoEventLoopError
from ._core._exceptions import RunFinishedError as RunFinishedError
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
from ._core._exceptions import WouldBlock as WouldBlock
from ._core._fileio import AsyncFile as AsyncFile
from ._core._fileio import Path as Path
from ._core._fileio import open_file as open_file
from ._core._fileio import wrap_file as wrap_file
from ._core._resources import aclose_forcefully as aclose_forcefully
from ._core._signals import open_signal_receiver as open_signal_receiver
from ._core._sockets import TCPConnectable as TCPConnectable
from ._core._sockets import UNIXConnectable as UNIXConnectable
from ._core._sockets import as_connectable as as_connectable
from ._core._sockets import connect_tcp as connect_tcp
from ._core._sockets import connect_unix as connect_unix
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
from ._core._sockets import (
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
)
from ._core._sockets import create_tcp_listener as create_tcp_listener
from ._core._sockets import create_udp_socket as create_udp_socket
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
from ._core._sockets import create_unix_listener as create_unix_listener
from ._core._sockets import getaddrinfo as getaddrinfo
from ._core._sockets import getnameinfo as getnameinfo
from ._core._sockets import notify_closing as notify_closing
from ._core._sockets import wait_readable as wait_readable
from ._core._sockets import wait_socket_readable as wait_socket_readable
from ._core._sockets import wait_socket_writable as wait_socket_writable
from ._core._sockets import wait_writable as wait_writable
from ._core._streams import create_memory_object_stream as create_memory_object_stream
from ._core._subprocesses import open_process as open_process
from ._core._subprocesses import run_process as run_process
from ._core._synchronization import CapacityLimiter as CapacityLimiter
from ._core._synchronization import (
CapacityLimiterStatistics as CapacityLimiterStatistics,
)
from ._core._synchronization import Condition as Condition
from ._core._synchronization import ConditionStatistics as ConditionStatistics
from ._core._synchronization import Event as Event
from ._core._synchronization import EventStatistics as EventStatistics
from ._core._synchronization import Lock as Lock
from ._core._synchronization import LockStatistics as LockStatistics
from ._core._synchronization import ResourceGuard as ResourceGuard
from ._core._synchronization import Semaphore as Semaphore
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
from ._core._tasks import CancelScope as CancelScope
from ._core._tasks import create_task_group as create_task_group
from ._core._tasks import current_effective_deadline as current_effective_deadline
from ._core._tasks import fail_after as fail_after
from ._core._tasks import move_on_after as move_on_after
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
from ._core._tempfile import TemporaryFile as TemporaryFile
from ._core._tempfile import gettempdir as gettempdir
from ._core._tempfile import gettempdirb as gettempdirb
from ._core._tempfile import mkdtemp as mkdtemp
from ._core._tempfile import mkstemp as mkstemp
from ._core._testing import TaskInfo as TaskInfo
from ._core._testing import get_current_task as get_current_task
from ._core._testing import get_running_tasks as get_running_tasks
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
from ._core._typedattr import typed_attribute as typed_attribute
# Re-export imports so they look like they live directly in this package
for __value in list(locals().values()):
if getattr(__value, "__module__", "").startswith("anyio."):
__value.__module__ = __name__
del __value
def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
"""Support deprecated aliases."""
if attr == "BrokenWorkerIntepreter":
import warnings
warnings.warn(
"The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
DeprecationWarning,
stacklevel=2,
)
return BrokenWorkerInterpreter
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")

File diff suppressed because it is too large

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff