Hello nurse
This commit is contained in:
241
.venv/bin/Activate.ps1
Normal file
241
.venv/bin/Activate.ps1
Normal file
@@ -0,0 +1,241 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
66
.venv/bin/activate
Normal file
66
.venv/bin/activate
Normal file
@@ -0,0 +1,66 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV="/Users/michaelmolloy/tools/openclaw-scripts/.venv"
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1="(.venv) ${PS1:-}"
|
||||
export PS1
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
25
.venv/bin/activate.csh
Normal file
25
.venv/bin/activate.csh
Normal file
@@ -0,0 +1,25 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV "/Users/michaelmolloy/tools/openclaw-scripts/.venv"
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = "(.venv) $prompt"
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
64
.venv/bin/activate.fish
Normal file
64
.venv/bin/activate.fish
Normal file
@@ -0,0 +1,64 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
functions -e fish_prompt
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV "/Users/michaelmolloy/tools/openclaw-scripts/.venv"
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
end
|
||||
6
.venv/bin/jsonschema
Executable file
6
.venv/bin/jsonschema
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/Users/michaelmolloy/tools/openclaw-scripts/.venv/bin/python
|
||||
import sys
|
||||
from jsonschema.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = sys.argv[0].removesuffix('.exe')
|
||||
sys.exit(main())
|
||||
8
.venv/bin/pip
Executable file
8
.venv/bin/pip
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/Users/michaelmolloy/tools/openclaw-scripts/.venv/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
.venv/bin/pip3
Executable file
8
.venv/bin/pip3
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/Users/michaelmolloy/tools/openclaw-scripts/.venv/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
.venv/bin/pip3.9
Executable file
8
.venv/bin/pip3.9
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/Users/michaelmolloy/tools/openclaw-scripts/.venv/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
1
.venv/bin/python
Symbolic link
1
.venv/bin/python
Symbolic link
@@ -0,0 +1 @@
|
||||
python3.9
|
||||
1
.venv/bin/python3
Symbolic link
1
.venv/bin/python3
Symbolic link
@@ -0,0 +1 @@
|
||||
python3.9
|
||||
1
.venv/bin/python3.9
Symbolic link
1
.venv/bin/python3.9
Symbolic link
@@ -0,0 +1 @@
|
||||
/usr/local/opt/python@3.9/bin/python3.9
|
||||
123
.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py
Normal file
123
.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py
Normal file
@@ -0,0 +1,123 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import importlib
|
||||
import warnings
|
||||
|
||||
|
||||
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||
|
||||
|
||||
def warn_distutils_present():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
if is_pypy and sys.version_info < (3, 7):
|
||||
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||
return
|
||||
warnings.warn(
|
||||
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||
"using distutils directly, ensure that setuptools is installed in the "
|
||||
"traditional way (e.g. not an editable install), and/or make sure "
|
||||
"that setuptools is always imported before distutils.")
|
||||
|
||||
|
||||
def clear_distutils():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
warnings.warn("Setuptools is replacing distutils.")
|
||||
mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
|
||||
for name in mods:
|
||||
del sys.modules[name]
|
||||
|
||||
|
||||
def enabled():
|
||||
"""
|
||||
Allow selection of distutils by environment variable.
|
||||
"""
|
||||
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
|
||||
return which == 'local'
|
||||
|
||||
|
||||
def ensure_local_distutils():
|
||||
clear_distutils()
|
||||
distutils = importlib.import_module('setuptools._distutils')
|
||||
distutils.__name__ = 'distutils'
|
||||
sys.modules['distutils'] = distutils
|
||||
|
||||
# sanity check that submodules load as expected
|
||||
core = importlib.import_module('distutils.core')
|
||||
assert '_distutils' in core.__file__, core.__file__
|
||||
|
||||
|
||||
def do_override():
|
||||
"""
|
||||
Ensure that the local copy of distutils is preferred over stdlib.
|
||||
|
||||
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||
for more motivation.
|
||||
"""
|
||||
if enabled():
|
||||
warn_distutils_present()
|
||||
ensure_local_distutils()
|
||||
|
||||
|
||||
class DistutilsMetaFinder:
|
||||
def find_spec(self, fullname, path, target=None):
|
||||
if path is not None:
|
||||
return
|
||||
|
||||
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||
method = getattr(self, method_name, lambda: None)
|
||||
return method()
|
||||
|
||||
def spec_for_distutils(self):
|
||||
import importlib.abc
|
||||
import importlib.util
|
||||
|
||||
class DistutilsLoader(importlib.abc.Loader):
|
||||
|
||||
def create_module(self, spec):
|
||||
return importlib.import_module('setuptools._distutils')
|
||||
|
||||
def exec_module(self, module):
|
||||
pass
|
||||
|
||||
return importlib.util.spec_from_loader('distutils', DistutilsLoader())
|
||||
|
||||
def spec_for_pip(self):
|
||||
"""
|
||||
Ensure stdlib distutils when running under pip.
|
||||
See pypa/pip#8761 for rationale.
|
||||
"""
|
||||
if self.pip_imported_during_build():
|
||||
return
|
||||
clear_distutils()
|
||||
self.spec_for_distutils = lambda: None
|
||||
|
||||
@staticmethod
|
||||
def pip_imported_during_build():
|
||||
"""
|
||||
Detect if pip is being imported in a build script. Ref #2355.
|
||||
"""
|
||||
import traceback
|
||||
return any(
|
||||
frame.f_globals['__file__'].endswith('setup.py')
|
||||
for frame, line in traceback.walk_stack(None)
|
||||
)
|
||||
|
||||
|
||||
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||
|
||||
|
||||
def add_shim():
|
||||
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||
|
||||
|
||||
def remove_shim():
|
||||
try:
|
||||
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||
except ValueError:
|
||||
pass
|
||||
@@ -0,0 +1 @@
|
||||
__import__('_distutils_hack').do_override()
|
||||
104
.venv/lib/python3.9/site-packages/attr/__init__.py
Normal file
104
.venv/lib/python3.9/site-packages/attr/__init__.py
Normal file
@@ -0,0 +1,104 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Classes Without Boilerplate
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
from typing import Callable, Literal, Protocol
|
||||
|
||||
from . import converters, exceptions, filters, setters, validators
|
||||
from ._cmp import cmp_using
|
||||
from ._config import get_run_validators, set_run_validators
|
||||
from ._funcs import asdict, assoc, astuple, has, resolve_types
|
||||
from ._make import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
Converter,
|
||||
Factory,
|
||||
_Nothing,
|
||||
attrib,
|
||||
attrs,
|
||||
evolve,
|
||||
fields,
|
||||
fields_dict,
|
||||
make_class,
|
||||
validate,
|
||||
)
|
||||
from ._next_gen import define, field, frozen, mutable
|
||||
from ._version_info import VersionInfo
|
||||
|
||||
|
||||
s = attributes = attrs
|
||||
ib = attr = attrib
|
||||
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
|
||||
|
||||
|
||||
class AttrsInstance(Protocol):
|
||||
pass
|
||||
|
||||
|
||||
NothingType = Literal[_Nothing.NOTHING]
|
||||
|
||||
__all__ = [
|
||||
"NOTHING",
|
||||
"Attribute",
|
||||
"AttrsInstance",
|
||||
"Converter",
|
||||
"Factory",
|
||||
"NothingType",
|
||||
"asdict",
|
||||
"assoc",
|
||||
"astuple",
|
||||
"attr",
|
||||
"attrib",
|
||||
"attributes",
|
||||
"attrs",
|
||||
"cmp_using",
|
||||
"converters",
|
||||
"define",
|
||||
"evolve",
|
||||
"exceptions",
|
||||
"field",
|
||||
"fields",
|
||||
"fields_dict",
|
||||
"filters",
|
||||
"frozen",
|
||||
"get_run_validators",
|
||||
"has",
|
||||
"ib",
|
||||
"make_class",
|
||||
"mutable",
|
||||
"resolve_types",
|
||||
"s",
|
||||
"set_run_validators",
|
||||
"setters",
|
||||
"validate",
|
||||
"validators",
|
||||
]
|
||||
|
||||
|
||||
def _make_getattr(mod_name: str) -> Callable:
|
||||
"""
|
||||
Create a metadata proxy for packaging information that uses *mod_name* in
|
||||
its warnings and errors.
|
||||
"""
|
||||
|
||||
def __getattr__(name: str) -> str:
|
||||
if name not in ("__version__", "__version_info__"):
|
||||
msg = f"module {mod_name} has no attribute {name}"
|
||||
raise AttributeError(msg)
|
||||
|
||||
from importlib.metadata import metadata
|
||||
|
||||
meta = metadata("attrs")
|
||||
|
||||
if name == "__version_info__":
|
||||
return VersionInfo._from_version_string(meta["version"])
|
||||
|
||||
return meta["version"]
|
||||
|
||||
return __getattr__
|
||||
|
||||
|
||||
__getattr__ = _make_getattr(__name__)
|
||||
389
.venv/lib/python3.9/site-packages/attr/__init__.pyi
Normal file
389
.venv/lib/python3.9/site-packages/attr/__init__.pyi
Normal file
@@ -0,0 +1,389 @@
|
||||
import enum
|
||||
import sys
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Generic,
|
||||
Literal,
|
||||
Mapping,
|
||||
Protocol,
|
||||
Sequence,
|
||||
TypeVar,
|
||||
overload,
|
||||
)
|
||||
|
||||
# `import X as X` is required to make these public
|
||||
from . import converters as converters
|
||||
from . import exceptions as exceptions
|
||||
from . import filters as filters
|
||||
from . import setters as setters
|
||||
from . import validators as validators
|
||||
from ._cmp import cmp_using as cmp_using
|
||||
from ._typing_compat import AttrsInstance_
|
||||
from ._version_info import VersionInfo
|
||||
from attrs import (
|
||||
define as define,
|
||||
field as field,
|
||||
mutable as mutable,
|
||||
frozen as frozen,
|
||||
_EqOrderType,
|
||||
_ValidatorType,
|
||||
_ConverterType,
|
||||
_ReprArgType,
|
||||
_OnSetAttrType,
|
||||
_OnSetAttrArgType,
|
||||
_FieldTransformer,
|
||||
_ValidatorArgType,
|
||||
)
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeGuard, TypeAlias
|
||||
else:
|
||||
from typing_extensions import TypeGuard, TypeAlias
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import dataclass_transform
|
||||
else:
|
||||
from typing_extensions import dataclass_transform
|
||||
|
||||
__version__: str
|
||||
__version_info__: VersionInfo
|
||||
__title__: str
|
||||
__description__: str
|
||||
__url__: str
|
||||
__uri__: str
|
||||
__author__: str
|
||||
__email__: str
|
||||
__license__: str
|
||||
__copyright__: str
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_C = TypeVar("_C", bound=type)
|
||||
|
||||
_FilterType = Callable[["Attribute[_T]", _T], bool]
|
||||
|
||||
# We subclass this here to keep the protocol's qualified name clean.
|
||||
class AttrsInstance(AttrsInstance_, Protocol):
|
||||
pass
|
||||
|
||||
_A = TypeVar("_A", bound=type[AttrsInstance])
|
||||
|
||||
class _Nothing(enum.Enum):
|
||||
NOTHING = enum.auto()
|
||||
|
||||
NOTHING = _Nothing.NOTHING
|
||||
NothingType: TypeAlias = Literal[_Nothing.NOTHING]
|
||||
|
||||
# NOTE: Factory lies about its return type to make this possible:
|
||||
# `x: List[int] # = Factory(list)`
|
||||
# Work around mypy issue #4554 in the common case by using an overload.
|
||||
|
||||
@overload
|
||||
def Factory(factory: Callable[[], _T]) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Callable[[Any], _T],
|
||||
takes_self: Literal[True],
|
||||
) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Callable[[], _T],
|
||||
takes_self: Literal[False],
|
||||
) -> _T: ...
|
||||
|
||||
In = TypeVar("In")
|
||||
Out = TypeVar("Out")
|
||||
|
||||
class Converter(Generic[In, Out]):
|
||||
@overload
|
||||
def __init__(self, converter: Callable[[In], Out]) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self,
|
||||
converter: Callable[[In, AttrsInstance, Attribute], Out],
|
||||
*,
|
||||
takes_self: Literal[True],
|
||||
takes_field: Literal[True],
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self,
|
||||
converter: Callable[[In, Attribute], Out],
|
||||
*,
|
||||
takes_field: Literal[True],
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self,
|
||||
converter: Callable[[In, AttrsInstance], Out],
|
||||
*,
|
||||
takes_self: Literal[True],
|
||||
) -> None: ...
|
||||
|
||||
class Attribute(Generic[_T]):
|
||||
name: str
|
||||
default: _T | None
|
||||
validator: _ValidatorType[_T] | None
|
||||
repr: _ReprArgType
|
||||
cmp: _EqOrderType
|
||||
eq: _EqOrderType
|
||||
order: _EqOrderType
|
||||
hash: bool | None
|
||||
init: bool
|
||||
converter: Converter | None
|
||||
metadata: dict[Any, Any]
|
||||
type: type[_T] | None
|
||||
kw_only: bool
|
||||
on_setattr: _OnSetAttrType
|
||||
alias: str | None
|
||||
|
||||
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
|
||||
|
||||
# NOTE: We had several choices for the annotation to use for type arg:
|
||||
# 1) Type[_T]
|
||||
# - Pros: Handles simple cases correctly
|
||||
# - Cons: Might produce less informative errors in the case of conflicting
|
||||
# TypeVars e.g. `attr.ib(default='bad', type=int)`
|
||||
# 2) Callable[..., _T]
|
||||
# - Pros: Better error messages than #1 for conflicting TypeVars
|
||||
# - Cons: Terrible error messages for validator checks.
|
||||
# e.g. attr.ib(type=int, validator=validate_str)
|
||||
# -> error: Cannot infer function type argument
|
||||
# 3) type (and do all of the work in the mypy plugin)
|
||||
# - Pros: Simple here, and we could customize the plugin with our own errors.
|
||||
# - Cons: Would need to write mypy plugin code to handle all the cases.
|
||||
# We chose option #1.
|
||||
|
||||
# `attr` lies about its return type to make the following possible:
|
||||
# attr() -> Any
|
||||
# attr(8) -> int
|
||||
# attr(validator=<some callable>) -> Whatever the callable expects.
|
||||
# This makes this type of assignments possible:
|
||||
# x: int = attr(8)
|
||||
#
|
||||
# This form catches explicit None or no default but with no other arguments
|
||||
# returns Any.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: _EqOrderType | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
type: None = ...,
|
||||
converter: None = ...,
|
||||
factory: None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
) -> Any: ...
|
||||
|
||||
# This form catches an explicit None or no default and infers the type from the
|
||||
# other arguments.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: _ValidatorArgType[_T] | None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: _EqOrderType | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
type: type[_T] | None = ...,
|
||||
converter: _ConverterType
|
||||
| list[_ConverterType]
|
||||
| tuple[_ConverterType]
|
||||
| None = ...,
|
||||
factory: Callable[[], _T] | None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form catches an explicit default argument.
|
||||
@overload
|
||||
def attrib(
|
||||
default: _T,
|
||||
validator: _ValidatorArgType[_T] | None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: _EqOrderType | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
type: type[_T] | None = ...,
|
||||
converter: _ConverterType
|
||||
| list[_ConverterType]
|
||||
| tuple[_ConverterType]
|
||||
| None = ...,
|
||||
factory: Callable[[], _T] | None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form covers type=non-Type: e.g. forward references (str), Any
# Because *type* is an arbitrary object here, nothing can be inferred and the
# stub falls back to Any.
@overload
def attrib(
    default: _T | None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: object = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool | None = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> Any: ...
|
||||
# Direct decoration: ``@attr.s`` applied to a class, no call parentheses —
# receives the class and returns the (possibly replaced) class.
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> _C: ...
|
||||
# Parametrized decoration: ``@attr.s(...)`` called with keyword arguments —
# returns a decorator that is then applied to the class.
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> Callable[[_C], _C]: ...
|
||||
# Introspection helpers: fields() returns the tuple of Attribute objects,
# fields_dict() the same keyed by attribute name, validate() re-runs all
# validators on an instance.
def fields(cls: type[AttrsInstance]) -> Any: ...
def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...

# Resolves string/forward-reference annotations on an attrs class in place
# and returns the class so it can be used as a decorator.
def resolve_types(
    cls: _A,
    globalns: dict[str, Any] | None = ...,
    localns: dict[str, Any] | None = ...,
    attribs: list[Attribute[Any]] | None = ...,
    include_extras: bool = ...,
) -> _A: ...
|
||||
|
||||
# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: list[str] | tuple[str, ...] | dict[str, Any],
    bases: tuple[type, ...] = ...,
    class_body: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    collect_by_mro: bool = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
) -> type: ...
|
||||
|
||||
# _funcs --
|
||||
|
||||
# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    dict_factory: type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
    tuple_keys: bool | None = ...,
) -> dict[str, Any]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    tuple_factory: type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> tuple[Any, ...]: ...
|
||||
# has() is a TypeGuard so type checkers narrow *cls* to an attrs class in
# the True branch; assoc/evolve create modified copies of an instance.
def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

# Historical short names kept for backwards compatibility.
s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
|
||||
160
.venv/lib/python3.9/site-packages/attr/_cmp.py
Normal file
160
.venv/lib/python3.9/site-packages/attr/_cmp.py
Normal file
@@ -0,0 +1,160 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
|
||||
import functools
|
||||
import types
|
||||
|
||||
from ._make import __ne__
|
||||
|
||||
|
||||
# Human-readable operator symbols used in the generated methods' docstrings.
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
|
||||
|
||||
|
||||
def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
    and ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if at least
    one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    Args:
        eq (typing.Callable | None):
            Callable used to evaluate equality of two objects.

        lt (typing.Callable | None):
            Callable used to evaluate whether one object is less than another
            object.

        le (typing.Callable | None):
            Callable used to evaluate whether one object is less than or equal
            to another object.

        gt (typing.Callable | None):
            Callable used to evaluate whether one object is greater than
            another object.

        ge (typing.Callable | None):
            Callable used to evaluate whether one object is greater than or
            equal to another object.

        require_same_type (bool):
            When `True`, equality and ordering methods will return
            `NotImplemented` if objects are not of the same type.

        class_name (str | None): Name of class. Defaults to "Comparable".

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """
    # Class namespace: a wrapper holding a single .value plus the machinery
    # _make_operator's generated methods rely on.
    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = __ne__

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one operation was defined.
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            # BUG FIX: the original message read "eq must be define is order
            # to ..."; corrected grammar in the user-facing error text.
            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
            raise ValueError(msg)
        type_ = functools.total_ordering(type_)

    return type_
|
||||
|
||||
|
||||
def _make_init():
|
||||
"""
|
||||
Create __init__ method.
|
||||
"""
|
||||
|
||||
def __init__(self, value):
|
||||
"""
|
||||
Initialize object with *value*.
|
||||
"""
|
||||
self.value = value
|
||||
|
||||
return __init__
|
||||
|
||||
|
||||
def _make_operator(name, func):
    """
    Build a rich-comparison method ``__<name>__`` that delegates to *func*.
    """

    def method(self, other):
        # Honor the class's comparability requirements first.
        if not self._is_comparable_to(other):
            return NotImplemented

        # Pass through NotImplemented from the user-supplied callable as-is.
        outcome = func(self.value, other.value)
        if outcome is NotImplemented:
            return NotImplemented

        return outcome

    method.__name__ = f"__{name}__"
    method.__doc__ = (
        f"Return a {_operation_names[name]} b. Computed by attrs."
    )

    return method
|
||||
|
||||
|
||||
def _is_comparable_to(self, other):
|
||||
"""
|
||||
Check whether `other` is comparable to `self`.
|
||||
"""
|
||||
return all(func(self, other) for func in self._requirements)
|
||||
|
||||
|
||||
def _check_same_type(self, other):
|
||||
"""
|
||||
Return True if *self* and *other* are of the same type, False otherwise.
|
||||
"""
|
||||
return other.value.__class__ is self.value.__class__
|
||||
13
.venv/lib/python3.9/site-packages/attr/_cmp.pyi
Normal file
13
.venv/lib/python3.9/site-packages/attr/_cmp.pyi
Normal file
@@ -0,0 +1,13 @@
|
||||
from typing import Any, Callable

# Signature shared by every user-supplied comparison callable.
_CompareWithType = Callable[[Any, Any], bool]

# Returns a dynamically created wrapper class suitable for attrs' eq/order/cmp.
def cmp_using(
    eq: _CompareWithType | None = ...,
    lt: _CompareWithType | None = ...,
    le: _CompareWithType | None = ...,
    gt: _CompareWithType | None = ...,
    ge: _CompareWithType | None = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> type: ...
|
||||
99
.venv/lib/python3.9/site-packages/attr/_compat.py
Normal file
99
.venv/lib/python3.9/site-packages/attr/_compat.py
Normal file
@@ -0,0 +1,99 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
import inspect
|
||||
import platform
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from collections.abc import Mapping, Sequence # noqa: F401
|
||||
from typing import _GenericAlias
|
||||
|
||||
|
||||
# True when running under the PyPy interpreter.
PYPY = platform.python_implementation() == "PyPy"
# Feature flags for version-gated behavior; only (major, minor) is compared.
PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
|
||||
|
||||
|
||||
if PY_3_14_PLUS:
    import annotationlib

    # We request forward-ref annotations to not break in the presence of
    # forward references.

    def _get_annotations(cls):
        # FORWARDREF keeps unresolved names as ForwardRef objects instead of
        # raising NameError.
        return annotationlib.get_annotations(
            cls, format=annotationlib.Format.FORWARDREF
        )

else:

    def _get_annotations(cls):
        """
        Get annotations for *cls*.
        """
        # Only this class's own __annotations__ — deliberately ignores
        # annotations inherited from base classes.
        return cls.__dict__.get("__annotations__", {})
|
||||
|
||||
|
||||
class _AnnotationExtractor:
|
||||
"""
|
||||
Extract type annotations from a callable, returning None whenever there
|
||||
is none.
|
||||
"""
|
||||
|
||||
__slots__ = ["sig"]
|
||||
|
||||
def __init__(self, callable):
|
||||
try:
|
||||
self.sig = inspect.signature(callable)
|
||||
except (ValueError, TypeError): # inspect failed
|
||||
self.sig = None
|
||||
|
||||
def get_first_param_type(self):
|
||||
"""
|
||||
Return the type annotation of the first argument if it's not empty.
|
||||
"""
|
||||
if not self.sig:
|
||||
return None
|
||||
|
||||
params = list(self.sig.parameters.values())
|
||||
if params and params[0].annotation is not inspect.Parameter.empty:
|
||||
return params[0].annotation
|
||||
|
||||
return None
|
||||
|
||||
def get_return_type(self):
|
||||
"""
|
||||
Return the return type if it's not empty.
|
||||
"""
|
||||
if (
|
||||
self.sig
|
||||
and self.sig.return_annotation is not inspect.Signature.empty
|
||||
):
|
||||
return self.sig.return_annotation
|
||||
|
||||
return None
|
||||
|
||||
|
||||
# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
|
||||
|
||||
|
||||
def get_generic_base(cl):
    """
    Return the unsubscripted origin of a parametrized generic such as
    ``A[str]``; return None for anything else.
    """
    if cl.__class__ is not _GenericAlias:
        return None
    return cl.__origin__
|
||||
31
.venv/lib/python3.9/site-packages/attr/_config.py
Normal file
31
.venv/lib/python3.9/site-packages/attr/_config.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
__all__ = ["get_run_validators", "set_run_validators"]

# Module-level switch; mutated only through set_run_validators().
_run_validators = True
|
||||
|
||||
|
||||
def set_run_validators(run):
    """
    Globally enable or disable running validators (enabled by default).

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to the new ``attrs`` namespace. Use
       `attrs.validators.set_disabled()` instead.
    """
    global _run_validators

    # Reject non-bool values (including truthy ints) up front.
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")

    _run_validators = run
|
||||
|
||||
|
||||
def get_run_validators():
    """
    Report whether validators are currently being run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to the new ``attrs`` namespace. Use
       `attrs.validators.get_disabled()` instead.
    """
    return _run_validators
|
||||
497
.venv/lib/python3.9/site-packages/attr/_funcs.py
Normal file
497
.venv/lib/python3.9/site-packages/attr/_funcs.py
Normal file
@@ -0,0 +1,497 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
|
||||
import copy
|
||||
|
||||
from ._compat import get_generic_base
|
||||
from ._make import _OBJ_SETATTR, NOTHING, fields
|
||||
from .exceptions import AttrsAttributeNotFoundError
|
||||
|
||||
|
||||
# Types whose instances asdict/astuple copy as-is without recursing into them.
_ATOMIC_TYPES = frozenset(
    {
        type(None),
        bool,
        int,
        float,
        str,
        complex,
        bytes,
        type(...),
        type,
        range,
        property,
    }
)
|
||||
|
||||
|
||||
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the *attrs* attribute values of *inst* as a dict.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool): Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        dict_factory (~typing.Callable):
            A callable to produce dictionaries from. For example, to produce
            ordered dictionaries instead of normal Python dictionaries, pass in
            ``collections.OrderedDict``.

        retain_collection_types (bool):
            Do not convert to `list` when encountering an attribute whose type
            is `tuple` or `set`. Only meaningful if *recurse* is `True`.

        value_serializer (typing.Callable | None):
            A hook that is called for every attribute or dict key/value. It
            receives the current instance, field and value and must return the
            (updated) value. The hook is run *after* the optional *filter* has
            been applied.

    Returns:
        Return type of *dict_factory*.

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0
       If a dict has a collection for a key, it is serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # The filter drops the whole attribute; the serializer runs after it.
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        if recurse is True:
            value_type = type(v)
            if value_type in _ATOMIC_TYPES:
                rv[a.name] = v
            elif has(value_type):
                # Nested attrs instance: serialize with the same options.
                rv[a.name] = asdict(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif issubclass(value_type, (tuple, list, set, frozenset)):
                cf = value_type if retain_collection_types is True else list
                items = [
                    _asdict_anything(
                        i,
                        is_key=False,
                        filter=filter,
                        dict_factory=dict_factory,
                        retain_collection_types=retain_collection_types,
                        value_serializer=value_serializer,
                    )
                    for i in v
                ]
                try:
                    rv[a.name] = cf(items)
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv[a.name] = cf(*items)
            elif issubclass(value_type, dict):
                df = dict_factory
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
|
||||
|
||||
|
||||
def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    *is_key* marks values that end up as dict keys: collections used as keys
    become tuples (hashable) instead of lists.
    """
    val_type = type(val)
    if val_type in _ATOMIC_TYPES:
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)
    elif getattr(val_type, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif issubclass(val_type, (tuple, list, set, frozenset)):
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
        )
    elif issubclass(val_type, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in val.items()
        )
    else:
        # Unknown type: keep as-is, but still give the serializer a chance.
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv
|
||||
|
||||
|
||||
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the *attrs* attribute values of *inst* as a tuple.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool):
            Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        tuple_factory (~typing.Callable):
            A callable to produce tuples from. For example, to produce lists
            instead of tuples.

        retain_collection_types (bool):
            Do not convert to `list` or `dict` when encountering an attribute
            which type is `tuple`, `dict` or `set`. Only meaningful if
            *recurse* is `True`.

    Returns:
        Return type of *tuple_factory*

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        value_type = type(v)
        if recurse is True:
            if value_type in _ATOMIC_TYPES:
                rv.append(v)
            elif has(value_type):
                # Nested attrs instance: recurse with the same options.
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif issubclass(value_type, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                items = [
                    (
                        astuple(
                            j,
                            recurse=True,
                            filter=filter,
                            tuple_factory=tuple_factory,
                            retain_collection_types=retain,
                        )
                        if has(j.__class__)
                        else j
                    )
                    for j in v
                ]
                try:
                    rv.append(cf(items))
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv.append(cf(*items))
            elif issubclass(value_type, dict):
                df = value_type if retain is True else dict
                rv.append(
                    df(
                        (
                            (
                                astuple(
                                    kk,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(kk.__class__)
                                else kk
                            ),
                            (
                                astuple(
                                    vv,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(vv.__class__)
                                else vv
                            ),
                        )
                        for kk, vv in v.items()
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    # NOTE(review): when tuple_factory is list the accumulator is returned
    # directly instead of being passed through the factory — preserved as-is.
    return rv if tuple_factory is list else tuple_factory(rv)
|
||||
|
||||
|
||||
def has(cls):
    """
    Check whether *cls* is a class with *attrs* attributes.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

    Returns:
        bool:
    """
    if getattr(cls, "__attrs_attrs__", None) is not None:
        return True

    # Not directly an attrs class -- maybe a parametrized generic (A[str])?
    origin = get_generic_base(cls)
    if origin is None:
        return False

    inherited = getattr(origin, "__attrs_attrs__", None)
    if inherited is None:
        return False

    # Cache the attributes on the alias so the next lookup is fast.
    cls.__attrs_attrs__ = inherited
    return True
|
||||
|
||||
|
||||
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    This is different from `evolve` that applies the changes to the arguments
    that create the new instance.

    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
    doesn't work. Therefore `assoc` is deprecated, but will not be removed.

    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251

    Args:
        inst: Instance of a class with *attrs* attributes.

        changes: Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        attrs.exceptions.AttrsAttributeNotFoundError:
            If *attr_name* couldn't be found on *cls*.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. deprecated:: 17.1.0
        Use `attrs.evolve` instead if you can. This function will not be
        removed due to the slightly different approach compared to
        `attrs.evolve`, though.
    """
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in changes.items():
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            msg = f"{k} is not an attrs attribute on {new.__class__}."
            raise AttrsAttributeNotFoundError(msg)
        # Bypass __setattr__ so frozen classes can be updated too.
        _OBJ_SETATTR(new, k, v)
    return new
|
||||
|
||||
|
||||
def resolve_types(
    cls, globalns=None, localns=None, attribs=None, include_extras=True
):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in :class:`Attribute`'s
    *type* field. In other words, you don't need to resolve your types if you
    only use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, for example, if the name only
    exists inside a method, you may pass *globalns* or *localns* to specify
    other dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    Args:
        cls (type): Class to resolve.

        globalns (dict | None): Dictionary containing global variables.

        localns (dict | None): Dictionary containing local variables.

        attribs (list | None):
            List of attribs for the given class. This is necessary when calling
            from inside a ``field_transformer`` since *cls* is not an *attrs*
            class yet.

        include_extras (bool):
            Resolve more accurately, if possible. Pass ``include_extras`` to
            ``typing.get_type_hints``, if supported by the typing module. On
            supported Python versions (3.9+), this resolves the types more
            accurately.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class and you didn't pass any attribs.

        NameError: If types cannot be resolved because of missing variables.

    Returns:
        *cls* so you can use this function also as a class decorator. Please
        note that you have to apply it **after** `attrs.define`. That means the
        decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*
    .. versionadded:: 23.1.0 *include_extras*
    """
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        import typing

        kwargs = {
            "globalns": globalns,
            "localns": localns,
            "include_extras": include_extras,
        }

        hints = typing.get_type_hints(cls, **kwargs)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _OBJ_SETATTR(field, "type", hints[field.name])
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
|
||||
3362
.venv/lib/python3.9/site-packages/attr/_make.py
Normal file
3362
.venv/lib/python3.9/site-packages/attr/_make.py
Normal file
File diff suppressed because it is too large
Load Diff
674
.venv/lib/python3.9/site-packages/attr/_next_gen.py
Normal file
674
.venv/lib/python3.9/site-packages/attr/_next_gen.py
Normal file
@@ -0,0 +1,674 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
These are keyword-only APIs that call `attr.s` and `attr.ib` with different
|
||||
default values.
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
|
||||
from . import setters
|
||||
from ._funcs import asdict as _asdict
|
||||
from ._funcs import astuple as _astuple
|
||||
from ._make import (
|
||||
_DEFAULT_ON_SETATTR,
|
||||
NOTHING,
|
||||
_frozen_setattrs,
|
||||
attrib,
|
||||
attrs,
|
||||
)
|
||||
from .exceptions import NotAnAttrsClassError, UnannotatedAttributeError
|
||||
|
||||
|
||||
def define(
|
||||
maybe_cls=None,
|
||||
*,
|
||||
these=None,
|
||||
repr=None,
|
||||
unsafe_hash=None,
|
||||
hash=None,
|
||||
init=None,
|
||||
slots=True,
|
||||
frozen=False,
|
||||
weakref_slot=True,
|
||||
str=False,
|
||||
auto_attribs=None,
|
||||
kw_only=False,
|
||||
cache_hash=False,
|
||||
auto_exc=True,
|
||||
eq=None,
|
||||
order=False,
|
||||
auto_detect=True,
|
||||
getstate_setstate=None,
|
||||
on_setattr=None,
|
||||
field_transformer=None,
|
||||
match_args=True,
|
||||
force_kw_only=False,
|
||||
):
|
||||
r"""
|
||||
A class decorator that adds :term:`dunder methods` according to
|
||||
:term:`fields <field>` specified using :doc:`type annotations <types>`,
|
||||
`field()` calls, or the *these* argument.
|
||||
|
||||
Since *attrs* patches or replaces an existing class, you cannot use
|
||||
`object.__init_subclass__` with *attrs* classes, because it runs too early.
|
||||
As a replacement, you can define ``__attrs_init_subclass__`` on your class.
|
||||
It will be called by *attrs* classes that subclass it after they're
|
||||
created. See also :ref:`init-subclass`.
|
||||
|
||||
Args:
|
||||
slots (bool):
|
||||
Create a :term:`slotted class <slotted classes>` that's more
|
||||
memory-efficient. Slotted classes are generally superior to the
|
||||
default dict classes, but have some gotchas you should know about,
|
||||
so we encourage you to read the :term:`glossary entry <slotted
|
||||
classes>`.
|
||||
|
||||
auto_detect (bool):
|
||||
Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
|
||||
explicitly, assume they are set to True **unless any** of the
|
||||
involved methods for one of the arguments is implemented in the
|
||||
*current* class (meaning, it is *not* inherited from some base
|
||||
class).
|
||||
|
||||
So, for example by implementing ``__eq__`` on a class yourself,
|
||||
*attrs* will deduce ``eq=False`` and will create *neither*
|
||||
``__eq__`` *nor* ``__ne__`` (but Python classes come with a
|
||||
sensible ``__ne__`` by default, so it *should* be enough to only
|
||||
implement ``__eq__`` in most cases).
|
||||
|
||||
Passing :data:`True` or :data:`False` to *init*, *repr*, *eq*, or *hash*
|
||||
overrides whatever *auto_detect* would determine.
|
||||
|
||||
auto_exc (bool):
|
||||
If the class subclasses `BaseException` (which implicitly includes
|
||||
any subclass of any exception), the following happens to behave
|
||||
like a well-behaved Python exception class:
|
||||
|
||||
- the values for *eq*, *order*, and *hash* are ignored and the
|
||||
instances compare and hash by the instance's ids [#]_ ,
|
||||
- all attributes that are either passed into ``__init__`` or have a
|
||||
default value are additionally available as a tuple in the
|
||||
``args`` attribute,
|
||||
- the value of *str* is ignored leaving ``__str__`` to base
|
||||
classes.
|
||||
|
||||
.. [#]
|
||||
Note that *attrs* will *not* remove existing implementations of
|
||||
``__hash__`` or the equality methods. It just won't add own
|
||||
ones.
|
||||
|
||||
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
|
||||
A callable that is run whenever the user attempts to set an
|
||||
attribute (either by assignment like ``i.x = 42`` or by using
|
||||
`setattr` like ``setattr(i, "x", 42)``). It receives the same
|
||||
arguments as validators: the instance, the attribute that is being
|
||||
modified, and the new value.
|
||||
|
||||
If no exception is raised, the attribute is set to the return value
|
||||
of the callable.
|
||||
|
||||
If a list of callables is passed, they're automatically wrapped in
|
||||
an `attrs.setters.pipe`.
|
||||
|
||||
If left None, the default behavior is to run converters and
|
||||
validators whenever an attribute is set.
|
||||
|
||||
init (bool):
|
||||
Create a ``__init__`` method that initializes the *attrs*
|
||||
attributes. Leading underscores are stripped for the argument name,
|
||||
unless an alias is set on the attribute.
|
||||
|
||||
.. seealso::
|
||||
`init` shows advanced ways to customize the generated
|
||||
``__init__`` method, including executing code before and after.
|
||||
|
||||
repr(bool):
|
||||
Create a ``__repr__`` method with a human readable representation
|
||||
of *attrs* attributes.
|
||||
|
||||
str (bool):
|
||||
Create a ``__str__`` method that is identical to ``__repr__``. This
|
||||
is usually not necessary except for `Exception`\ s.
|
||||
|
||||
eq (bool | None):
|
||||
If True or None (default), add ``__eq__`` and ``__ne__`` methods
|
||||
that check two instances for equality.
|
||||
|
||||
.. seealso::
|
||||
`comparison` describes how to customize the comparison behavior
|
||||
going as far comparing NumPy arrays.
|
||||
|
||||
order (bool | None):
|
||||
If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
|
||||
methods that behave like *eq* above and allow instances to be
|
||||
ordered.
|
||||
|
||||
They compare the instances as if they were tuples of their *attrs*
|
||||
attributes if and only if the types of both classes are
|
||||
*identical*.
|
||||
|
||||
If `None` mirror value of *eq*.
|
||||
|
||||
.. seealso:: `comparison`
|
||||
|
||||
unsafe_hash (bool | None):
|
||||
If None (default), the ``__hash__`` method is generated according
|
||||
how *eq* and *frozen* are set.
|
||||
|
||||
1. If *both* are True, *attrs* will generate a ``__hash__`` for
|
||||
you.
|
||||
2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
|
||||
to None, marking it unhashable (which it is).
|
||||
3. If *eq* is False, ``__hash__`` will be left untouched meaning
|
||||
the ``__hash__`` method of the base class will be used. If the
|
||||
base class is `object`, this means it will fall back to id-based
|
||||
hashing.
|
||||
|
||||
Although not recommended, you can decide for yourself and force
|
||||
*attrs* to create one (for example, if the class is immutable even
|
||||
though you didn't freeze it programmatically) by passing True or
|
||||
not. Both of these cases are rather special and should be used
|
||||
carefully.
|
||||
|
||||
.. seealso::
|
||||
|
||||
- Our documentation on `hashing`,
|
||||
- Python's documentation on `object.__hash__`,
|
||||
- and the `GitHub issue that led to the default \ behavior
|
||||
<https://github.com/python-attrs/attrs/issues/136>`_ for more
|
||||
details.
|
||||
|
||||
hash (bool | None):
|
||||
Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
|
||||
|
||||
cache_hash (bool):
|
||||
Ensure that the object's hash code is computed only once and stored
|
||||
on the object. If this is set to True, hashing must be either
|
||||
explicitly or implicitly enabled for this class. If the hash code
|
||||
is cached, avoid any reassignments of fields involved in hash code
|
||||
computation or mutations of the objects those fields point to after
|
||||
object creation. If such changes occur, the behavior of the
|
||||
object's hash code is undefined.
|
||||
|
||||
frozen (bool):
|
||||
Make instances immutable after initialization. If someone attempts
|
||||
to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
|
||||
is raised.
|
||||
|
||||
.. note::
|
||||
|
||||
1. This is achieved by installing a custom ``__setattr__``
|
||||
method on your class, so you can't implement your own.
|
||||
|
||||
2. True immutability is impossible in Python.
|
||||
|
||||
3. This *does* have a minor a runtime performance `impact
|
||||
<how-frozen>` when initializing new instances. In other
|
||||
words: ``__init__`` is slightly slower with ``frozen=True``.
|
||||
|
||||
4. If a class is frozen, you cannot modify ``self`` in
|
||||
``__attrs_post_init__`` or a self-written ``__init__``. You
|
||||
can circumvent that limitation by using
|
||||
``object.__setattr__(self, "attribute_name", value)``.
|
||||
|
||||
5. Subclasses of a frozen class are frozen too.
|
||||
|
||||
kw_only (bool):
|
||||
Make attributes keyword-only in the generated ``__init__`` (if
|
||||
*init* is False, this parameter is ignored). Attributes that
|
||||
explicitly set ``kw_only=False`` are not affected; base class
|
||||
attributes are also not affected.
|
||||
|
||||
Also see *force_kw_only*.
|
||||
|
||||
weakref_slot (bool):
|
||||
Make instances weak-referenceable. This has no effect unless
|
||||
*slots* is True.
|
||||
|
||||
field_transformer (~typing.Callable | None):
|
||||
A function that is called with the original class object and all
|
||||
fields right before *attrs* finalizes the class. You can use this,
|
||||
for example, to automatically add converters or validators to
|
||||
fields based on their types.
|
||||
|
||||
.. seealso:: `transform-fields`
|
||||
|
||||
match_args (bool):
|
||||
If True (default), set ``__match_args__`` on the class to support
|
||||
:pep:`634` (*Structural Pattern Matching*). It is a tuple of all
|
||||
non-keyword-only ``__init__`` parameter names on Python 3.10 and
|
||||
later. Ignored on older Python versions.
|
||||
|
||||
collect_by_mro (bool):
|
||||
If True, *attrs* collects attributes from base classes correctly
|
||||
according to the `method resolution order
|
||||
<https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
|
||||
will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
|
||||
|
||||
See also `issue #428
|
||||
<https://github.com/python-attrs/attrs/issues/428>`_.
|
||||
|
||||
force_kw_only (bool):
|
||||
A back-compat flag for restoring pre-25.4.0 behavior. If True and
|
||||
``kw_only=True``, all attributes are made keyword-only, including
|
||||
base class attributes, and those set to ``kw_only=False`` at the
|
||||
attribute level. Defaults to False.
|
||||
|
||||
See also `issue #980
|
||||
<https://github.com/python-attrs/attrs/issues/980>`_.
|
||||
|
||||
getstate_setstate (bool | None):
|
||||
.. note::
|
||||
|
||||
This is usually only interesting for slotted classes and you
|
||||
should probably just set *auto_detect* to True.
|
||||
|
||||
If True, ``__getstate__`` and ``__setstate__`` are generated and
|
||||
attached to the class. This is necessary for slotted classes to be
|
||||
pickleable. If left None, it's True by default for slotted classes
|
||||
and False for dict classes.
|
||||
|
||||
If *auto_detect* is True, and *getstate_setstate* is left None, and
|
||||
**either** ``__getstate__`` or ``__setstate__`` is detected
|
||||
directly on the class (meaning: not inherited), it is set to False
|
||||
(this is usually what you want).
|
||||
|
||||
auto_attribs (bool | None):
|
||||
If True, look at type annotations to determine which attributes to
|
||||
use, like `dataclasses`. If False, it will only look for explicit
|
||||
:func:`field` class attributes, like classic *attrs*.
|
||||
|
||||
If left None, it will guess:
|
||||
|
||||
1. If any attributes are annotated and no unannotated
|
||||
`attrs.field`\ s are found, it assumes *auto_attribs=True*.
|
||||
2. Otherwise it assumes *auto_attribs=False* and tries to collect
|
||||
`attrs.field`\ s.
|
||||
|
||||
If *attrs* decides to look at type annotations, **all** fields
|
||||
**must** be annotated. If *attrs* encounters a field that is set to
|
||||
a :func:`field` / `attr.ib` but lacks a type annotation, an
|
||||
`attrs.exceptions.UnannotatedAttributeError` is raised. Use
|
||||
``field_name: typing.Any = field(...)`` if you don't want to set a
|
||||
type.
|
||||
|
||||
.. warning::
|
||||
|
||||
For features that use the attribute name to create decorators
|
||||
(for example, :ref:`validators <validators>`), you still *must*
|
||||
assign :func:`field` / `attr.ib` to them. Otherwise Python will
|
||||
either not find the name or try to use the default value to
|
||||
call, for example, ``validator`` on it.
|
||||
|
||||
Attributes annotated as `typing.ClassVar`, and attributes that are
|
||||
neither annotated nor set to an `field()` are **ignored**.
|
||||
|
||||
these (dict[str, object]):
|
||||
A dictionary of name to the (private) return value of `field()`
|
||||
mappings. This is useful to avoid the definition of your attributes
|
||||
within the class body because you can't (for example, if you want
|
||||
to add ``__repr__`` methods to Django models) or don't want to.
|
||||
|
||||
If *these* is not `None`, *attrs* will *not* search the class body
|
||||
for attributes and will *not* remove any attributes from it.
|
||||
|
||||
The order is deduced from the order of the attributes inside
|
||||
*these*.
|
||||
|
||||
Arguably, this is a rather obscure feature.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
|
||||
.. versionadded:: 22.2.0
|
||||
*unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
|
||||
.. versionchanged:: 24.1.0
|
||||
Instances are not compared as tuples of attributes anymore, but using a
|
||||
big ``and`` condition. This is faster and has more correct behavior for
|
||||
uncomparable values like `math.nan`.
|
||||
.. versionadded:: 24.1.0
|
||||
If a class has an *inherited* classmethod called
|
||||
``__attrs_init_subclass__``, it is executed after the class is created.
|
||||
.. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
|
||||
.. versionadded:: 24.3.0
|
||||
Unless already present, a ``__replace__`` method is automatically
|
||||
created for `copy.replace` (Python 3.13+ only).
|
||||
.. versionchanged:: 25.4.0
|
||||
*kw_only* now only applies to attributes defined in the current class,
|
||||
and respects attribute-level ``kw_only=False`` settings.
|
||||
.. versionadded:: 25.4.0
|
||||
Added *force_kw_only* to go back to the previous *kw_only* behavior.
|
||||
|
||||
.. note::
|
||||
|
||||
The main differences to the classic `attr.s` are:
|
||||
|
||||
- Automatically detect whether or not *auto_attribs* should be `True`
|
||||
(c.f. *auto_attribs* parameter).
|
||||
- Converters and validators run when attributes are set by default --
|
||||
if *frozen* is `False`.
|
||||
- *slots=True*
|
||||
|
||||
Usually, this has only upsides and few visible effects in everyday
|
||||
programming. But it *can* lead to some surprising behaviors, so
|
||||
please make sure to read :term:`slotted classes`.
|
||||
|
||||
- *auto_exc=True*
|
||||
- *auto_detect=True*
|
||||
- *order=False*
|
||||
- *force_kw_only=False*
|
||||
- Some options that were only relevant on Python 2 or were kept around
|
||||
for backwards-compatibility have been removed.
|
||||
|
||||
"""
|
||||
|
||||
def do_it(cls, auto_attribs):
|
||||
return attrs(
|
||||
maybe_cls=cls,
|
||||
these=these,
|
||||
repr=repr,
|
||||
hash=hash,
|
||||
unsafe_hash=unsafe_hash,
|
||||
init=init,
|
||||
slots=slots,
|
||||
frozen=frozen,
|
||||
weakref_slot=weakref_slot,
|
||||
str=str,
|
||||
auto_attribs=auto_attribs,
|
||||
kw_only=kw_only,
|
||||
cache_hash=cache_hash,
|
||||
auto_exc=auto_exc,
|
||||
eq=eq,
|
||||
order=order,
|
||||
auto_detect=auto_detect,
|
||||
collect_by_mro=True,
|
||||
getstate_setstate=getstate_setstate,
|
||||
on_setattr=on_setattr,
|
||||
field_transformer=field_transformer,
|
||||
match_args=match_args,
|
||||
force_kw_only=force_kw_only,
|
||||
)
|
||||
|
||||
def wrap(cls):
|
||||
"""
|
||||
Making this a wrapper ensures this code runs during class creation.
|
||||
|
||||
We also ensure that frozen-ness of classes is inherited.
|
||||
"""
|
||||
nonlocal frozen, on_setattr
|
||||
|
||||
had_on_setattr = on_setattr not in (None, setters.NO_OP)
|
||||
|
||||
# By default, mutable classes convert & validate on setattr.
|
||||
if frozen is False and on_setattr is None:
|
||||
on_setattr = _DEFAULT_ON_SETATTR
|
||||
|
||||
# However, if we subclass a frozen class, we inherit the immutability
|
||||
# and disable on_setattr.
|
||||
for base_cls in cls.__bases__:
|
||||
if base_cls.__setattr__ is _frozen_setattrs:
|
||||
if had_on_setattr:
|
||||
msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
|
||||
raise ValueError(msg)
|
||||
|
||||
on_setattr = setters.NO_OP
|
||||
break
|
||||
|
||||
if auto_attribs is not None:
|
||||
return do_it(cls, auto_attribs)
|
||||
|
||||
try:
|
||||
return do_it(cls, True)
|
||||
except UnannotatedAttributeError:
|
||||
return do_it(cls, False)
|
||||
|
||||
# maybe_cls's type depends on the usage of the decorator. It's a class
|
||||
# if it's used as `@attrs` but `None` if used as `@attrs()`.
|
||||
if maybe_cls is None:
|
||||
return wrap
|
||||
|
||||
return wrap(maybe_cls)
|
||||
|
||||
|
||||
mutable = define
|
||||
frozen = partial(define, frozen=True, on_setattr=None)
|
||||
|
||||
|
||||
def field(
|
||||
*,
|
||||
default=NOTHING,
|
||||
validator=None,
|
||||
repr=True,
|
||||
hash=None,
|
||||
init=True,
|
||||
metadata=None,
|
||||
type=None,
|
||||
converter=None,
|
||||
factory=None,
|
||||
kw_only=None,
|
||||
eq=None,
|
||||
order=None,
|
||||
on_setattr=None,
|
||||
alias=None,
|
||||
):
|
||||
"""
|
||||
Create a new :term:`field` / :term:`attribute` on a class.
|
||||
|
||||
.. warning::
|
||||
|
||||
Does **nothing** unless the class is also decorated with
|
||||
`attrs.define` (or similar)!
|
||||
|
||||
Args:
|
||||
default:
|
||||
A value that is used if an *attrs*-generated ``__init__`` is used
|
||||
and no value is passed while instantiating or the attribute is
|
||||
excluded using ``init=False``.
|
||||
|
||||
If the value is an instance of `attrs.Factory`, its callable will
|
||||
be used to construct a new value (useful for mutable data types
|
||||
like lists or dicts).
|
||||
|
||||
If a default is not set (or set manually to `attrs.NOTHING`), a
|
||||
value *must* be supplied when instantiating; otherwise a
|
||||
`TypeError` will be raised.
|
||||
|
||||
.. seealso:: `defaults`
|
||||
|
||||
factory (~typing.Callable):
|
||||
Syntactic sugar for ``default=attr.Factory(factory)``.
|
||||
|
||||
validator (~typing.Callable | list[~typing.Callable]):
|
||||
Callable that is called by *attrs*-generated ``__init__`` methods
|
||||
after the instance has been initialized. They receive the
|
||||
initialized instance, the :func:`~attrs.Attribute`, and the passed
|
||||
value.
|
||||
|
||||
The return value is *not* inspected so the validator has to throw
|
||||
an exception itself.
|
||||
|
||||
If a `list` is passed, its items are treated as validators and must
|
||||
all pass.
|
||||
|
||||
Validators can be globally disabled and re-enabled using
|
||||
`attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
|
||||
|
||||
The validator can also be set using decorator notation as shown
|
||||
below.
|
||||
|
||||
.. seealso:: :ref:`validators`
|
||||
|
||||
repr (bool | ~typing.Callable):
|
||||
Include this attribute in the generated ``__repr__`` method. If
|
||||
True, include the attribute; if False, omit it. By default, the
|
||||
built-in ``repr()`` function is used. To override how the attribute
|
||||
value is formatted, pass a ``callable`` that takes a single value
|
||||
and returns a string. Note that the resulting string is used as-is,
|
||||
which means it will be used directly *instead* of calling
|
||||
``repr()`` (the default).
|
||||
|
||||
eq (bool | ~typing.Callable):
|
||||
If True (default), include this attribute in the generated
|
||||
``__eq__`` and ``__ne__`` methods that check two instances for
|
||||
equality. To override how the attribute value is compared, pass a
|
||||
callable that takes a single value and returns the value to be
|
||||
compared.
|
||||
|
||||
.. seealso:: `comparison`
|
||||
|
||||
order (bool | ~typing.Callable):
|
||||
If True (default), include this attributes in the generated
|
||||
``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
|
||||
override how the attribute value is ordered, pass a callable that
|
||||
takes a single value and returns the value to be ordered.
|
||||
|
||||
.. seealso:: `comparison`
|
||||
|
||||
hash (bool | None):
|
||||
Include this attribute in the generated ``__hash__`` method. If
|
||||
None (default), mirror *eq*'s value. This is the correct behavior
|
||||
according the Python spec. Setting this value to anything else
|
||||
than None is *discouraged*.
|
||||
|
||||
.. seealso:: `hashing`
|
||||
|
||||
init (bool):
|
||||
Include this attribute in the generated ``__init__`` method.
|
||||
|
||||
It is possible to set this to False and set a default value. In
|
||||
that case this attributed is unconditionally initialized with the
|
||||
specified default value or factory.
|
||||
|
||||
.. seealso:: `init`
|
||||
|
||||
converter (typing.Callable | Converter):
|
||||
A callable that is called by *attrs*-generated ``__init__`` methods
|
||||
to convert attribute's value to the desired format.
|
||||
|
||||
If a vanilla callable is passed, it is given the passed-in value as
|
||||
the only positional argument. It is possible to receive additional
|
||||
arguments by wrapping the callable in a `Converter`.
|
||||
|
||||
Either way, the returned value will be used as the new value of the
|
||||
attribute. The value is converted before being passed to the
|
||||
validator, if any.
|
||||
|
||||
.. seealso:: :ref:`converters`
|
||||
|
||||
metadata (dict | None):
|
||||
An arbitrary mapping, to be used by third-party code.
|
||||
|
||||
.. seealso:: `extending-metadata`.
|
||||
|
||||
type (type):
|
||||
The type of the attribute. Nowadays, the preferred method to
|
||||
specify the type is using a variable annotation (see :pep:`526`).
|
||||
This argument is provided for backwards-compatibility and for usage
|
||||
with `make_class`. Regardless of the approach used, the type will
|
||||
be stored on ``Attribute.type``.
|
||||
|
||||
Please note that *attrs* doesn't do anything with this metadata by
|
||||
itself. You can use it as part of your own code or for `static type
|
||||
checking <types>`.
|
||||
|
||||
kw_only (bool | None):
|
||||
Make this attribute keyword-only in the generated ``__init__`` (if
|
||||
*init* is False, this parameter is ignored). If None (default),
|
||||
mirror the setting from `attrs.define`.
|
||||
|
||||
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
|
||||
Allows to overwrite the *on_setattr* setting from `attr.s`. If left
|
||||
None, the *on_setattr* value from `attr.s` is used. Set to
|
||||
`attrs.setters.NO_OP` to run **no** `setattr` hooks for this
|
||||
attribute -- regardless of the setting in `define()`.
|
||||
|
||||
alias (str | None):
|
||||
Override this attribute's parameter name in the generated
|
||||
``__init__`` method. If left None, default to ``name`` stripped
|
||||
of leading underscores. See `private-attributes`.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
.. versionchanged:: 21.1.0
|
||||
*eq*, *order*, and *cmp* also accept a custom callable
|
||||
.. versionadded:: 22.2.0 *alias*
|
||||
.. versionadded:: 23.1.0
|
||||
The *type* parameter has been re-added; mostly for `attrs.make_class`.
|
||||
Please note that type checkers ignore this metadata.
|
||||
.. versionchanged:: 25.4.0
|
||||
*kw_only* can now be None, and its default is also changed from False to
|
||||
None.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`attr.ib`
|
||||
"""
|
||||
return attrib(
|
||||
default=default,
|
||||
validator=validator,
|
||||
repr=repr,
|
||||
hash=hash,
|
||||
init=init,
|
||||
metadata=metadata,
|
||||
type=type,
|
||||
converter=converter,
|
||||
factory=factory,
|
||||
kw_only=kw_only,
|
||||
eq=eq,
|
||||
order=order,
|
||||
on_setattr=on_setattr,
|
||||
alias=alias,
|
||||
)
|
||||
|
||||
|
||||
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
|
||||
"""
|
||||
Same as `attr.asdict`, except that collections types are always retained
|
||||
and dict is always used as *dict_factory*.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _asdict(
|
||||
inst=inst,
|
||||
recurse=recurse,
|
||||
filter=filter,
|
||||
value_serializer=value_serializer,
|
||||
retain_collection_types=True,
|
||||
)
|
||||
|
||||
|
||||
def astuple(inst, *, recurse=True, filter=None):
|
||||
"""
|
||||
Same as `attr.astuple`, except that collections types are always retained
|
||||
and `tuple` is always used as the *tuple_factory*.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _astuple(
|
||||
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
|
||||
)
|
||||
|
||||
|
||||
def inspect(cls):
|
||||
"""
|
||||
Inspect the class and return its effective build parameters.
|
||||
|
||||
Warning:
|
||||
This feature is currently **experimental** and is not covered by our
|
||||
strict backwards-compatibility guarantees.
|
||||
|
||||
Args:
|
||||
cls: The *attrs*-decorated class to inspect.
|
||||
|
||||
Returns:
|
||||
The effective build parameters of the class.
|
||||
|
||||
Raises:
|
||||
NotAnAttrsClassError: If the class is not an *attrs*-decorated class.
|
||||
|
||||
.. versionadded:: 25.4.0
|
||||
"""
|
||||
try:
|
||||
return cls.__dict__["__attrs_props__"]
|
||||
except KeyError:
|
||||
msg = f"{cls!r} is not an attrs-decorated class."
|
||||
raise NotAnAttrsClassError(msg) from None
|
||||
15
.venv/lib/python3.9/site-packages/attr/_typing_compat.pyi
Normal file
15
.venv/lib/python3.9/site-packages/attr/_typing_compat.pyi
Normal file
@@ -0,0 +1,15 @@
|
||||
from typing import Any, ClassVar, Protocol
|
||||
|
||||
# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
|
||||
MYPY = False
|
||||
|
||||
if MYPY:
|
||||
# A protocol to be able to statically accept an attrs class.
|
||||
class AttrsInstance_(Protocol):
|
||||
__attrs_attrs__: ClassVar[Any]
|
||||
|
||||
else:
|
||||
# For type checkers without plug-in support use an empty protocol that
|
||||
# will (hopefully) be combined into a union.
|
||||
class AttrsInstance_(Protocol):
|
||||
pass
|
||||
89
.venv/lib/python3.9/site-packages/attr/_version_info.py
Normal file
89
.venv/lib/python3.9/site-packages/attr/_version_info.py
Normal file
@@ -0,0 +1,89 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
|
||||
from functools import total_ordering
|
||||
|
||||
from ._funcs import astuple
|
||||
from ._make import attrib, attrs
|
||||
|
||||
|
||||
@total_ordering
|
||||
@attrs(eq=False, order=False, slots=True, frozen=True)
|
||||
class VersionInfo:
|
||||
"""
|
||||
A version object that can be compared to tuple of length 1--4:
|
||||
|
||||
>>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
|
||||
True
|
||||
>>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
|
||||
True
|
||||
>>> vi = attr.VersionInfo(19, 2, 0, "final")
|
||||
>>> vi < (19, 1, 1)
|
||||
False
|
||||
>>> vi < (19,)
|
||||
False
|
||||
>>> vi == (19, 2,)
|
||||
True
|
||||
>>> vi == (19, 2, 1)
|
||||
False
|
||||
|
||||
.. versionadded:: 19.2
|
||||
"""
|
||||
|
||||
year = attrib(type=int)
|
||||
minor = attrib(type=int)
|
||||
micro = attrib(type=int)
|
||||
releaselevel = attrib(type=str)
|
||||
|
||||
@classmethod
|
||||
def _from_version_string(cls, s):
|
||||
"""
|
||||
Parse *s* and return a _VersionInfo.
|
||||
"""
|
||||
v = s.split(".")
|
||||
if len(v) == 3:
|
||||
v.append("final")
|
||||
|
||||
return cls(
|
||||
year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
|
||||
)
|
||||
|
||||
def _ensure_tuple(self, other):
|
||||
"""
|
||||
Ensure *other* is a tuple of a valid length.
|
||||
|
||||
Returns a possibly transformed *other* and ourselves as a tuple of
|
||||
the same length as *other*.
|
||||
"""
|
||||
|
||||
if self.__class__ is other.__class__:
|
||||
other = astuple(other)
|
||||
|
||||
if not isinstance(other, tuple):
|
||||
raise NotImplementedError
|
||||
|
||||
if not (1 <= len(other) <= 4):
|
||||
raise NotImplementedError
|
||||
|
||||
return astuple(self)[: len(other)], other
|
||||
|
||||
def __eq__(self, other):
|
||||
try:
|
||||
us, them = self._ensure_tuple(other)
|
||||
except NotImplementedError:
|
||||
return NotImplemented
|
||||
|
||||
return us == them
|
||||
|
||||
def __lt__(self, other):
|
||||
try:
|
||||
us, them = self._ensure_tuple(other)
|
||||
except NotImplementedError:
|
||||
return NotImplemented
|
||||
|
||||
# Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
|
||||
# have to do anything special with releaselevel for now.
|
||||
return us < them
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.year, self.minor, self.micro, self.releaselevel))
|
||||
9
.venv/lib/python3.9/site-packages/attr/_version_info.pyi
Normal file
9
.venv/lib/python3.9/site-packages/attr/_version_info.pyi
Normal file
@@ -0,0 +1,9 @@
|
||||
class VersionInfo:
|
||||
@property
|
||||
def year(self) -> int: ...
|
||||
@property
|
||||
def minor(self) -> int: ...
|
||||
@property
|
||||
def micro(self) -> int: ...
|
||||
@property
|
||||
def releaselevel(self) -> str: ...
|
||||
162
.venv/lib/python3.9/site-packages/attr/converters.py
Normal file
162
.venv/lib/python3.9/site-packages/attr/converters.py
Normal file
@@ -0,0 +1,162 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly useful converters.
|
||||
"""
|
||||
|
||||
import typing
|
||||
|
||||
from ._compat import _AnnotationExtractor
|
||||
from ._make import NOTHING, Converter, Factory, pipe
|
||||
|
||||
|
||||
__all__ = [
|
||||
"default_if_none",
|
||||
"optional",
|
||||
"pipe",
|
||||
"to_bool",
|
||||
]
|
||||
|
||||
|
||||
def optional(converter):
|
||||
"""
|
||||
A converter that allows an attribute to be optional. An optional attribute
|
||||
is one which can be set to `None`.
|
||||
|
||||
Type annotations will be inferred from the wrapped converter's, if it has
|
||||
any.
|
||||
|
||||
Args:
|
||||
converter (typing.Callable):
|
||||
the converter that is used for non-`None` values.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
|
||||
if isinstance(converter, Converter):
|
||||
|
||||
def optional_converter(val, inst, field):
|
||||
if val is None:
|
||||
return None
|
||||
return converter(val, inst, field)
|
||||
|
||||
else:
|
||||
|
||||
def optional_converter(val):
|
||||
if val is None:
|
||||
return None
|
||||
return converter(val)
|
||||
|
||||
xtr = _AnnotationExtractor(converter)
|
||||
|
||||
t = xtr.get_first_param_type()
|
||||
if t:
|
||||
optional_converter.__annotations__["val"] = typing.Optional[t]
|
||||
|
||||
rt = xtr.get_return_type()
|
||||
if rt:
|
||||
optional_converter.__annotations__["return"] = typing.Optional[rt]
|
||||
|
||||
if isinstance(converter, Converter):
|
||||
return Converter(optional_converter, takes_self=True, takes_field=True)
|
||||
|
||||
return optional_converter
|
||||
|
||||
|
||||
def default_if_none(default=NOTHING, factory=None):
|
||||
"""
|
||||
A converter that allows to replace `None` values by *default* or the result
|
||||
of *factory*.
|
||||
|
||||
Args:
|
||||
default:
|
||||
Value to be used if `None` is passed. Passing an instance of
|
||||
`attrs.Factory` is supported, however the ``takes_self`` option is
|
||||
*not*.
|
||||
|
||||
factory (typing.Callable):
|
||||
A callable that takes no parameters whose result is used if `None`
|
||||
is passed.
|
||||
|
||||
Raises:
|
||||
TypeError: If **neither** *default* or *factory* is passed.
|
||||
|
||||
TypeError: If **both** *default* and *factory* are passed.
|
||||
|
||||
ValueError:
|
||||
If an instance of `attrs.Factory` is passed with
|
||||
``takes_self=True``.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
if default is NOTHING and factory is None:
|
||||
msg = "Must pass either `default` or `factory`."
|
||||
raise TypeError(msg)
|
||||
|
||||
if default is not NOTHING and factory is not None:
|
||||
msg = "Must pass either `default` or `factory` but not both."
|
||||
raise TypeError(msg)
|
||||
|
||||
if factory is not None:
|
||||
default = Factory(factory)
|
||||
|
||||
if isinstance(default, Factory):
|
||||
if default.takes_self:
|
||||
msg = "`takes_self` is not supported by default_if_none."
|
||||
raise ValueError(msg)
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default.factory()
|
||||
|
||||
else:
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default
|
||||
|
||||
return default_if_none_converter
|
||||
|
||||
|
||||
def to_bool(val):
|
||||
"""
|
||||
Convert "boolean" strings (for example, from environment variables) to real
|
||||
booleans.
|
||||
|
||||
Values mapping to `True`:
|
||||
|
||||
- ``True``
|
||||
- ``"true"`` / ``"t"``
|
||||
- ``"yes"`` / ``"y"``
|
||||
- ``"on"``
|
||||
- ``"1"``
|
||||
- ``1``
|
||||
|
||||
Values mapping to `False`:
|
||||
|
||||
- ``False``
|
||||
- ``"false"`` / ``"f"``
|
||||
- ``"no"`` / ``"n"``
|
||||
- ``"off"``
|
||||
- ``"0"``
|
||||
- ``0``
|
||||
|
||||
Raises:
|
||||
ValueError: For any other value.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
if isinstance(val, str):
|
||||
val = val.lower()
|
||||
|
||||
if val in (True, "true", "t", "yes", "y", "on", "1", 1):
|
||||
return True
|
||||
if val in (False, "false", "f", "no", "n", "off", "0", 0):
|
||||
return False
|
||||
|
||||
msg = f"Cannot convert value to bool: {val!r}"
|
||||
raise ValueError(msg)
|
||||
19
.venv/lib/python3.9/site-packages/attr/converters.pyi
Normal file
19
.venv/lib/python3.9/site-packages/attr/converters.pyi
Normal file
@@ -0,0 +1,19 @@
|
||||
from typing import Callable, Any, overload
|
||||
|
||||
from attrs import _ConverterType, _CallableConverterType
|
||||
|
||||
@overload
|
||||
def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
|
||||
@overload
|
||||
def pipe(*validators: _ConverterType) -> _ConverterType: ...
|
||||
@overload
|
||||
def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
|
||||
@overload
|
||||
def optional(converter: _ConverterType) -> _ConverterType: ...
|
||||
@overload
|
||||
def default_if_none(default: Any) -> _CallableConverterType: ...
|
||||
@overload
|
||||
def default_if_none(
|
||||
*, factory: Callable[[], Any]
|
||||
) -> _CallableConverterType: ...
|
||||
def to_bool(val: str | int | bool) -> bool: ...
|
||||
95
.venv/lib/python3.9/site-packages/attr/exceptions.py
Normal file
95
.venv/lib/python3.9/site-packages/attr/exceptions.py
Normal file
@@ -0,0 +1,95 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import ClassVar
|
||||
|
||||
|
||||
class FrozenError(AttributeError):
|
||||
"""
|
||||
A frozen/immutable instance or attribute have been attempted to be
|
||||
modified.
|
||||
|
||||
It mirrors the behavior of ``namedtuples`` by using the same error message
|
||||
and subclassing `AttributeError`.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
|
||||
msg = "can't set attribute"
|
||||
args: ClassVar[tuple[str]] = [msg]
|
||||
|
||||
|
||||
class FrozenInstanceError(FrozenError):
|
||||
"""
|
||||
A frozen instance has been attempted to be modified.
|
||||
|
||||
.. versionadded:: 16.1.0
|
||||
"""
|
||||
|
||||
|
||||
class FrozenAttributeError(FrozenError):
|
||||
"""
|
||||
A frozen attribute has been attempted to be modified.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
|
||||
|
||||
class AttrsAttributeNotFoundError(ValueError):
|
||||
"""
|
||||
An *attrs* function couldn't find an attribute that the user asked for.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
|
||||
|
||||
class NotAnAttrsClassError(ValueError):
|
||||
"""
|
||||
A non-*attrs* class has been passed into an *attrs* function.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
|
||||
|
||||
class DefaultAlreadySetError(RuntimeError):
|
||||
"""
|
||||
A default has been set when defining the field and is attempted to be reset
|
||||
using the decorator.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
|
||||
|
||||
class UnannotatedAttributeError(RuntimeError):
|
||||
"""
|
||||
A class with ``auto_attribs=True`` has a field without a type annotation.
|
||||
|
||||
.. versionadded:: 17.3.0
|
||||
"""
|
||||
|
||||
|
||||
class PythonTooOldError(RuntimeError):
|
||||
"""
|
||||
It was attempted to use an *attrs* feature that requires a newer Python
|
||||
version.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
|
||||
|
||||
class NotCallableError(TypeError):
|
||||
"""
|
||||
A field requiring a callable has been set with a value that is not
|
||||
callable.
|
||||
|
||||
.. versionadded:: 19.2.0
|
||||
"""
|
||||
|
||||
def __init__(self, msg, value):
|
||||
super(TypeError, self).__init__(msg, value)
|
||||
self.msg = msg
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return str(self.msg)
|
||||
17
.venv/lib/python3.9/site-packages/attr/exceptions.pyi
Normal file
17
.venv/lib/python3.9/site-packages/attr/exceptions.pyi
Normal file
@@ -0,0 +1,17 @@
|
||||
from typing import Any
|
||||
|
||||
class FrozenError(AttributeError):
|
||||
msg: str = ...
|
||||
|
||||
class FrozenInstanceError(FrozenError): ...
|
||||
class FrozenAttributeError(FrozenError): ...
|
||||
class AttrsAttributeNotFoundError(ValueError): ...
|
||||
class NotAnAttrsClassError(ValueError): ...
|
||||
class DefaultAlreadySetError(RuntimeError): ...
|
||||
class UnannotatedAttributeError(RuntimeError): ...
|
||||
class PythonTooOldError(RuntimeError): ...
|
||||
|
||||
class NotCallableError(TypeError):
|
||||
msg: str = ...
|
||||
value: Any = ...
|
||||
def __init__(self, msg: str, value: Any) -> None: ...
|
||||
72
.venv/lib/python3.9/site-packages/attr/filters.py
Normal file
72
.venv/lib/python3.9/site-packages/attr/filters.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
|
||||
"""
|
||||
|
||||
from ._make import Attribute
|
||||
|
||||
|
||||
def _split_what(what):
|
||||
"""
|
||||
Returns a tuple of `frozenset`s of classes and attributes.
|
||||
"""
|
||||
return (
|
||||
frozenset(cls for cls in what if isinstance(cls, type)),
|
||||
frozenset(cls for cls in what if isinstance(cls, str)),
|
||||
frozenset(cls for cls in what if isinstance(cls, Attribute)),
|
||||
)
|
||||
|
||||
|
||||
def include(*what):
|
||||
"""
|
||||
Create a filter that only allows *what*.
|
||||
|
||||
Args:
|
||||
what (list[type, str, attrs.Attribute]):
|
||||
What to include. Can be a type, a name, or an attribute.
|
||||
|
||||
Returns:
|
||||
Callable:
|
||||
A callable that can be passed to `attrs.asdict`'s and
|
||||
`attrs.astuple`'s *filter* argument.
|
||||
|
||||
.. versionchanged:: 23.1.0 Accept strings with field names.
|
||||
"""
|
||||
cls, names, attrs = _split_what(what)
|
||||
|
||||
def include_(attribute, value):
|
||||
return (
|
||||
value.__class__ in cls
|
||||
or attribute.name in names
|
||||
or attribute in attrs
|
||||
)
|
||||
|
||||
return include_
|
||||
|
||||
|
||||
def exclude(*what):
|
||||
"""
|
||||
Create a filter that does **not** allow *what*.
|
||||
|
||||
Args:
|
||||
what (list[type, str, attrs.Attribute]):
|
||||
What to exclude. Can be a type, a name, or an attribute.
|
||||
|
||||
Returns:
|
||||
Callable:
|
||||
A callable that can be passed to `attrs.asdict`'s and
|
||||
`attrs.astuple`'s *filter* argument.
|
||||
|
||||
.. versionchanged:: 23.3.0 Accept field name string as input argument
|
||||
"""
|
||||
cls, names, attrs = _split_what(what)
|
||||
|
||||
def exclude_(attribute, value):
|
||||
return not (
|
||||
value.__class__ in cls
|
||||
or attribute.name in names
|
||||
or attribute in attrs
|
||||
)
|
||||
|
||||
return exclude_
|
||||
6
.venv/lib/python3.9/site-packages/attr/filters.pyi
Normal file
6
.venv/lib/python3.9/site-packages/attr/filters.pyi
Normal file
@@ -0,0 +1,6 @@
|
||||
from typing import Any
|
||||
|
||||
from . import Attribute, _FilterType
|
||||
|
||||
def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
|
||||
def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
|
||||
0
.venv/lib/python3.9/site-packages/attr/py.typed
Normal file
0
.venv/lib/python3.9/site-packages/attr/py.typed
Normal file
79
.venv/lib/python3.9/site-packages/attr/setters.py
Normal file
79
.venv/lib/python3.9/site-packages/attr/setters.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly used hooks for on_setattr.
|
||||
"""
|
||||
|
||||
from . import _config
|
||||
from .exceptions import FrozenAttributeError
|
||||
|
||||
|
||||
def pipe(*setters):
|
||||
"""
|
||||
Run all *setters* and return the return value of the last one.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
|
||||
def wrapped_pipe(instance, attrib, new_value):
|
||||
rv = new_value
|
||||
|
||||
for setter in setters:
|
||||
rv = setter(instance, attrib, rv)
|
||||
|
||||
return rv
|
||||
|
||||
return wrapped_pipe
|
||||
|
||||
|
||||
def frozen(_, __, ___):
|
||||
"""
|
||||
Prevent an attribute to be modified.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
raise FrozenAttributeError
|
||||
|
||||
|
||||
def validate(instance, attrib, new_value):
|
||||
"""
|
||||
Run *attrib*'s validator on *new_value* if it has one.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
if _config._run_validators is False:
|
||||
return new_value
|
||||
|
||||
v = attrib.validator
|
||||
if not v:
|
||||
return new_value
|
||||
|
||||
v(instance, attrib, new_value)
|
||||
|
||||
return new_value
|
||||
|
||||
|
||||
def convert(instance, attrib, new_value):
|
||||
"""
|
||||
Run *attrib*'s converter -- if it has one -- on *new_value* and return the
|
||||
result.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
c = attrib.converter
|
||||
if c:
|
||||
# This can be removed once we drop 3.8 and use attrs.Converter instead.
|
||||
from ._make import Converter
|
||||
|
||||
if not isinstance(c, Converter):
|
||||
return c(new_value)
|
||||
|
||||
return c(new_value, instance, attrib)
|
||||
|
||||
return new_value
|
||||
|
||||
|
||||
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
|
||||
# Sphinx's autodata stopped working, so the docstring is inlined in the API
|
||||
# docs.
|
||||
NO_OP = object()
|
||||
20
.venv/lib/python3.9/site-packages/attr/setters.pyi
Normal file
20
.venv/lib/python3.9/site-packages/attr/setters.pyi
Normal file
@@ -0,0 +1,20 @@
|
||||
from typing import Any, NewType, NoReturn, TypeVar
|
||||
|
||||
from . import Attribute
|
||||
from attrs import _OnSetAttrType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
def frozen(
|
||||
instance: Any, attribute: Attribute[Any], new_value: Any
|
||||
) -> NoReturn: ...
|
||||
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
|
||||
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
|
||||
|
||||
# convert is allowed to return Any, because they can be chained using pipe.
|
||||
def convert(
|
||||
instance: Any, attribute: Attribute[Any], new_value: Any
|
||||
) -> Any: ...
|
||||
|
||||
_NoOpType = NewType("_NoOpType", object)
|
||||
NO_OP: _NoOpType
|
||||
748
.venv/lib/python3.9/site-packages/attr/validators.py
Normal file
748
.venv/lib/python3.9/site-packages/attr/validators.py
Normal file
@@ -0,0 +1,748 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly useful validators.
|
||||
"""
|
||||
|
||||
import operator
|
||||
import re
|
||||
|
||||
from contextlib import contextmanager
|
||||
from re import Pattern
|
||||
|
||||
from ._config import get_run_validators, set_run_validators
|
||||
from ._make import _AndValidator, and_, attrib, attrs
|
||||
from .converters import default_if_none
|
||||
from .exceptions import NotCallableError
|
||||
|
||||
|
||||
__all__ = [
|
||||
"and_",
|
||||
"deep_iterable",
|
||||
"deep_mapping",
|
||||
"disabled",
|
||||
"ge",
|
||||
"get_disabled",
|
||||
"gt",
|
||||
"in_",
|
||||
"instance_of",
|
||||
"is_callable",
|
||||
"le",
|
||||
"lt",
|
||||
"matches_re",
|
||||
"max_len",
|
||||
"min_len",
|
||||
"not_",
|
||||
"optional",
|
||||
"or_",
|
||||
"set_disabled",
|
||||
]
|
||||
|
||||
|
||||
def set_disabled(disabled):
|
||||
"""
|
||||
Globally disable or enable running validators.
|
||||
|
||||
By default, they are run.
|
||||
|
||||
Args:
|
||||
disabled (bool): If `True`, disable running all validators.
|
||||
|
||||
.. warning::
|
||||
|
||||
This function is not thread-safe!
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
set_run_validators(not disabled)
|
||||
|
||||
|
||||
def get_disabled():
|
||||
"""
|
||||
Return a bool indicating whether validators are currently disabled or not.
|
||||
|
||||
Returns:
|
||||
bool:`True` if validators are currently disabled.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return not get_run_validators()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def disabled():
|
||||
"""
|
||||
Context manager that disables running validators within its context.
|
||||
|
||||
.. warning::
|
||||
|
||||
This context manager is not thread-safe!
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
set_run_validators(False)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
set_run_validators(True)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _InstanceOfValidator:
|
||||
type = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not isinstance(value, self.type):
|
||||
msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
|
||||
raise TypeError(
|
||||
msg,
|
||||
attr,
|
||||
self.type,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<instance_of validator for type {self.type!r}>"
|
||||
|
||||
|
||||
def instance_of(type):
|
||||
"""
|
||||
A validator that raises a `TypeError` if the initializer is called with a
|
||||
wrong type for this particular attribute (checks are performed using
|
||||
`isinstance` therefore it's also valid to pass a tuple of types).
|
||||
|
||||
Args:
|
||||
type (type | tuple[type]): The type to check for.
|
||||
|
||||
Raises:
|
||||
TypeError:
|
||||
With a human readable error message, the attribute (of type
|
||||
`attrs.Attribute`), the expected type, and the value it got.
|
||||
"""
|
||||
return _InstanceOfValidator(type)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MatchesReValidator:
|
||||
pattern = attrib()
|
||||
match_func = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.match_func(value):
|
||||
msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
|
||||
raise ValueError(
|
||||
msg,
|
||||
attr,
|
||||
self.pattern,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<matches_re validator for pattern {self.pattern!r}>"
|
||||
|
||||
|
||||
def matches_re(regex, flags=0, func=None):
|
||||
r"""
|
||||
A validator that raises `ValueError` if the initializer is called with a
|
||||
string that doesn't match *regex*.
|
||||
|
||||
Args:
|
||||
regex (str, re.Pattern):
|
||||
A regex string or precompiled pattern to match against
|
||||
|
||||
flags (int):
|
||||
Flags that will be passed to the underlying re function (default 0)
|
||||
|
||||
func (typing.Callable):
|
||||
Which underlying `re` function to call. Valid options are
|
||||
`re.fullmatch`, `re.search`, and `re.match`; the default `None`
|
||||
means `re.fullmatch`. For performance reasons, the pattern is
|
||||
always precompiled using `re.compile`.
|
||||
|
||||
.. versionadded:: 19.2.0
|
||||
.. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
|
||||
"""
|
||||
valid_funcs = (re.fullmatch, None, re.search, re.match)
|
||||
if func not in valid_funcs:
|
||||
msg = "'func' must be one of {}.".format(
|
||||
", ".join(
|
||||
sorted((e and e.__name__) or "None" for e in set(valid_funcs))
|
||||
)
|
||||
)
|
||||
raise ValueError(msg)
|
||||
|
||||
if isinstance(regex, Pattern):
|
||||
if flags:
|
||||
msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
|
||||
raise TypeError(msg)
|
||||
pattern = regex
|
||||
else:
|
||||
pattern = re.compile(regex, flags)
|
||||
|
||||
if func is re.match:
|
||||
match_func = pattern.match
|
||||
elif func is re.search:
|
||||
match_func = pattern.search
|
||||
else:
|
||||
match_func = pattern.fullmatch
|
||||
|
||||
return _MatchesReValidator(pattern, match_func)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _OptionalValidator:
|
||||
validator = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
if value is None:
|
||||
return
|
||||
|
||||
self.validator(inst, attr, value)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<optional validator for {self.validator!r} or None>"
|
||||
|
||||
|
||||
def optional(validator):
|
||||
"""
|
||||
A validator that makes an attribute optional. An optional attribute is one
|
||||
which can be set to `None` in addition to satisfying the requirements of
|
||||
the sub-validator.
|
||||
|
||||
Args:
|
||||
validator
|
||||
(typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
|
||||
A validator (or validators) that is used for non-`None` values.
|
||||
|
||||
.. versionadded:: 15.1.0
|
||||
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
|
||||
.. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
|
||||
"""
|
||||
if isinstance(validator, (list, tuple)):
|
||||
return _OptionalValidator(_AndValidator(validator))
|
||||
|
||||
return _OptionalValidator(validator)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _InValidator:
|
||||
options = attrib()
|
||||
_original_options = attrib(hash=False)
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
try:
|
||||
in_options = value in self.options
|
||||
except TypeError: # e.g. `1 in "abc"`
|
||||
in_options = False
|
||||
|
||||
if not in_options:
|
||||
msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
|
||||
raise ValueError(
|
||||
msg,
|
||||
attr,
|
||||
self._original_options,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<in_ validator with options {self._original_options!r}>"
|
||||
|
||||
|
||||
def in_(options):
|
||||
"""
|
||||
A validator that raises a `ValueError` if the initializer is called with a
|
||||
value that does not belong in the *options* provided.
|
||||
|
||||
The check is performed using ``value in options``, so *options* has to
|
||||
support that operation.
|
||||
|
||||
To keep the validator hashable, dicts, lists, and sets are transparently
|
||||
transformed into a `tuple`.
|
||||
|
||||
Args:
|
||||
options: Allowed options.
|
||||
|
||||
Raises:
|
||||
ValueError:
|
||||
With a human readable error message, the attribute (of type
|
||||
`attrs.Attribute`), the expected options, and the value it got.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
.. versionchanged:: 22.1.0
|
||||
The ValueError was incomplete until now and only contained the human
|
||||
readable error message. Now it contains all the information that has
|
||||
been promised since 17.1.0.
|
||||
.. versionchanged:: 24.1.0
|
||||
*options* that are a list, dict, or a set are now transformed into a
|
||||
tuple to keep the validator hashable.
|
||||
"""
|
||||
repr_options = options
|
||||
if isinstance(options, (list, dict, set)):
|
||||
options = tuple(options)
|
||||
|
||||
return _InValidator(options, repr_options)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=False, unsafe_hash=True)
|
||||
class _IsCallableValidator:
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not callable(value):
|
||||
message = (
|
||||
"'{name}' must be callable "
|
||||
"(got {value!r} that is a {actual!r})."
|
||||
)
|
||||
raise NotCallableError(
|
||||
msg=message.format(
|
||||
name=attr.name, value=value, actual=value.__class__
|
||||
),
|
||||
value=value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<is_callable validator>"
|
||||
|
||||
|
||||
def is_callable():
|
||||
"""
|
||||
A validator that raises a `attrs.exceptions.NotCallableError` if the
|
||||
initializer is called with a value for this particular attribute that is
|
||||
not callable.
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
Raises:
|
||||
attrs.exceptions.NotCallableError:
|
||||
With a human readable error message containing the attribute
|
||||
(`attrs.Attribute`) name, and the value it got.
|
||||
"""
|
||||
return _IsCallableValidator()
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _DeepIterable:
|
||||
member_validator = attrib(validator=is_callable())
|
||||
iterable_validator = attrib(
|
||||
default=None, validator=optional(is_callable())
|
||||
)
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if self.iterable_validator is not None:
|
||||
self.iterable_validator(inst, attr, value)
|
||||
|
||||
for member in value:
|
||||
self.member_validator(inst, attr, member)
|
||||
|
||||
def __repr__(self):
|
||||
iterable_identifier = (
|
||||
""
|
||||
if self.iterable_validator is None
|
||||
else f" {self.iterable_validator!r}"
|
||||
)
|
||||
return (
|
||||
f"<deep_iterable validator for{iterable_identifier}"
|
||||
f" iterables of {self.member_validator!r}>"
|
||||
)
|
||||
|
||||
|
||||
def deep_iterable(member_validator, iterable_validator=None):
|
||||
"""
|
||||
A validator that performs deep validation of an iterable.
|
||||
|
||||
Args:
|
||||
member_validator: Validator(s) to apply to iterable members.
|
||||
|
||||
iterable_validator:
|
||||
Validator(s) to apply to iterable itself (optional).
|
||||
|
||||
Raises
|
||||
TypeError: if any sub-validators fail
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
*member_validator* and *iterable_validator* can now be a list or tuple
|
||||
of validators.
|
||||
"""
|
||||
if isinstance(member_validator, (list, tuple)):
|
||||
member_validator = and_(*member_validator)
|
||||
if isinstance(iterable_validator, (list, tuple)):
|
||||
iterable_validator = and_(*iterable_validator)
|
||||
return _DeepIterable(member_validator, iterable_validator)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _DeepMapping:
|
||||
key_validator = attrib(validator=optional(is_callable()))
|
||||
value_validator = attrib(validator=optional(is_callable()))
|
||||
mapping_validator = attrib(validator=optional(is_callable()))
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if self.mapping_validator is not None:
|
||||
self.mapping_validator(inst, attr, value)
|
||||
|
||||
for key in value:
|
||||
if self.key_validator is not None:
|
||||
self.key_validator(inst, attr, key)
|
||||
if self.value_validator is not None:
|
||||
self.value_validator(inst, attr, value[key])
|
||||
|
||||
def __repr__(self):
|
||||
return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
|
||||
|
||||
|
||||
def deep_mapping(
|
||||
key_validator=None, value_validator=None, mapping_validator=None
|
||||
):
|
||||
"""
|
||||
A validator that performs deep validation of a dictionary.
|
||||
|
||||
All validators are optional, but at least one of *key_validator* or
|
||||
*value_validator* must be provided.
|
||||
|
||||
Args:
|
||||
key_validator: Validator(s) to apply to dictionary keys.
|
||||
|
||||
value_validator: Validator(s) to apply to dictionary values.
|
||||
|
||||
mapping_validator:
|
||||
Validator(s) to apply to top-level mapping attribute.
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
*key_validator* and *value_validator* are now optional, but at least one
|
||||
of them must be provided.
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
*key_validator*, *value_validator*, and *mapping_validator* can now be a
|
||||
list or tuple of validators.
|
||||
|
||||
Raises:
|
||||
TypeError: If any sub-validator fails on validation.
|
||||
|
||||
ValueError:
|
||||
If neither *key_validator* nor *value_validator* is provided on
|
||||
instantiation.
|
||||
"""
|
||||
if key_validator is None and value_validator is None:
|
||||
msg = (
|
||||
"At least one of key_validator or value_validator must be provided"
|
||||
)
|
||||
raise ValueError(msg)
|
||||
|
||||
if isinstance(key_validator, (list, tuple)):
|
||||
key_validator = and_(*key_validator)
|
||||
if isinstance(value_validator, (list, tuple)):
|
||||
value_validator = and_(*value_validator)
|
||||
if isinstance(mapping_validator, (list, tuple)):
|
||||
mapping_validator = and_(*mapping_validator)
|
||||
|
||||
return _DeepMapping(key_validator, value_validator, mapping_validator)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _NumberValidator:
|
||||
bound = attrib()
|
||||
compare_op = attrib()
|
||||
compare_func = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.compare_func(value, self.bound):
|
||||
msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
|
||||
raise ValueError(msg)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Validator for x {self.compare_op} {self.bound}>"
|
||||
|
||||
|
||||
def lt(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called with a
|
||||
number larger or equal to *val*.
|
||||
|
||||
The validator uses `operator.lt` to compare the values.
|
||||
|
||||
Args:
|
||||
val: Exclusive upper bound for values.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, "<", operator.lt)
|
||||
|
||||
|
||||
def le(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called with a
|
||||
number greater than *val*.
|
||||
|
||||
The validator uses `operator.le` to compare the values.
|
||||
|
||||
Args:
|
||||
val: Inclusive upper bound for values.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, "<=", operator.le)
|
||||
|
||||
|
||||
def ge(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called with a
|
||||
number smaller than *val*.
|
||||
|
||||
The validator uses `operator.ge` to compare the values.
|
||||
|
||||
Args:
|
||||
val: Inclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">=", operator.ge)
|
||||
|
||||
|
||||
def gt(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called with a
|
||||
number smaller or equal to *val*.
|
||||
|
||||
The validator uses `operator.gt` to compare the values.
|
||||
|
||||
Args:
|
||||
val: Exclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">", operator.gt)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MaxLengthValidator:
|
||||
max_length = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if len(value) > self.max_length:
|
||||
msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
|
||||
raise ValueError(msg)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<max_len validator for {self.max_length}>"
|
||||
|
||||
|
||||
def max_len(length):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string or iterable that is longer than *length*.
|
||||
|
||||
Args:
|
||||
length (int): Maximum length of the string or iterable
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _MaxLengthValidator(length)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MinLengthValidator:
|
||||
min_length = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if len(value) < self.min_length:
|
||||
msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
|
||||
raise ValueError(msg)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<min_len validator for {self.min_length}>"
|
||||
|
||||
|
||||
def min_len(length):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string or iterable that is shorter than *length*.
|
||||
|
||||
Args:
|
||||
length (int): Minimum length of the string or iterable
|
||||
|
||||
.. versionadded:: 22.1.0
|
||||
"""
|
||||
return _MinLengthValidator(length)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _SubclassOfValidator:
|
||||
type = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not issubclass(value, self.type):
|
||||
msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
|
||||
raise TypeError(
|
||||
msg,
|
||||
attr,
|
||||
self.type,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<subclass_of validator for type {self.type!r}>"
|
||||
|
||||
|
||||
def _subclass_of(type):
|
||||
"""
|
||||
A validator that raises a `TypeError` if the initializer is called with a
|
||||
wrong type for this particular attribute (checks are performed using
|
||||
`issubclass` therefore it's also valid to pass a tuple of types).
|
||||
|
||||
Args:
|
||||
type (type | tuple[type, ...]): The type(s) to check for.
|
||||
|
||||
Raises:
|
||||
TypeError:
|
||||
With a human readable error message, the attribute (of type
|
||||
`attrs.Attribute`), the expected type, and the value it got.
|
||||
"""
|
||||
return _SubclassOfValidator(type)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _NotValidator:
|
||||
validator = attrib()
|
||||
msg = attrib(
|
||||
converter=default_if_none(
|
||||
"not_ validator child '{validator!r}' "
|
||||
"did not raise a captured error"
|
||||
)
|
||||
)
|
||||
exc_types = attrib(
|
||||
validator=deep_iterable(
|
||||
member_validator=_subclass_of(Exception),
|
||||
iterable_validator=instance_of(tuple),
|
||||
),
|
||||
)
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
try:
|
||||
self.validator(inst, attr, value)
|
||||
except self.exc_types:
|
||||
pass # suppress error to invert validity
|
||||
else:
|
||||
raise ValueError(
|
||||
self.msg.format(
|
||||
validator=self.validator,
|
||||
exc_types=self.exc_types,
|
||||
),
|
||||
attr,
|
||||
self.validator,
|
||||
value,
|
||||
self.exc_types,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"
|
||||
|
||||
|
||||
def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
|
||||
"""
|
||||
A validator that wraps and logically 'inverts' the validator passed to it.
|
||||
It will raise a `ValueError` if the provided validator *doesn't* raise a
|
||||
`ValueError` or `TypeError` (by default), and will suppress the exception
|
||||
if the provided validator *does*.
|
||||
|
||||
Intended to be used with existing validators to compose logic without
|
||||
needing to create inverted variants, for example, ``not_(in_(...))``.
|
||||
|
||||
Args:
|
||||
validator: A validator to be logically inverted.
|
||||
|
||||
msg (str):
|
||||
Message to raise if validator fails. Formatted with keys
|
||||
``exc_types`` and ``validator``.
|
||||
|
||||
exc_types (tuple[type, ...]):
|
||||
Exception type(s) to capture. Other types raised by child
|
||||
validators will not be intercepted and pass through.
|
||||
|
||||
Raises:
|
||||
ValueError:
|
||||
With a human readable error message, the attribute (of type
|
||||
`attrs.Attribute`), the validator that failed to raise an
|
||||
exception, the value it got, and the expected exception types.
|
||||
|
||||
.. versionadded:: 22.2.0
|
||||
"""
|
||||
try:
|
||||
exc_types = tuple(exc_types)
|
||||
except TypeError:
|
||||
exc_types = (exc_types,)
|
||||
return _NotValidator(validator, msg, exc_types)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, unsafe_hash=True)
|
||||
class _OrValidator:
|
||||
validators = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
for v in self.validators:
|
||||
try:
|
||||
v(inst, attr, value)
|
||||
except Exception: # noqa: BLE001, PERF203, S112
|
||||
continue
|
||||
else:
|
||||
return
|
||||
|
||||
msg = f"None of {self.validators!r} satisfied for value {value!r}"
|
||||
raise ValueError(msg)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<or validator wrapping {self.validators!r}>"
|
||||
|
||||
|
||||
def or_(*validators):
|
||||
"""
|
||||
A validator that composes multiple validators into one.
|
||||
|
||||
When called on a value, it runs all wrapped validators until one of them is
|
||||
satisfied.
|
||||
|
||||
Args:
|
||||
validators (~collections.abc.Iterable[typing.Callable]):
|
||||
Arbitrary number of validators.
|
||||
|
||||
Raises:
|
||||
ValueError:
|
||||
If no validator is satisfied. Raised with a human-readable error
|
||||
message listing all the wrapped validators and the value that
|
||||
failed all of them.
|
||||
|
||||
.. versionadded:: 24.1.0
|
||||
"""
|
||||
vals = []
|
||||
for v in validators:
|
||||
vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
|
||||
|
||||
return _OrValidator(tuple(vals))
|
||||
140
.venv/lib/python3.9/site-packages/attr/validators.pyi
Normal file
140
.venv/lib/python3.9/site-packages/attr/validators.pyi
Normal file
@@ -0,0 +1,140 @@
|
||||
from types import UnionType
|
||||
from typing import (
|
||||
Any,
|
||||
AnyStr,
|
||||
Callable,
|
||||
Container,
|
||||
ContextManager,
|
||||
Iterable,
|
||||
Mapping,
|
||||
Match,
|
||||
Pattern,
|
||||
TypeVar,
|
||||
overload,
|
||||
)
|
||||
|
||||
from attrs import _ValidatorType
|
||||
from attrs import _ValidatorArgType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_T3 = TypeVar("_T3")
|
||||
_T4 = TypeVar("_T4")
|
||||
_T5 = TypeVar("_T5")
|
||||
_T6 = TypeVar("_T6")
|
||||
_I = TypeVar("_I", bound=Iterable)
|
||||
_K = TypeVar("_K")
|
||||
_V = TypeVar("_V")
|
||||
_M = TypeVar("_M", bound=Mapping)
|
||||
|
||||
def set_disabled(run: bool) -> None: ...
|
||||
def get_disabled() -> bool: ...
|
||||
def disabled() -> ContextManager[None]: ...
|
||||
|
||||
# To be more precise on instance_of use some overloads.
|
||||
# If there are more than 3 items in the tuple then we fall back to Any
|
||||
@overload
|
||||
def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: tuple[type[_T1], type[_T2]],
|
||||
) -> _ValidatorType[_T1 | _T2]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: tuple[type[_T1], type[_T2], type[_T3]],
|
||||
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
|
||||
@overload
|
||||
def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
|
||||
@overload
|
||||
def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
|
||||
def optional(
|
||||
validator: (
|
||||
_ValidatorType[_T]
|
||||
| list[_ValidatorType[_T]]
|
||||
| tuple[_ValidatorType[_T]]
|
||||
),
|
||||
) -> _ValidatorType[_T | None]: ...
|
||||
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
|
||||
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
||||
def matches_re(
|
||||
regex: Pattern[AnyStr] | AnyStr,
|
||||
flags: int = ...,
|
||||
func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
|
||||
) -> _ValidatorType[AnyStr]: ...
|
||||
def deep_iterable(
|
||||
member_validator: _ValidatorArgType[_T],
|
||||
iterable_validator: _ValidatorArgType[_I] | None = ...,
|
||||
) -> _ValidatorType[_I]: ...
|
||||
@overload
|
||||
def deep_mapping(
|
||||
key_validator: _ValidatorArgType[_K],
|
||||
value_validator: _ValidatorArgType[_V] | None = ...,
|
||||
mapping_validator: _ValidatorArgType[_M] | None = ...,
|
||||
) -> _ValidatorType[_M]: ...
|
||||
@overload
|
||||
def deep_mapping(
|
||||
key_validator: _ValidatorArgType[_K] | None = ...,
|
||||
value_validator: _ValidatorArgType[_V] = ...,
|
||||
mapping_validator: _ValidatorArgType[_M] | None = ...,
|
||||
) -> _ValidatorType[_M]: ...
|
||||
def is_callable() -> _ValidatorType[_T]: ...
|
||||
def lt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def le(val: _T) -> _ValidatorType[_T]: ...
|
||||
def ge(val: _T) -> _ValidatorType[_T]: ...
|
||||
def gt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def max_len(length: int) -> _ValidatorType[_T]: ...
|
||||
def min_len(length: int) -> _ValidatorType[_T]: ...
|
||||
def not_(
|
||||
validator: _ValidatorType[_T],
|
||||
*,
|
||||
msg: str | None = None,
|
||||
exc_types: type[Exception] | Iterable[type[Exception]] = ...,
|
||||
) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[_T1],
|
||||
__v2: _ValidatorType[_T2],
|
||||
) -> _ValidatorType[_T1 | _T2]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[_T1],
|
||||
__v2: _ValidatorType[_T2],
|
||||
__v3: _ValidatorType[_T3],
|
||||
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[_T1],
|
||||
__v2: _ValidatorType[_T2],
|
||||
__v3: _ValidatorType[_T3],
|
||||
__v4: _ValidatorType[_T4],
|
||||
) -> _ValidatorType[_T1 | _T2 | _T3 | _T4]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[_T1],
|
||||
__v2: _ValidatorType[_T2],
|
||||
__v3: _ValidatorType[_T3],
|
||||
__v4: _ValidatorType[_T4],
|
||||
__v5: _ValidatorType[_T5],
|
||||
) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[_T1],
|
||||
__v2: _ValidatorType[_T2],
|
||||
__v3: _ValidatorType[_T3],
|
||||
__v4: _ValidatorType[_T4],
|
||||
__v5: _ValidatorType[_T5],
|
||||
__v6: _ValidatorType[_T6],
|
||||
) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5 | _T6]: ...
|
||||
@overload
|
||||
def or_(
|
||||
__v1: _ValidatorType[Any],
|
||||
__v2: _ValidatorType[Any],
|
||||
__v3: _ValidatorType[Any],
|
||||
__v4: _ValidatorType[Any],
|
||||
__v5: _ValidatorType[Any],
|
||||
__v6: _ValidatorType[Any],
|
||||
*validators: _ValidatorType[Any],
|
||||
) -> _ValidatorType[Any]: ...
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,235 @@
|
||||
Metadata-Version: 2.4
|
||||
Name: attrs
|
||||
Version: 25.4.0
|
||||
Summary: Classes Without Boilerplate
|
||||
Project-URL: Documentation, https://www.attrs.org/
|
||||
Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html
|
||||
Project-URL: GitHub, https://github.com/python-attrs/attrs
|
||||
Project-URL: Funding, https://github.com/sponsors/hynek
|
||||
Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
|
||||
Author-email: Hynek Schlawack <hs@ox.cx>
|
||||
License-Expression: MIT
|
||||
License-File: LICENSE
|
||||
Keywords: attribute,boilerplate,class
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: 3.14
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.attrs.org/">
|
||||
<img src="https://raw.githubusercontent.com/python-attrs/attrs/main/docs/_static/attrs_logo.svg" width="35%" alt="attrs" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
||||
*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)).
|
||||
Trusted by NASA for [Mars missions since 2020](https://github.com/readme/featured/nasa-ingenuity-helicopter)!
|
||||
|
||||
Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
|
||||
|
||||
|
||||
## Sponsors
|
||||
|
||||
*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek).
|
||||
Especially those generously supporting us at the *The Organization* tier and higher:
|
||||
|
||||
<!-- sponsor-break-begin -->
|
||||
|
||||
<p align="center">
|
||||
|
||||
<!-- [[[cog
|
||||
import pathlib, tomllib
|
||||
|
||||
for sponsor in tomllib.loads(pathlib.Path("pyproject.toml").read_text())["tool"]["sponcon"]["sponsors"]:
|
||||
print(f'<a href="{sponsor["url"]}"><img title="{sponsor["title"]}" src="https://www.attrs.org/en/25.4.0/_static/sponsors/{sponsor["img"]}" width="190" /></a>')
|
||||
]]] -->
|
||||
<a href="https://www.variomedia.de/"><img title="Variomedia AG" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Variomedia.svg" width="190" /></a>
|
||||
<a href="https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek"><img title="Tidelift" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Tidelift.svg" width="190" /></a>
|
||||
<a href="https://privacy-solutions.org/"><img title="Privacy Solutions" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Privacy-Solutions.svg" width="190" /></a>
|
||||
<a href="https://filepreviews.io/"><img title="FilePreviews" src="https://www.attrs.org/en/25.4.0/_static/sponsors/FilePreviews.svg" width="190" /></a>
|
||||
<a href="https://polar.sh/"><img title="Polar" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Polar.svg" width="190" /></a>
|
||||
<!-- [[[end]]] -->
|
||||
|
||||
</p>
|
||||
|
||||
<!-- sponsor-break-end -->
|
||||
|
||||
<p align="center">
|
||||
<strong>Please consider <a href="https://github.com/sponsors/hynek">joining them</a> to help make <em>attrs</em>’s maintenance more sustainable!</strong>
|
||||
</p>
|
||||
|
||||
<!-- teaser-end -->
|
||||
|
||||
## Example
|
||||
|
||||
*attrs* gives you a class decorator and a way to declaratively define the attributes on that class:
|
||||
|
||||
<!-- code-begin -->
|
||||
|
||||
```pycon
|
||||
>>> from attrs import asdict, define, make_class, Factory
|
||||
|
||||
>>> @define
|
||||
... class SomeClass:
|
||||
... a_number: int = 42
|
||||
... list_of_numbers: list[int] = Factory(list)
|
||||
...
|
||||
... def hard_math(self, another_number):
|
||||
... return self.a_number + sum(self.list_of_numbers) * another_number
|
||||
|
||||
|
||||
>>> sc = SomeClass(1, [1, 2, 3])
|
||||
>>> sc
|
||||
SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
|
||||
|
||||
>>> sc.hard_math(3)
|
||||
19
|
||||
>>> sc == SomeClass(1, [1, 2, 3])
|
||||
True
|
||||
>>> sc != SomeClass(2, [3, 2, 1])
|
||||
True
|
||||
|
||||
>>> asdict(sc)
|
||||
{'a_number': 1, 'list_of_numbers': [1, 2, 3]}
|
||||
|
||||
>>> SomeClass()
|
||||
SomeClass(a_number=42, list_of_numbers=[])
|
||||
|
||||
>>> C = make_class("C", ["a", "b"])
|
||||
>>> C("foo", "bar")
|
||||
C(a='foo', b='bar')
|
||||
```
|
||||
|
||||
After *declaring* your attributes, *attrs* gives you:
|
||||
|
||||
- a concise and explicit overview of the class's attributes,
|
||||
- a nice human-readable `__repr__`,
|
||||
- equality-checking methods,
|
||||
- an initializer,
|
||||
- and much more,
|
||||
|
||||
*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
|
||||
|
||||
---
|
||||
|
||||
This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0.
|
||||
The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**.
|
||||
|
||||
Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation!
|
||||
|
||||
|
||||
### Hate Type Annotations!?
|
||||
|
||||
No problem!
|
||||
Types are entirely **optional** with *attrs*.
|
||||
Simply assign `attrs.field()` to the attributes instead of annotating them with types:
|
||||
|
||||
```python
|
||||
from attrs import define, field
|
||||
|
||||
@define
|
||||
class SomeClass:
|
||||
a_number = field(default=42)
|
||||
list_of_numbers = field(factory=list)
|
||||
```
|
||||
|
||||
|
||||
## Data Classes
|
||||
|
||||
On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*).
|
||||
In practice it does a lot more and is more flexible.
|
||||
For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger.
|
||||
|
||||
For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice.
|
||||
|
||||
|
||||
## Project Information
|
||||
|
||||
- [**Changelog**](https://www.attrs.org/en/stable/changelog.html)
|
||||
- [**Documentation**](https://www.attrs.org/)
|
||||
- [**PyPI**](https://pypi.org/project/attrs/)
|
||||
- [**Source Code**](https://github.com/python-attrs/attrs)
|
||||
- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md)
|
||||
- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs)
|
||||
- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)
|
||||
|
||||
|
||||
### *attrs* for Enterprise
|
||||
|
||||
Available as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek).
|
||||
|
||||
The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
|
||||
Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
|
||||
|
||||
## Release Information
|
||||
|
||||
### Backwards-incompatible Changes
|
||||
|
||||
- Class-level `kw_only=True` behavior is now consistent with `dataclasses`.
|
||||
|
||||
Previously, a class that sets `kw_only=True` makes all attributes keyword-only, including those from base classes.
|
||||
If an attribute sets `kw_only=False`, that setting is ignored, and it is still made keyword-only.
|
||||
|
||||
Now, only the attributes defined in that class that doesn't explicitly set `kw_only=False` are made keyword-only.
|
||||
|
||||
This shouldn't be a problem for most users, unless you have a pattern like this:
|
||||
|
||||
```python
|
||||
@attrs.define(kw_only=True)
|
||||
class Base:
|
||||
a: int
|
||||
b: int = attrs.field(default=1, kw_only=False)
|
||||
|
||||
@attrs.define
|
||||
class Subclass(Base):
|
||||
c: int
|
||||
```
|
||||
|
||||
Here, we have a `kw_only=True` *attrs* class (`Base`) with an attribute that sets `kw_only=False` and has a default (`Base.b`), and then create a subclass (`Subclass`) with required arguments (`Subclass.c`).
|
||||
Previously this would work, since it would make `Base.b` keyword-only, but now this fails since `Base.b` is positional, and we have a required positional argument (`Subclass.c`) following another argument with defaults.
|
||||
[#1457](https://github.com/python-attrs/attrs/issues/1457)
|
||||
|
||||
|
||||
### Changes
|
||||
|
||||
- Values passed to the `__init__()` method of `attrs` classes are now correctly passed to `__attrs_pre_init__()` instead of their default values (in cases where *kw_only* was not specified).
|
||||
[#1427](https://github.com/python-attrs/attrs/issues/1427)
|
||||
- Added support for Python 3.14 and [PEP 749](https://peps.python.org/pep-0749/).
|
||||
[#1446](https://github.com/python-attrs/attrs/issues/1446),
|
||||
[#1451](https://github.com/python-attrs/attrs/issues/1451)
|
||||
- `attrs.validators.deep_mapping()` now allows to leave out either *key_validator* xor *value_validator*.
|
||||
[#1448](https://github.com/python-attrs/attrs/issues/1448)
|
||||
- `attrs.validators.deep_iterator()` and `attrs.validators.deep_mapping()` now accept lists and tuples for all validators and wrap them into a `attrs.validators.and_()`.
|
||||
[#1449](https://github.com/python-attrs/attrs/issues/1449)
|
||||
- Added a new **experimental** way to inspect classes:
|
||||
|
||||
`attrs.inspect(cls)` returns the _effective_ class-wide parameters that were used by *attrs* to construct the class.
|
||||
|
||||
The returned class is the same data structure that *attrs* uses internally to decide how to construct the final class.
|
||||
[#1454](https://github.com/python-attrs/attrs/issues/1454)
|
||||
- Fixed annotations for `attrs.field(converter=...)`.
|
||||
Previously, a `tuple` of converters was only accepted if it had exactly one element.
|
||||
[#1461](https://github.com/python-attrs/attrs/issues/1461)
|
||||
- The performance of `attrs.asdict()` has been improved by 45–260%.
|
||||
[#1463](https://github.com/python-attrs/attrs/issues/1463)
|
||||
- The performance of `attrs.astuple()` has been improved by 49–270%.
|
||||
[#1469](https://github.com/python-attrs/attrs/issues/1469)
|
||||
- The type annotation for `attrs.validators.or_()` now allows for different types of validators.
|
||||
|
||||
This was only an issue on Pyright.
|
||||
[#1474](https://github.com/python-attrs/attrs/issues/1474)
|
||||
|
||||
|
||||
|
||||
---
|
||||
|
||||
[Full changelog →](https://www.attrs.org/en/stable/changelog.html)
|
||||
@@ -0,0 +1,55 @@
|
||||
attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057
|
||||
attr/__init__.pyi,sha256=IZkzIjvtbRqDWGkDBIF9dd12FgDa379JYq3GHnVOvFQ,11309
|
||||
attr/__pycache__/__init__.cpython-39.pyc,,
|
||||
attr/__pycache__/_cmp.cpython-39.pyc,,
|
||||
attr/__pycache__/_compat.cpython-39.pyc,,
|
||||
attr/__pycache__/_config.cpython-39.pyc,,
|
||||
attr/__pycache__/_funcs.cpython-39.pyc,,
|
||||
attr/__pycache__/_make.cpython-39.pyc,,
|
||||
attr/__pycache__/_next_gen.cpython-39.pyc,,
|
||||
attr/__pycache__/_version_info.cpython-39.pyc,,
|
||||
attr/__pycache__/converters.cpython-39.pyc,,
|
||||
attr/__pycache__/exceptions.cpython-39.pyc,,
|
||||
attr/__pycache__/filters.cpython-39.pyc,,
|
||||
attr/__pycache__/setters.cpython-39.pyc,,
|
||||
attr/__pycache__/validators.cpython-39.pyc,,
|
||||
attr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117
|
||||
attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368
|
||||
attr/_compat.py,sha256=x0g7iEUOnBVJC72zyFCgb1eKqyxS-7f2LGnNyZ_r95s,2829
|
||||
attr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843
|
||||
attr/_funcs.py,sha256=Ix5IETTfz5F01F-12MF_CSFomIn2h8b67EVVz2gCtBE,16479
|
||||
attr/_make.py,sha256=NRJDGS8syg2h3YNflVNoK2FwR3CpdSZxx8M6lacwljA,104141
|
||||
attr/_next_gen.py,sha256=BQtCUlzwg2gWHTYXBQvrEYBnzBUrDvO57u0Py6UCPhc,26274
|
||||
attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469
|
||||
attr/_version_info.py,sha256=w4R-FYC3NK_kMkGUWJlYP4cVAlH9HRaC-um3fcjYkHM,2222
|
||||
attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
|
||||
attr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861
|
||||
attr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643
|
||||
attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977
|
||||
attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
|
||||
attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795
|
||||
attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208
|
||||
attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
attr/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617
|
||||
attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584
|
||||
attr/validators.py,sha256=1BnYGTuYvSucGEI4ju-RPNJteVzG0ZlfWpJiWoSFHQ8,21458
|
||||
attr/validators.pyi,sha256=ftmW3m4KJ3pQcIXAj-BejT7BY4ZfqrC1G-5W7XvoPds,4082
|
||||
attrs-25.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
attrs-25.4.0.dist-info/METADATA,sha256=2Rerxj7agcMRxiwdkt6lC2guqHAmkGKCH13nWWK7ZoQ,10473
|
||||
attrs-25.4.0.dist-info/RECORD,,
|
||||
attrs-25.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
||||
attrs-25.4.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
|
||||
attrs/__init__.py,sha256=RxaAZNwYiEh-fcvHLZNpQ_DWKni73M_jxEPEftiq1Zc,1183
|
||||
attrs/__init__.pyi,sha256=2gV79g9UxJppGSM48hAZJ6h_MHb70dZoJL31ZNJeZYI,9416
|
||||
attrs/__pycache__/__init__.cpython-39.pyc,,
|
||||
attrs/__pycache__/converters.cpython-39.pyc,,
|
||||
attrs/__pycache__/exceptions.cpython-39.pyc,,
|
||||
attrs/__pycache__/filters.cpython-39.pyc,,
|
||||
attrs/__pycache__/setters.cpython-39.pyc,,
|
||||
attrs/__pycache__/validators.cpython-39.pyc,,
|
||||
attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76
|
||||
attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76
|
||||
attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73
|
||||
attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73
|
||||
attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76
|
||||
@@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.27.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Hynek Schlawack and the attrs contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
72
.venv/lib/python3.9/site-packages/attrs/__init__.py
Normal file
72
.venv/lib/python3.9/site-packages/attrs/__init__.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
AttrsInstance,
|
||||
Converter,
|
||||
Factory,
|
||||
NothingType,
|
||||
_make_getattr,
|
||||
assoc,
|
||||
cmp_using,
|
||||
define,
|
||||
evolve,
|
||||
field,
|
||||
fields,
|
||||
fields_dict,
|
||||
frozen,
|
||||
has,
|
||||
make_class,
|
||||
mutable,
|
||||
resolve_types,
|
||||
validate,
|
||||
)
|
||||
from attr._make import ClassProps
|
||||
from attr._next_gen import asdict, astuple, inspect
|
||||
|
||||
from . import converters, exceptions, filters, setters, validators
|
||||
|
||||
|
||||
__all__ = [
|
||||
"NOTHING",
|
||||
"Attribute",
|
||||
"AttrsInstance",
|
||||
"ClassProps",
|
||||
"Converter",
|
||||
"Factory",
|
||||
"NothingType",
|
||||
"__author__",
|
||||
"__copyright__",
|
||||
"__description__",
|
||||
"__doc__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__title__",
|
||||
"__url__",
|
||||
"__version__",
|
||||
"__version_info__",
|
||||
"asdict",
|
||||
"assoc",
|
||||
"astuple",
|
||||
"cmp_using",
|
||||
"converters",
|
||||
"define",
|
||||
"evolve",
|
||||
"exceptions",
|
||||
"field",
|
||||
"fields",
|
||||
"fields_dict",
|
||||
"filters",
|
||||
"frozen",
|
||||
"has",
|
||||
"inspect",
|
||||
"make_class",
|
||||
"mutable",
|
||||
"resolve_types",
|
||||
"setters",
|
||||
"validate",
|
||||
"validators",
|
||||
]
|
||||
|
||||
__getattr__ = _make_getattr(__name__)
|
||||
314
.venv/lib/python3.9/site-packages/attrs/__init__.pyi
Normal file
314
.venv/lib/python3.9/site-packages/attrs/__init__.pyi
Normal file
@@ -0,0 +1,314 @@
|
||||
import sys
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Mapping,
|
||||
Sequence,
|
||||
overload,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
# Because we need to type our own stuff, we have to make everything from
|
||||
# attr explicitly public too.
|
||||
from attr import __author__ as __author__
|
||||
from attr import __copyright__ as __copyright__
|
||||
from attr import __description__ as __description__
|
||||
from attr import __email__ as __email__
|
||||
from attr import __license__ as __license__
|
||||
from attr import __title__ as __title__
|
||||
from attr import __url__ as __url__
|
||||
from attr import __version__ as __version__
|
||||
from attr import __version_info__ as __version_info__
|
||||
from attr import assoc as assoc
|
||||
from attr import Attribute as Attribute
|
||||
from attr import AttrsInstance as AttrsInstance
|
||||
from attr import cmp_using as cmp_using
|
||||
from attr import converters as converters
|
||||
from attr import Converter as Converter
|
||||
from attr import evolve as evolve
|
||||
from attr import exceptions as exceptions
|
||||
from attr import Factory as Factory
|
||||
from attr import fields as fields
|
||||
from attr import fields_dict as fields_dict
|
||||
from attr import filters as filters
|
||||
from attr import has as has
|
||||
from attr import make_class as make_class
|
||||
from attr import NOTHING as NOTHING
|
||||
from attr import resolve_types as resolve_types
|
||||
from attr import setters as setters
|
||||
from attr import validate as validate
|
||||
from attr import validators as validators
|
||||
from attr import attrib, asdict as asdict, astuple as astuple
|
||||
from attr import NothingType as NothingType
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import dataclass_transform
|
||||
else:
|
||||
from typing_extensions import dataclass_transform
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_C = TypeVar("_C", bound=type)
|
||||
|
||||
_EqOrderType = bool | Callable[[Any], Any]
|
||||
_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
|
||||
_CallableConverterType = Callable[[Any], Any]
|
||||
_ConverterType = _CallableConverterType | Converter[Any, Any]
|
||||
_ReprType = Callable[[Any], str]
|
||||
_ReprArgType = bool | _ReprType
|
||||
_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
|
||||
_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType
|
||||
_FieldTransformer = Callable[
|
||||
[type, list["Attribute[Any]"]], list["Attribute[Any]"]
|
||||
]
|
||||
# FIXME: in reality, if multiple validators are passed they must be in a list
|
||||
# or tuple, but those are invariant and so would prevent subtypes of
|
||||
# _ValidatorType from working when passed in a list or tuple.
|
||||
_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]
|
||||
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: None = ...,
|
||||
validator: None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
converter: None = ...,
|
||||
factory: None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: bool | None = ...,
|
||||
order: bool | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
type: type | None = ...,
|
||||
) -> Any: ...
|
||||
|
||||
# This form catches an explicit None or no default and infers the type from the
|
||||
# other arguments.
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: None = ...,
|
||||
validator: _ValidatorArgType[_T] | None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
converter: _ConverterType
|
||||
| list[_ConverterType]
|
||||
| tuple[_ConverterType, ...]
|
||||
| None = ...,
|
||||
factory: Callable[[], _T] | None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
type: type | None = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form catches an explicit default argument.
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: _T,
|
||||
validator: _ValidatorArgType[_T] | None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
converter: _ConverterType
|
||||
| list[_ConverterType]
|
||||
| tuple[_ConverterType, ...]
|
||||
| None = ...,
|
||||
factory: Callable[[], _T] | None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
type: type | None = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form covers type=non-Type: e.g. forward references (str), Any
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: _T | None = ...,
|
||||
validator: _ValidatorArgType[_T] | None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
metadata: Mapping[Any, Any] | None = ...,
|
||||
converter: _ConverterType
|
||||
| list[_ConverterType]
|
||||
| tuple[_ConverterType, ...]
|
||||
| None = ...,
|
||||
factory: Callable[[], _T] | None = ...,
|
||||
kw_only: bool | None = ...,
|
||||
eq: _EqOrderType | None = ...,
|
||||
order: _EqOrderType | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
alias: str | None = ...,
|
||||
type: type | None = ...,
|
||||
) -> Any: ...
|
||||
@overload
|
||||
@dataclass_transform(field_specifiers=(attrib, field))
|
||||
def define(
|
||||
maybe_cls: _C,
|
||||
*,
|
||||
these: dict[str, Any] | None = ...,
|
||||
repr: bool = ...,
|
||||
unsafe_hash: bool | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: bool | None = ...,
|
||||
order: bool | None = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: bool | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
field_transformer: _FieldTransformer | None = ...,
|
||||
match_args: bool = ...,
|
||||
) -> _C: ...
|
||||
@overload
|
||||
@dataclass_transform(field_specifiers=(attrib, field))
|
||||
def define(
|
||||
maybe_cls: None = ...,
|
||||
*,
|
||||
these: dict[str, Any] | None = ...,
|
||||
repr: bool = ...,
|
||||
unsafe_hash: bool | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: bool | None = ...,
|
||||
order: bool | None = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: bool | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
field_transformer: _FieldTransformer | None = ...,
|
||||
match_args: bool = ...,
|
||||
) -> Callable[[_C], _C]: ...
|
||||
|
||||
mutable = define
|
||||
|
||||
@overload
|
||||
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
|
||||
def frozen(
|
||||
maybe_cls: _C,
|
||||
*,
|
||||
these: dict[str, Any] | None = ...,
|
||||
repr: bool = ...,
|
||||
unsafe_hash: bool | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: bool | None = ...,
|
||||
order: bool | None = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: bool | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
field_transformer: _FieldTransformer | None = ...,
|
||||
match_args: bool = ...,
|
||||
) -> _C: ...
|
||||
@overload
|
||||
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
|
||||
def frozen(
|
||||
maybe_cls: None = ...,
|
||||
*,
|
||||
these: dict[str, Any] | None = ...,
|
||||
repr: bool = ...,
|
||||
unsafe_hash: bool | None = ...,
|
||||
hash: bool | None = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: bool | None = ...,
|
||||
order: bool | None = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: bool | None = ...,
|
||||
on_setattr: _OnSetAttrArgType | None = ...,
|
||||
field_transformer: _FieldTransformer | None = ...,
|
||||
match_args: bool = ...,
|
||||
) -> Callable[[_C], _C]: ...
|
||||
|
||||
class ClassProps:
|
||||
# XXX: somehow when defining/using enums Mypy starts looking at our own
|
||||
# (untyped) code and causes tons of errors.
|
||||
Hashability: Any
|
||||
KeywordOnly: Any
|
||||
|
||||
is_exception: bool
|
||||
is_slotted: bool
|
||||
has_weakref_slot: bool
|
||||
is_frozen: bool
|
||||
# kw_only: ClassProps.KeywordOnly
|
||||
kw_only: Any
|
||||
collected_fields_by_mro: bool
|
||||
added_init: bool
|
||||
added_repr: bool
|
||||
added_eq: bool
|
||||
added_ordering: bool
|
||||
# hashability: ClassProps.Hashability
|
||||
hashability: Any
|
||||
added_match_args: bool
|
||||
added_str: bool
|
||||
added_pickling: bool
|
||||
on_setattr_hook: _OnSetAttrType | None
|
||||
field_transformer: Callable[[Attribute[Any]], Attribute[Any]] | None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
is_exception: bool,
|
||||
is_slotted: bool,
|
||||
has_weakref_slot: bool,
|
||||
is_frozen: bool,
|
||||
# kw_only: ClassProps.KeywordOnly
|
||||
kw_only: Any,
|
||||
collected_fields_by_mro: bool,
|
||||
added_init: bool,
|
||||
added_repr: bool,
|
||||
added_eq: bool,
|
||||
added_ordering: bool,
|
||||
# hashability: ClassProps.Hashability
|
||||
hashability: Any,
|
||||
added_match_args: bool,
|
||||
added_str: bool,
|
||||
added_pickling: bool,
|
||||
on_setattr_hook: _OnSetAttrType,
|
||||
field_transformer: Callable[[Attribute[Any]], Attribute[Any]],
|
||||
) -> None: ...
|
||||
@property
|
||||
def is_hashable(self) -> bool: ...
|
||||
|
||||
def inspect(cls: type) -> ClassProps: ...
|
||||
3
.venv/lib/python3.9/site-packages/attrs/converters.py
Normal file
3
.venv/lib/python3.9/site-packages/attrs/converters.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.converters import * # noqa: F403
|
||||
3
.venv/lib/python3.9/site-packages/attrs/exceptions.py
Normal file
3
.venv/lib/python3.9/site-packages/attrs/exceptions.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.exceptions import * # noqa: F403
|
||||
3
.venv/lib/python3.9/site-packages/attrs/filters.py
Normal file
3
.venv/lib/python3.9/site-packages/attrs/filters.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.filters import * # noqa: F403
|
||||
0
.venv/lib/python3.9/site-packages/attrs/py.typed
Normal file
0
.venv/lib/python3.9/site-packages/attrs/py.typed
Normal file
3
.venv/lib/python3.9/site-packages/attrs/setters.py
Normal file
3
.venv/lib/python3.9/site-packages/attrs/setters.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.setters import * # noqa: F403
|
||||
3
.venv/lib/python3.9/site-packages/attrs/validators.py
Normal file
3
.venv/lib/python3.9/site-packages/attrs/validators.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.validators import * # noqa: F403
|
||||
@@ -0,0 +1 @@
|
||||
import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim();
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,170 @@
|
||||
Metadata-Version: 2.4
|
||||
Name: jsonschema
|
||||
Version: 4.25.1
|
||||
Summary: An implementation of JSON Schema validation for Python
|
||||
Project-URL: Homepage, https://github.com/python-jsonschema/jsonschema
|
||||
Project-URL: Documentation, https://python-jsonschema.readthedocs.io/
|
||||
Project-URL: Issues, https://github.com/python-jsonschema/jsonschema/issues/
|
||||
Project-URL: Funding, https://github.com/sponsors/Julian
|
||||
Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=pypi-link
|
||||
Project-URL: Changelog, https://github.com/python-jsonschema/jsonschema/blob/main/CHANGELOG.rst
|
||||
Project-URL: Source, https://github.com/python-jsonschema/jsonschema
|
||||
Author-email: Julian Berman <Julian+jsonschema@GrayVines.com>
|
||||
License-Expression: MIT
|
||||
License-File: COPYING
|
||||
Keywords: data validation,json,json schema,jsonschema,validation
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: File Formats :: JSON
|
||||
Classifier: Topic :: File Formats :: JSON :: JSON Schema
|
||||
Requires-Python: >=3.9
|
||||
Requires-Dist: attrs>=22.2.0
|
||||
Requires-Dist: jsonschema-specifications>=2023.03.6
|
||||
Requires-Dist: referencing>=0.28.4
|
||||
Requires-Dist: rpds-py>=0.7.1
|
||||
Provides-Extra: format
|
||||
Requires-Dist: fqdn; extra == 'format'
|
||||
Requires-Dist: idna; extra == 'format'
|
||||
Requires-Dist: isoduration; extra == 'format'
|
||||
Requires-Dist: jsonpointer>1.13; extra == 'format'
|
||||
Requires-Dist: rfc3339-validator; extra == 'format'
|
||||
Requires-Dist: rfc3987; extra == 'format'
|
||||
Requires-Dist: uri-template; extra == 'format'
|
||||
Requires-Dist: webcolors>=1.11; extra == 'format'
|
||||
Provides-Extra: format-nongpl
|
||||
Requires-Dist: fqdn; extra == 'format-nongpl'
|
||||
Requires-Dist: idna; extra == 'format-nongpl'
|
||||
Requires-Dist: isoduration; extra == 'format-nongpl'
|
||||
Requires-Dist: jsonpointer>1.13; extra == 'format-nongpl'
|
||||
Requires-Dist: rfc3339-validator; extra == 'format-nongpl'
|
||||
Requires-Dist: rfc3986-validator>0.1.0; extra == 'format-nongpl'
|
||||
Requires-Dist: rfc3987-syntax>=1.1.0; extra == 'format-nongpl'
|
||||
Requires-Dist: uri-template; extra == 'format-nongpl'
|
||||
Requires-Dist: webcolors>=24.6.0; extra == 'format-nongpl'
|
||||
Description-Content-Type: text/x-rst
|
||||
|
||||
==========
|
||||
jsonschema
|
||||
==========
|
||||
|
||||
|PyPI| |Pythons| |CI| |ReadTheDocs| |Precommit| |Zenodo|
|
||||
|
||||
.. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg
|
||||
:alt: PyPI version
|
||||
:target: https://pypi.org/project/jsonschema/
|
||||
|
||||
.. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg
|
||||
:alt: Supported Python versions
|
||||
:target: https://pypi.org/project/jsonschema/
|
||||
|
||||
.. |CI| image:: https://github.com/python-jsonschema/jsonschema/workflows/CI/badge.svg
|
||||
:alt: Build status
|
||||
:target: https://github.com/python-jsonschema/jsonschema/actions?query=workflow%3ACI
|
||||
|
||||
.. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat
|
||||
:alt: ReadTheDocs status
|
||||
:target: https://python-jsonschema.readthedocs.io/en/stable/
|
||||
|
||||
.. |Precommit| image:: https://results.pre-commit.ci/badge/github/python-jsonschema/jsonschema/main.svg
|
||||
:alt: pre-commit.ci status
|
||||
:target: https://results.pre-commit.ci/latest/github/python-jsonschema/jsonschema/main
|
||||
|
||||
.. |Zenodo| image:: https://zenodo.org/badge/3072629.svg
|
||||
:alt: Zenodo DOI
|
||||
:target: https://zenodo.org/badge/latestdoi/3072629
|
||||
|
||||
|
||||
``jsonschema`` is an implementation of the `JSON Schema <https://json-schema.org>`_ specification for Python.
|
||||
|
||||
.. code:: python
|
||||
|
||||
>>> from jsonschema import validate
|
||||
|
||||
>>> # A sample schema, like what we'd get from json.load()
|
||||
>>> schema = {
|
||||
... "type" : "object",
|
||||
... "properties" : {
|
||||
... "price" : {"type" : "number"},
|
||||
... "name" : {"type" : "string"},
|
||||
... },
|
||||
... }
|
||||
|
||||
>>> # If no exception is raised by validate(), the instance is valid.
|
||||
>>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema)
|
||||
|
||||
>>> validate(
|
||||
... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema,
|
||||
... ) # doctest: +IGNORE_EXCEPTION_DETAIL
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
ValidationError: 'Invalid' is not of type 'number'
|
||||
|
||||
It can also be used from the command line by installing `check-jsonschema <https://github.com/python-jsonschema/check-jsonschema>`_.
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
* Full support for `Draft 2020-12 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft202012Validator>`_, `Draft 2019-09 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft201909Validator>`_, `Draft 7 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft7Validator>`_, `Draft 6 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft6Validator>`_, `Draft 4 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft4Validator>`_ and `Draft 3 <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/validators/#jsonschema.validators.Draft3Validator>`_
|
||||
|
||||
* `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/api/jsonschema/protocols/#jsonschema.protocols.Validator.iter_errors>`_ that can iteratively report *all* validation errors.
|
||||
|
||||
* `Programmatic querying <https://python-jsonschema.readthedocs.io/en/latest/errors/>`_ of which properties or items failed validation.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
``jsonschema`` is available on `PyPI <https://pypi.org/project/jsonschema/>`_. You can install using `pip <https://pip.pypa.io/en/stable/>`_:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ pip install jsonschema
|
||||
|
||||
|
||||
Extras
|
||||
======
|
||||
|
||||
Two extras are available when installing the package, both currently related to ``format`` validation:
|
||||
|
||||
* ``format``
|
||||
* ``format-nongpl``
|
||||
|
||||
They can be used when installing in order to include additional dependencies, e.g.:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ pip install jsonschema'[format]'
|
||||
|
||||
Be aware that the mere presence of these dependencies – or even the specification of ``format`` checks in a schema – do *not* activate format checks (as per the specification).
|
||||
Please read the `format validation documentation <https://python-jsonschema.readthedocs.io/en/latest/validate/#validating-formats>`_ for further details.
|
||||
|
||||
About
|
||||
-----
|
||||
|
||||
I'm Julian Berman.
|
||||
|
||||
``jsonschema`` is on `GitHub <https://github.com/python-jsonschema/jsonschema>`_.
|
||||
|
||||
Get in touch, via GitHub or otherwise, if you've got something to contribute, it'd be most welcome!
|
||||
|
||||
If you feel overwhelmingly grateful, you can also `sponsor me <https://github.com/sponsors/Julian/>`_.
|
||||
|
||||
And for companies who appreciate ``jsonschema`` and its continued support and growth, ``jsonschema`` is also now supportable via `TideLift <https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=readme>`_.
|
||||
|
||||
|
||||
Release Information
|
||||
-------------------
|
||||
|
||||
v4.25.1
|
||||
=======
|
||||
|
||||
* Fix an incorrect required argument in the ``Validator`` protocol's type annotations (#1396).
|
||||
@@ -0,0 +1,81 @@
|
||||
../../../bin/jsonschema,sha256=9g5ecyMOy5s32BHUDNuR7Eb9Ayk33DJwV0G3p-_1Aio,205
|
||||
jsonschema-4.25.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
jsonschema-4.25.1.dist-info/METADATA,sha256=Mfg8FXnkbhr2ImnO-l2DU07xVfPuIZFPeIG71US8Elw,7608
|
||||
jsonschema-4.25.1.dist-info/RECORD,,
|
||||
jsonschema-4.25.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jsonschema-4.25.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
||||
jsonschema-4.25.1.dist-info/entry_points.txt,sha256=vO7rX4Fs_xIVJy2pnAtKgTSxfpnozAVQ0DjCmpMxnWE,51
|
||||
jsonschema-4.25.1.dist-info/licenses/COPYING,sha256=T5KgFaE8TRoEC-8BiqE0MLTxvHO0Gxa7hGw0Z2bedDk,1057
|
||||
jsonschema/__init__.py,sha256=p-Rw4TS_0OPHZIJyImDWsdWgmd6CPWHMXLq7BuQxTGc,3941
|
||||
jsonschema/__main__.py,sha256=iLsZf2upUB3ilBKTlMnyK-HHt2Cnnfkwwxi_c6gLvSA,115
|
||||
jsonschema/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/__main__.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_format.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_keywords.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_legacy_keywords.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_types.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_typing.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/_utils.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/cli.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/exceptions.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/protocols.cpython-39.pyc,,
|
||||
jsonschema/__pycache__/validators.cpython-39.pyc,,
|
||||
jsonschema/_format.py,sha256=ip1N16CBBOb_8sM_iejyW0U5JY9kXoz3O_AFj1SHFl8,15581
|
||||
jsonschema/_keywords.py,sha256=r8_DrqAfn6QLwQnmXEggveiSU-UaIL2p2nuPINelfFc,14949
|
||||
jsonschema/_legacy_keywords.py,sha256=2tWuwRPWbYS7EAl8wBIC_rabGuv1J4dfYLqNEPpShhA,15191
|
||||
jsonschema/_types.py,sha256=0pYJG61cn_4ZWVnqyD24tax2QBMlnSPy0fcECCpASMk,5456
|
||||
jsonschema/_typing.py,sha256=hFfAEeFJ76LYAl_feuVa0gnHnV9VEq_UhjLJS-7axgY,630
|
||||
jsonschema/_utils.py,sha256=Xv6_wKKslBJlwyj9-j2c8JDFw-4z4aWFnVe2pX8h7U4,10659
|
||||
jsonschema/benchmarks/__init__.py,sha256=A0sQrxDBVHSyQ-8ru3L11hMXf3q9gVuB9x_YgHb4R9M,70
|
||||
jsonschema/benchmarks/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/const_vs_enum.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/contains.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/issue232.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/json_schema_test_suite.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/nested_schemas.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/subcomponents.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/unused_registry.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/useless_applicator_schemas.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/useless_keywords.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/__pycache__/validator_creation.cpython-39.pyc,,
|
||||
jsonschema/benchmarks/const_vs_enum.py,sha256=DVFi3WDqBalZFOibnjpX1uTSr3Rxa2cPgFcowd7Ukrs,830
|
||||
jsonschema/benchmarks/contains.py,sha256=gexQoUrCOwECofbt19BeosQZ7WFL6PDdkX49DWwBlOg,786
|
||||
jsonschema/benchmarks/issue232.py,sha256=3LLYLIlBGQnVuyyo2iAv-xky5P6PRFHANx4-zIIQOoE,521
|
||||
jsonschema/benchmarks/issue232/issue.json,sha256=eaPOZjMRu5u8RpKrsA9uk7ucPZS5tkKG4D_hkOTQ3Hk,117105
|
||||
jsonschema/benchmarks/json_schema_test_suite.py,sha256=PvfabpUYcF4_7csYDTcTauED8rnFEGYbdY5RqTXD08s,320
|
||||
jsonschema/benchmarks/nested_schemas.py,sha256=mo07dx-CIgmSOI62CNs4g5xu1FzHklLBpkQoDxWYcKs,1892
|
||||
jsonschema/benchmarks/subcomponents.py,sha256=fEyiMzsWeK2pd7DEGCuuY-vzGunwhHczRBWEnBRLKIo,1113
|
||||
jsonschema/benchmarks/unused_registry.py,sha256=hwRwONc9cefPtYzkoX_TYRO3GyUojriv0-YQaK3vnj0,940
|
||||
jsonschema/benchmarks/useless_applicator_schemas.py,sha256=EVm5-EtOEFoLP_Vt2j4SrCwlx05NhPqNuZQ6LIMP1Dc,3342
|
||||
jsonschema/benchmarks/useless_keywords.py,sha256=bj_zKr1oVctFlqyZaObCsYTgFjiiNgPzC0hr1Y868mE,867
|
||||
jsonschema/benchmarks/validator_creation.py,sha256=UkUQlLAnussnr_KdCIdad6xx2pXxQLmYtsXoiirKeWQ,285
|
||||
jsonschema/cli.py,sha256=av90OtpSxuiko3FAyEHtxpI-NSuX3WMtoQpIx09obJY,8445
|
||||
jsonschema/exceptions.py,sha256=b42hUDOfPFcprI4ZlNpjDeLKv8k9vOhgWct2jyDlxzk,15256
|
||||
jsonschema/protocols.py,sha256=lgyryVHqFijSFRAz95ch3VN2Fz-oTFyhj9Obcy5P_CI,7198
|
||||
jsonschema/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jsonschema/tests/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/_suite.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/fuzz_validate.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_cli.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_deprecations.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_exceptions.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_format.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_jsonschema_test_suite.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_types.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_utils.cpython-39.pyc,,
|
||||
jsonschema/tests/__pycache__/test_validators.cpython-39.pyc,,
|
||||
jsonschema/tests/_suite.py,sha256=2k0X91N7dOHhQc5mrYv40OKf1weioj6RMBqWgLT6-PI,8374
|
||||
jsonschema/tests/fuzz_validate.py,sha256=fUA7yTJIihaCwJplkUehZeyB84HcXEcqtY5oPJXIO7I,1114
|
||||
jsonschema/tests/test_cli.py,sha256=A89r5LOHy-peLPZA5YDkOaMTWqzQO_w2Tu8WFz_vphM,28544
|
||||
jsonschema/tests/test_deprecations.py,sha256=yG6mkRJHpTHbWoxpLC5y5H7fk8erGOs8f_9V4tCBEh8,15754
|
||||
jsonschema/tests/test_exceptions.py,sha256=lWTRyeSeOaFd5dnutqy1YG9uocnxeM_0cIEVG6GgGMI,24310
|
||||
jsonschema/tests/test_format.py,sha256=eVm5SMaWF2lOPO28bPAwNvkiQvHCQKy-MnuAgEchfEc,3188
|
||||
jsonschema/tests/test_jsonschema_test_suite.py,sha256=tAfxknM65OR9LyDPHu1pkEaombLgjRLnJ6FPiWPdxjg,8461
|
||||
jsonschema/tests/test_types.py,sha256=cF51KTDmdsx06MrIc4fXKt0X9fIsVgw5uhT8CamVa8U,6977
|
||||
jsonschema/tests/test_utils.py,sha256=sao74o1PyYMxBfqweokQN48CFSS6yhJk5FkCfMJ5PsI,4163
|
||||
jsonschema/tests/test_validators.py,sha256=eiaigsZMzHYYsniQ1UPygaS56a1d-_7-9NC4wVXAhzs,87975
|
||||
jsonschema/tests/typing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jsonschema/tests/typing/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema/tests/typing/__pycache__/test_all_concrete_validators_match_protocol.cpython-39.pyc,,
|
||||
jsonschema/tests/typing/test_all_concrete_validators_match_protocol.py,sha256=I5XUl5ZYUSZo3APkF10l8Mnfk09oXKqvXWttGGd3sDk,1238
|
||||
jsonschema/validators.py,sha256=AI0bQrGJpvEH1RSO3ynwWcNvfkqN6WJPgyy0VN7WlSE,47147
|
||||
@@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.27.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@@ -0,0 +1,2 @@
|
||||
[console_scripts]
|
||||
jsonschema = jsonschema.cli:main
|
||||
@@ -0,0 +1,19 @@
|
||||
Copyright (c) 2013 Julian Berman
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
120
.venv/lib/python3.9/site-packages/jsonschema/__init__.py
Normal file
120
.venv/lib/python3.9/site-packages/jsonschema/__init__.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""
|
||||
An implementation of JSON Schema for Python.
|
||||
|
||||
The main functionality is provided by the validator classes for each of the
|
||||
supported JSON Schema versions.
|
||||
|
||||
Most commonly, `jsonschema.validators.validate` is the quickest way to simply
|
||||
validate a given instance under a schema, and will create a validator
|
||||
for you.
|
||||
"""
|
||||
import warnings
|
||||
|
||||
from jsonschema._format import FormatChecker
|
||||
from jsonschema._types import TypeChecker
|
||||
from jsonschema.exceptions import SchemaError, ValidationError
|
||||
from jsonschema.validators import (
|
||||
Draft3Validator,
|
||||
Draft4Validator,
|
||||
Draft6Validator,
|
||||
Draft7Validator,
|
||||
Draft201909Validator,
|
||||
Draft202012Validator,
|
||||
validate,
|
||||
)
|
||||
|
||||
|
||||
def __getattr__(name):
|
||||
if name == "__version__":
|
||||
warnings.warn(
|
||||
"Accessing jsonschema.__version__ is deprecated and will be "
|
||||
"removed in a future release. Use importlib.metadata directly "
|
||||
"to query for jsonschema's version.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
from importlib import metadata
|
||||
return metadata.version("jsonschema")
|
||||
elif name == "RefResolver":
|
||||
from jsonschema.validators import _RefResolver
|
||||
warnings.warn(
|
||||
_RefResolver._DEPRECATION_MESSAGE,
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _RefResolver
|
||||
elif name == "ErrorTree":
|
||||
warnings.warn(
|
||||
"Importing ErrorTree directly from the jsonschema package "
|
||||
"is deprecated and will become an ImportError. Import it from "
|
||||
"jsonschema.exceptions instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
from jsonschema.exceptions import ErrorTree
|
||||
return ErrorTree
|
||||
elif name == "FormatError":
|
||||
warnings.warn(
|
||||
"Importing FormatError directly from the jsonschema package "
|
||||
"is deprecated and will become an ImportError. Import it from "
|
||||
"jsonschema.exceptions instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
from jsonschema.exceptions import FormatError
|
||||
return FormatError
|
||||
elif name == "Validator":
|
||||
warnings.warn(
|
||||
"Importing Validator directly from the jsonschema package "
|
||||
"is deprecated and will become an ImportError. Import it from "
|
||||
"jsonschema.protocols instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
from jsonschema.protocols import Validator
|
||||
return Validator
|
||||
elif name == "RefResolutionError":
|
||||
from jsonschema.exceptions import _RefResolutionError
|
||||
warnings.warn(
|
||||
_RefResolutionError._DEPRECATION_MESSAGE,
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _RefResolutionError
|
||||
|
||||
format_checkers = {
|
||||
"draft3_format_checker": Draft3Validator,
|
||||
"draft4_format_checker": Draft4Validator,
|
||||
"draft6_format_checker": Draft6Validator,
|
||||
"draft7_format_checker": Draft7Validator,
|
||||
"draft201909_format_checker": Draft201909Validator,
|
||||
"draft202012_format_checker": Draft202012Validator,
|
||||
}
|
||||
ValidatorForFormat = format_checkers.get(name)
|
||||
if ValidatorForFormat is not None:
|
||||
warnings.warn(
|
||||
f"Accessing jsonschema.{name} is deprecated and will be "
|
||||
"removed in a future release. Instead, use the FORMAT_CHECKER "
|
||||
"attribute on the corresponding Validator.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return ValidatorForFormat.FORMAT_CHECKER
|
||||
|
||||
raise AttributeError(f"module {__name__} has no attribute {name}")
|
||||
|
||||
|
||||
__all__ = [
|
||||
"Draft3Validator",
|
||||
"Draft4Validator",
|
||||
"Draft6Validator",
|
||||
"Draft7Validator",
|
||||
"Draft201909Validator",
|
||||
"Draft202012Validator",
|
||||
"FormatChecker",
|
||||
"SchemaError",
|
||||
"TypeChecker",
|
||||
"ValidationError",
|
||||
"validate",
|
||||
]
|
||||
6
.venv/lib/python3.9/site-packages/jsonschema/__main__.py
Normal file
6
.venv/lib/python3.9/site-packages/jsonschema/__main__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""
|
||||
The jsonschema CLI is now deprecated in favor of check-jsonschema.
|
||||
"""
|
||||
from jsonschema.cli import main
|
||||
|
||||
main()
|
||||
546
.venv/lib/python3.9/site-packages/jsonschema/_format.py
Normal file
546
.venv/lib/python3.9/site-packages/jsonschema/_format.py
Normal file
@@ -0,0 +1,546 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from datetime import date, datetime
|
||||
from uuid import UUID
|
||||
import ipaddress
|
||||
import re
|
||||
import typing
|
||||
import warnings
|
||||
|
||||
from jsonschema.exceptions import FormatError
|
||||
|
||||
_FormatCheckCallable = typing.Callable[[object], bool]
|
||||
#: A format checker callable.
|
||||
_F = typing.TypeVar("_F", bound=_FormatCheckCallable)
|
||||
_RaisesType = typing.Union[type[Exception], tuple[type[Exception], ...]]
|
||||
|
||||
_RE_DATE = re.compile(r"^\d{4}-\d{2}-\d{2}$", re.ASCII)
|
||||
|
||||
|
||||
class FormatChecker:
|
||||
"""
|
||||
A ``format`` property checker.
|
||||
|
||||
JSON Schema does not mandate that the ``format`` property actually do any
|
||||
validation. If validation is desired however, instances of this class can
|
||||
be hooked into validators to enable format validation.
|
||||
|
||||
`FormatChecker` objects always return ``True`` when asked about
|
||||
formats that they do not know how to validate.
|
||||
|
||||
To add a check for a custom format use the `FormatChecker.checks`
|
||||
decorator.
|
||||
|
||||
Arguments:
|
||||
|
||||
formats:
|
||||
|
||||
The known formats to validate. This argument can be used to
|
||||
limit which formats will be used during validation.
|
||||
|
||||
"""
|
||||
|
||||
checkers: dict[
|
||||
str,
|
||||
tuple[_FormatCheckCallable, _RaisesType],
|
||||
] = {} # noqa: RUF012
|
||||
|
||||
def __init__(self, formats: typing.Iterable[str] | None = None):
|
||||
if formats is None:
|
||||
formats = self.checkers.keys()
|
||||
self.checkers = {k: self.checkers[k] for k in formats}
|
||||
|
||||
def __repr__(self):
|
||||
return f"<FormatChecker checkers={sorted(self.checkers)}>"
|
||||
|
||||
def checks(
|
||||
self, format: str, raises: _RaisesType = (),
|
||||
) -> typing.Callable[[_F], _F]:
|
||||
"""
|
||||
Register a decorated function as validating a new format.
|
||||
|
||||
Arguments:
|
||||
|
||||
format:
|
||||
|
||||
The format that the decorated function will check.
|
||||
|
||||
raises:
|
||||
|
||||
The exception(s) raised by the decorated function when an
|
||||
invalid instance is found.
|
||||
|
||||
The exception object will be accessible as the
|
||||
`jsonschema.exceptions.ValidationError.cause` attribute of the
|
||||
resulting validation error.
|
||||
|
||||
"""
|
||||
|
||||
def _checks(func: _F) -> _F:
|
||||
self.checkers[format] = (func, raises)
|
||||
return func
|
||||
|
||||
return _checks
|
||||
|
||||
@classmethod
|
||||
def cls_checks(
|
||||
cls, format: str, raises: _RaisesType = (),
|
||||
) -> typing.Callable[[_F], _F]:
|
||||
warnings.warn(
|
||||
(
|
||||
"FormatChecker.cls_checks is deprecated. Call "
|
||||
"FormatChecker.checks on a specific FormatChecker instance "
|
||||
"instead."
|
||||
),
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return cls._cls_checks(format=format, raises=raises)
|
||||
|
||||
@classmethod
|
||||
def _cls_checks(
|
||||
cls, format: str, raises: _RaisesType = (),
|
||||
) -> typing.Callable[[_F], _F]:
|
||||
def _checks(func: _F) -> _F:
|
||||
cls.checkers[format] = (func, raises)
|
||||
return func
|
||||
|
||||
return _checks
|
||||
|
||||
def check(self, instance: object, format: str) -> None:
    """
    Check whether the instance conforms to the given format.

    Arguments:

        instance (*any primitive type*, i.e. str, number, bool):

            The instance to check

        format:

            The format that instance should conform to

    Raises:

        FormatError:

            if the instance does not conform to ``format``

    """
    registered = self.checkers.get(format)
    if registered is None:
        # Unknown formats are not validated at all.
        return

    func, raises = registered
    cause = None
    result = None
    try:
        result = func(instance)
    except raises as error:
        cause = error
    if not result:
        raise FormatError(f"{instance!r} is not a {format!r}", cause=cause)
|
||||
|
||||
def conforms(self, instance: object, format: str) -> bool:
    """
    Check whether the instance conforms to the given format.

    Arguments:

        instance (*any primitive type*, i.e. str, number, bool):

            The instance to check

        format:

            The format that instance should conform to

    Returns:

        bool: whether it conformed

    """
    try:
        self.check(instance, format)
    except FormatError:
        return False
    return True
|
||||
|
||||
|
||||
# One independent default checker per supported draft.
draft3_format_checker = FormatChecker()
draft4_format_checker = FormatChecker()
draft6_format_checker = FormatChecker()
draft7_format_checker = FormatChecker()
draft201909_format_checker = FormatChecker()
draft202012_format_checker = FormatChecker()

_draft_checkers: dict[str, FormatChecker] = {
    "draft3": draft3_format_checker,
    "draft4": draft4_format_checker,
    "draft6": draft6_format_checker,
    "draft7": draft7_format_checker,
    "draft201909": draft201909_format_checker,
    "draft202012": draft202012_format_checker,
}
|
||||
|
||||
|
||||
def _checks_drafts(
    name=None,
    draft3=None,
    draft4=None,
    draft6=None,
    draft7=None,
    draft201909=None,
    draft202012=None,
    raises=(),
) -> typing.Callable[[_F], _F]:
    """Register a checker under each draft's format name (``name`` is the
    fallback when a per-draft name is not given)."""
    # Resolve the effective format name for every draft up front.
    per_draft = {
        "draft3": draft3 or name,
        "draft4": draft4 or name,
        "draft6": draft6 or name,
        "draft7": draft7 or name,
        "draft201909": draft201909 or name,
        "draft202012": draft202012 or name,
    }

    def wrap(func: _F) -> _F:
        for draft, format in per_draft.items():
            if format:
                func = _draft_checkers[draft].checks(format, raises)(func)

        # Oy. This is bad global state, but relied upon for now, until
        # deprecation. See #519 and test_format_checkers_come_with_defaults
        newest_name = (
            per_draft["draft202012"]
            or per_draft["draft201909"]
            or per_draft["draft7"]
            or per_draft["draft6"]
            or per_draft["draft4"]
            or per_draft["draft3"]
        )
        FormatChecker._cls_checks(newest_name, raises)(func)
        return func

    return wrap
|
||||
|
||||
|
||||
@_checks_drafts(name="idn-email")
@_checks_drafts(name="email")
def is_email(instance: object) -> bool:
    """Loose email check: non-strings pass; strings must contain an ``@``."""
    if not isinstance(instance, str):
        return True
    return "@" in instance
|
||||
|
||||
|
||||
@_checks_drafts(
    draft3="ip-address",
    draft4="ipv4",
    draft6="ipv4",
    draft7="ipv4",
    draft201909="ipv4",
    draft202012="ipv4",
    raises=ipaddress.AddressValueError,
)
def is_ipv4(instance: object) -> bool:
    """Validate an IPv4 address; parsing errors surface via ``raises``."""
    if not isinstance(instance, str):
        return True
    return bool(ipaddress.IPv4Address(instance))
|
||||
|
||||
|
||||
@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError)
def is_ipv6(instance: object) -> bool:
    """Validate an IPv6 address, rejecting scoped (zone-id) addresses."""
    if not isinstance(instance, str):
        return True
    parsed = ipaddress.IPv6Address(instance)
    # Addresses carrying a scope id (e.g. "%eth0") are not valid here.
    return not getattr(parsed, "scope_id", "")
|
||||
|
||||
|
||||
with suppress(ImportError):
    from fqdn import FQDN

    @_checks_drafts(
        draft3="host-name",
        draft4="hostname",
        draft6="hostname",
        draft7="hostname",
        draft201909="hostname",
        draft202012="hostname",
        # fqdn.FQDN("") raises a ValueError due to a bug
        # however, it's not clear when or if that will be fixed, so catch it
        # here for now
        raises=ValueError,
    )
    def is_host_name(instance: object) -> bool:
        """Validate a hostname via the optional ``fqdn`` package."""
        if not isinstance(instance, str):
            return True
        return FQDN(instance, min_labels=1).is_valid
|
||||
|
||||
|
||||
with suppress(ImportError):
    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
    import idna

    @_checks_drafts(
        draft7="idn-hostname",
        draft201909="idn-hostname",
        draft202012="idn-hostname",
        raises=(idna.IDNAError, UnicodeError),
    )
    def is_idn_host_name(instance: object) -> bool:
        """Validate an internationalized hostname by attempting IDNA encoding."""
        if not isinstance(instance, str):
            return True
        # Success means valid; failure raises and is mapped via ``raises``.
        idna.encode(instance)
        return True
|
||||
|
||||
|
||||
try:
    import rfc3987
except ImportError:
    # Fall back to lighter-weight validators when rfc3987 is unavailable.
    with suppress(ImportError):
        from rfc3986_validator import validate_rfc3986

        @_checks_drafts(name="uri")
        def is_uri(instance: object) -> bool:
            """Validate a URI using rfc3986_validator."""
            if not isinstance(instance, str):
                return True
            return validate_rfc3986(instance, rule="URI")

        @_checks_drafts(
            draft6="uri-reference",
            draft7="uri-reference",
            draft201909="uri-reference",
            draft202012="uri-reference",
            raises=ValueError,
        )
        def is_uri_reference(instance: object) -> bool:
            """Validate a URI reference using rfc3986_validator."""
            if not isinstance(instance, str):
                return True
            return validate_rfc3986(instance, rule="URI_reference")

    with suppress(ImportError):
        from rfc3987_syntax import is_valid_syntax as _rfc3987_is_valid_syntax

        @_checks_drafts(
            draft7="iri",
            draft201909="iri",
            draft202012="iri",
            raises=ValueError,
        )
        def is_iri(instance: object) -> bool:
            """Validate an IRI using rfc3987_syntax."""
            if not isinstance(instance, str):
                return True
            return _rfc3987_is_valid_syntax("iri", instance)

        @_checks_drafts(
            draft7="iri-reference",
            draft201909="iri-reference",
            draft202012="iri-reference",
            raises=ValueError,
        )
        def is_iri_reference(instance: object) -> bool:
            """Validate an IRI reference using rfc3987_syntax."""
            if not isinstance(instance, str):
                return True
            return _rfc3987_is_valid_syntax("iri_reference", instance)

else:
    # rfc3987 is present: use it for IRIs and URIs alike.
    @_checks_drafts(
        draft7="iri",
        draft201909="iri",
        draft202012="iri",
        raises=ValueError,
    )
    def is_iri(instance: object) -> bool:
        """Validate an IRI using rfc3987."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="IRI")

    @_checks_drafts(
        draft7="iri-reference",
        draft201909="iri-reference",
        draft202012="iri-reference",
        raises=ValueError,
    )
    def is_iri_reference(instance: object) -> bool:
        """Validate an IRI reference using rfc3987."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="IRI_reference")

    @_checks_drafts(name="uri", raises=ValueError)
    def is_uri(instance: object) -> bool:
        """Validate a URI using rfc3987."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="URI")

    @_checks_drafts(
        draft6="uri-reference",
        draft7="uri-reference",
        draft201909="uri-reference",
        draft202012="uri-reference",
        raises=ValueError,
    )
    def is_uri_reference(instance: object) -> bool:
        """Validate a URI reference using rfc3987."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="URI_reference")
|
||||
|
||||
|
||||
with suppress(ImportError):
    from rfc3339_validator import validate_rfc3339

    @_checks_drafts(name="date-time")
    def is_datetime(instance: object) -> bool:
        """Validate an RFC 3339 date-time (case-insensitive ``T``/``Z``)."""
        if not isinstance(instance, str):
            return True
        return validate_rfc3339(instance.upper())

    @_checks_drafts(
        draft7="time",
        draft201909="time",
        draft202012="time",
    )
    def is_time(instance: object) -> bool:
        """Validate a time by anchoring it to a fixed date and reusing
        the date-time checker."""
        if not isinstance(instance, str):
            return True
        return is_datetime("1970-01-01T" + instance)
|
||||
|
||||
|
||||
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance: object) -> bool:
    """Validate a regular expression by compiling it; ``re.error`` signals
    an invalid pattern."""
    if not isinstance(instance, str):
        return True
    return bool(re.compile(instance))
|
||||
|
||||
|
||||
@_checks_drafts(
    draft3="date",
    draft7="date",
    draft201909="date",
    draft202012="date",
    raises=ValueError,
)
def is_date(instance: object) -> bool:
    """Validate a full-date: the regex pins the RFC 3339 shape, then
    ``date.fromisoformat`` confirms it is a real calendar date."""
    if not isinstance(instance, str):
        return True
    return bool(_RE_DATE.fullmatch(instance) and date.fromisoformat(instance))
|
||||
|
||||
|
||||
@_checks_drafts(draft3="time", raises=ValueError)
def is_draft3_time(instance: object) -> bool:
    """Validate draft 3's ``time`` format (plain ``HH:MM:SS``)."""
    if not isinstance(instance, str):
        return True
    return bool(datetime.strptime(instance, "%H:%M:%S"))  # noqa: DTZ007
|
||||
|
||||
|
||||
with suppress(ImportError):
    import webcolors

    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
    def is_css21_color(instance: object) -> bool:
        """Validate a CSS 2.1 color: try a named color first, then fall
        back to hex notation; failures raise and map via ``raises``."""
        if isinstance(instance, str):
            try:
                webcolors.name_to_hex(instance)
            except ValueError:
                webcolors.normalize_hex(instance.lower())
        return True
|
||||
|
||||
|
||||
with suppress(ImportError):
    import jsonpointer

    @_checks_drafts(
        draft6="json-pointer",
        draft7="json-pointer",
        draft201909="json-pointer",
        draft202012="json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_json_pointer(instance: object) -> bool:
        """Validate a JSON Pointer via the ``jsonpointer`` package."""
        if not isinstance(instance, str):
            return True
        return bool(jsonpointer.JsonPointer(instance))

    # TODO: I don't want to maintain this, so it
    #       needs to go either into jsonpointer (pending
    #       https://github.com/stefankoegl/python-json-pointer/issues/34) or
    #       into a new external library.
    @_checks_drafts(
        draft7="relative-json-pointer",
        draft201909="relative-json-pointer",
        draft202012="relative-json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_relative_json_pointer(instance: object) -> bool:
        # Definition taken from:
        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
        if not isinstance(instance, str):
            return True
        if not instance:
            return False

        # Split the leading non-negative integer prefix from the rest.
        prefix_digits, rest = [], ""
        for position, ch in enumerate(instance):
            if ch.isdigit():
                # digits with a leading "0" are not allowed
                if position > 0 and int(instance[position - 1]) == 0:
                    return False
                prefix_digits.append(ch)
                continue

            if not prefix_digits:
                # The pointer must start with at least one digit.
                return False

            rest = instance[position:]
            break
        return (rest == "#") or bool(jsonpointer.JsonPointer(rest))
|
||||
|
||||
|
||||
with suppress(ImportError):
    import uri_template

    @_checks_drafts(
        draft6="uri-template",
        draft7="uri-template",
        draft201909="uri-template",
        draft202012="uri-template",
    )
    def is_uri_template(instance: object) -> bool:
        """Validate an RFC 6570 URI template via ``uri_template``."""
        if not isinstance(instance, str):
            return True
        return uri_template.validate(instance)
|
||||
|
||||
|
||||
with suppress(ImportError):
    import isoduration

    @_checks_drafts(
        draft201909="duration",
        draft202012="duration",
        raises=isoduration.DurationParsingException,
    )
    def is_duration(instance: object) -> bool:
        """Validate an ISO 8601 duration via ``isoduration``."""
        if not isinstance(instance, str):
            return True
        isoduration.parse_duration(instance)
        # FIXME: See bolsote/isoduration#25 and bolsote/isoduration#21
        return instance.endswith(tuple("DMYWHMS"))
|
||||
|
||||
|
||||
@_checks_drafts(
    draft201909="uuid",
    draft202012="uuid",
    raises=ValueError,
)
def is_uuid(instance: object) -> bool:
    """Validate a UUID: parse it, then require canonical hyphen placement
    (``UUID(...)`` alone accepts several looser spellings)."""
    if not isinstance(instance, str):
        return True
    UUID(instance)
    return all(instance[position] == "-" for position in (8, 13, 18, 23))
|
||||
449
.venv/lib/python3.9/site-packages/jsonschema/_keywords.py
Normal file
449
.venv/lib/python3.9/site-packages/jsonschema/_keywords.py
Normal file
@@ -0,0 +1,449 @@
|
||||
from fractions import Fraction
|
||||
import re
|
||||
|
||||
from jsonschema._utils import (
|
||||
ensure_list,
|
||||
equal,
|
||||
extras_msg,
|
||||
find_additional_properties,
|
||||
find_evaluated_item_indexes_by_schema,
|
||||
find_evaluated_property_keys_by_schema,
|
||||
uniq,
|
||||
)
|
||||
from jsonschema.exceptions import FormatError, ValidationError
|
||||
|
||||
|
||||
def patternProperties(validator, patternProperties, instance, schema):
    """Validate ``patternProperties``: every key matching a pattern must
    satisfy that pattern's subschema."""
    if not validator.is_type(instance, "object"):
        return

    for pattern, subschema in patternProperties.items():
        for key, value in instance.items():
            if re.search(pattern, key):
                yield from validator.descend(
                    value, subschema, path=key, schema_path=pattern,
                )


def propertyNames(validator, propertyNames, instance, schema):
    """Validate ``propertyNames``: each key itself must satisfy the schema."""
    if not validator.is_type(instance, "object"):
        return

    for key in instance:
        yield from validator.descend(instance=key, schema=propertyNames)
|
||||
|
||||
|
||||
def additionalProperties(validator, aP, instance, schema):
    """Validate ``additionalProperties`` against keys not covered by
    ``properties``/``patternProperties``."""
    if not validator.is_type(instance, "object"):
        return

    extras = set(find_additional_properties(instance, schema))

    if validator.is_type(aP, "object"):
        # A schema: each extra key's value must validate against it.
        for extra in extras:
            yield from validator.descend(instance[extra], aP, path=extra)
    elif not aP and extras:
        # ``false``: any extra key is an error; tailor the message when
        # patternProperties is present.
        if "patternProperties" in schema:
            verb = "does" if len(extras) == 1 else "do"
            joined = ", ".join(repr(each) for each in sorted(extras))
            patterns = ", ".join(
                repr(each) for each in sorted(schema["patternProperties"])
            )
            error = f"{joined} {verb} not match any of the regexes: {patterns}"
            yield ValidationError(error)
        else:
            error = "Additional properties are not allowed (%s %s unexpected)"
            yield ValidationError(error % extras_msg(sorted(extras, key=str)))
|
||||
|
||||
|
||||
def items(validator, items, instance, schema):
    """Validate 2020-12 ``items``: applies to elements after ``prefixItems``."""
    if not validator.is_type(instance, "array"):
        return

    prefix = len(schema.get("prefixItems", []))
    total = len(instance)
    extra = total - prefix
    if extra <= 0:
        # Everything was covered by prefixItems.
        return

    if items is False:
        rest = instance[prefix:] if extra != 1 else instance[prefix]
        item = "items" if prefix != 1 else "item"
        yield ValidationError(
            f"Expected at most {prefix} {item} but found {extra} "
            f"extra: {rest!r}",
        )
    else:
        for index in range(prefix, total):
            yield from validator.descend(
                instance=instance[index],
                schema=items,
                path=index,
            )
|
||||
|
||||
|
||||
def const(validator, const, instance, schema):
    """Validate ``const``: the instance must equal the given value."""
    if not equal(instance, const):
        yield ValidationError(f"{const!r} was expected")


def contains(validator, contains, instance, schema):
    """Validate ``contains`` together with ``minContains``/``maxContains``."""
    if not validator.is_type(instance, "array"):
        return

    matches = 0
    min_contains = schema.get("minContains", 1)
    max_contains = schema.get("maxContains", len(instance))

    subvalidator = validator.evolve(schema=contains)

    for element in instance:
        if subvalidator.is_valid(element):
            matches += 1
            if matches > max_contains:
                yield ValidationError(
                    "Too many items match the given schema "
                    f"(expected at most {max_contains})",
                    validator="maxContains",
                    validator_value=max_contains,
                )
                return

    if matches < min_contains:
        if not matches:
            yield ValidationError(
                f"{instance!r} does not contain items "
                "matching the given schema",
            )
        else:
            yield ValidationError(
                "Too few items match the given schema (expected at least "
                f"{min_contains} but only {matches} matched)",
                validator="minContains",
                validator_value=min_contains,
            )
|
||||
|
||||
|
||||
def exclusiveMinimum(validator, minimum, instance, schema):
    """Validate ``exclusiveMinimum``: instance must be strictly greater."""
    if not validator.is_type(instance, "number"):
        return

    if instance <= minimum:
        yield ValidationError(
            f"{instance!r} is less than or equal to "
            f"the minimum of {minimum!r}",
        )


def exclusiveMaximum(validator, maximum, instance, schema):
    """Validate ``exclusiveMaximum``: instance must be strictly smaller."""
    if not validator.is_type(instance, "number"):
        return

    if instance >= maximum:
        yield ValidationError(
            f"{instance!r} is greater than or equal "
            f"to the maximum of {maximum!r}",
        )


def minimum(validator, minimum, instance, schema):
    """Validate ``minimum``: instance must be >= the bound."""
    if not validator.is_type(instance, "number"):
        return

    if instance < minimum:
        yield ValidationError(
            f"{instance!r} is less than the minimum of {minimum!r}",
        )


def maximum(validator, maximum, instance, schema):
    """Validate ``maximum``: instance must be <= the bound."""
    if not validator.is_type(instance, "number"):
        return

    if instance > maximum:
        yield ValidationError(
            f"{instance!r} is greater than the maximum of {maximum!r}",
        )
|
||||
|
||||
|
||||
def multipleOf(validator, dB, instance, schema):
    """Validate ``multipleOf``: the instance must be an exact multiple."""
    if not validator.is_type(instance, "number"):
        return

    if isinstance(dB, float):
        quotient = instance / dB
        try:
            failed = int(quotient) != quotient
        except OverflowError:
            # When `instance` is large and `dB` is less than one,
            # quotient can overflow to infinity; and then casting to int
            # raises an error.
            #
            # In this case we fall back to Fraction logic, which is
            # exact and cannot overflow. The performance is also
            # acceptable: we try the fast all-float option first, and
            # we know that fraction(dB) can have at most a few hundred
            # digits in each part. The worst-case slowdown is therefore
            # for already-slow enormous integers or Decimals.
            failed = (Fraction(instance) / Fraction(dB)).denominator != 1
    else:
        failed = instance % dB

    if failed:
        yield ValidationError(f"{instance!r} is not a multiple of {dB}")
|
||||
|
||||
|
||||
def minItems(validator, mI, instance, schema):
    """Validate ``minItems``."""
    if validator.is_type(instance, "array") and len(instance) < mI:
        message = "should be non-empty" if mI == 1 else "is too short"
        yield ValidationError(f"{instance!r} {message}")


def maxItems(validator, mI, instance, schema):
    """Validate ``maxItems``."""
    if validator.is_type(instance, "array") and len(instance) > mI:
        message = "is expected to be empty" if mI == 0 else "is too long"
        yield ValidationError(f"{instance!r} {message}")


def uniqueItems(validator, uI, instance, schema):
    """Validate ``uniqueItems``: when true, no two elements may be equal."""
    if (
        uI
        and validator.is_type(instance, "array")
        and not uniq(instance)
    ):
        yield ValidationError(f"{instance!r} has non-unique elements")


def pattern(validator, patrn, instance, schema):
    """Validate ``pattern``: the string must match the regex (search)."""
    if (
        validator.is_type(instance, "string")
        and not re.search(patrn, instance)
    ):
        yield ValidationError(f"{instance!r} does not match {patrn!r}")
|
||||
|
||||
|
||||
def format(validator, format, instance, schema):
    """Validate ``format`` via the validator's configured FormatChecker,
    if any; FormatErrors are rewrapped as ValidationErrors."""
    if validator.format_checker is not None:
        try:
            validator.format_checker.check(instance, format)
        except FormatError as error:
            yield ValidationError(error.message, cause=error.cause)


def minLength(validator, mL, instance, schema):
    """Validate ``minLength``."""
    if validator.is_type(instance, "string") and len(instance) < mL:
        message = "should be non-empty" if mL == 1 else "is too short"
        yield ValidationError(f"{instance!r} {message}")


def maxLength(validator, mL, instance, schema):
    """Validate ``maxLength``."""
    if validator.is_type(instance, "string") and len(instance) > mL:
        message = "is expected to be empty" if mL == 0 else "is too long"
        yield ValidationError(f"{instance!r} {message}")
|
||||
|
||||
|
||||
def dependentRequired(validator, dependentRequired, instance, schema):
    """Validate ``dependentRequired``: a present key may require others."""
    if not validator.is_type(instance, "object"):
        return

    for trigger, required_keys in dependentRequired.items():
        if trigger not in instance:
            continue

        for needed in required_keys:
            if needed not in instance:
                yield ValidationError(
                    f"{needed!r} is a dependency of {trigger!r}",
                )


def dependentSchemas(validator, dependentSchemas, instance, schema):
    """Validate ``dependentSchemas``: a present key may impose a schema."""
    if not validator.is_type(instance, "object"):
        return

    for trigger, subschema in dependentSchemas.items():
        if trigger not in instance:
            continue
        yield from validator.descend(
            instance, subschema, schema_path=trigger,
        )


def enum(validator, enums, instance, schema):
    """Validate ``enum``: the instance must equal one of the listed values."""
    if all(not equal(each, instance) for each in enums):
        yield ValidationError(f"{instance!r} is not one of {enums!r}")
|
||||
|
||||
|
||||
def ref(validator, ref, instance, schema):
    """Validate ``$ref`` by delegating to the validator's resolver."""
    yield from validator._validate_reference(ref=ref, instance=instance)


def dynamicRef(validator, dynamicRef, instance, schema):
    """Validate ``$dynamicRef`` by delegating to the validator's resolver."""
    yield from validator._validate_reference(ref=dynamicRef, instance=instance)


def type(validator, types, instance, schema):
    """Validate ``type``: the instance must match at least one listed type."""
    types = ensure_list(types)

    if not any(validator.is_type(instance, type) for type in types):
        reprs = ", ".join(repr(type) for type in types)
        yield ValidationError(f"{instance!r} is not of type {reprs}")
|
||||
|
||||
|
||||
def properties(validator, properties, instance, schema):
    """Validate ``properties``: each named key's value is checked against
    its subschema."""
    if not validator.is_type(instance, "object"):
        return

    for name, subschema in properties.items():
        if name in instance:
            yield from validator.descend(
                instance[name],
                subschema,
                path=name,
                schema_path=name,
            )


def required(validator, required, instance, schema):
    """Validate ``required``: every listed key must be present."""
    if not validator.is_type(instance, "object"):
        return
    for name in required:
        if name not in instance:
            yield ValidationError(f"{name!r} is a required property")


def minProperties(validator, mP, instance, schema):
    """Validate ``minProperties``."""
    if validator.is_type(instance, "object") and len(instance) < mP:
        message = (
            "should be non-empty" if mP == 1
            else "does not have enough properties"
        )
        yield ValidationError(f"{instance!r} {message}")
|
||||
|
||||
|
||||
def maxProperties(validator, mP, instance, schema):
    """Validate ``maxProperties``: the object may have at most ``mP`` keys."""
    # The original body re-checked ``is_type(instance, "object")`` on the
    # line after the early return, which is always true there; the redundant
    # check is removed.
    if not validator.is_type(instance, "object"):
        return
    if len(instance) > mP:
        message = (
            "is expected to be empty" if mP == 0
            else "has too many properties"
        )
        yield ValidationError(f"{instance!r} {message}")
|
||||
|
||||
|
||||
def allOf(validator, allOf, instance, schema):
    """Validate ``allOf``: every subschema must be satisfied."""
    for index, subschema in enumerate(allOf):
        yield from validator.descend(instance, subschema, schema_path=index)


def anyOf(validator, anyOf, instance, schema):
    """Validate ``anyOf``: at least one subschema must be satisfied."""
    collected = []
    for index, subschema in enumerate(anyOf):
        errs = list(validator.descend(instance, subschema, schema_path=index))
        if not errs:
            # First success short-circuits.
            break
        collected.extend(errs)
    else:
        yield ValidationError(
            f"{instance!r} is not valid under any of the given schemas",
            context=collected,
        )
|
||||
|
||||
|
||||
def oneOf(validator, oneOf, instance, schema):
    """Validate ``oneOf``: exactly one subschema must be satisfied."""
    # NOTE: ``remaining`` is a shared iterator on purpose — after the first
    # success, only the *rest* of the subschemas are scanned for extra matches.
    remaining = enumerate(oneOf)
    collected = []
    for index, subschema in remaining:
        errs = list(validator.descend(instance, subschema, schema_path=index))
        if not errs:
            first_valid = subschema
            break
        collected.extend(errs)
    else:
        yield ValidationError(
            f"{instance!r} is not valid under any of the given schemas",
            context=collected,
        )

    more_valid = [
        each for _, each in remaining
        if validator.evolve(schema=each).is_valid(instance)
    ]
    if more_valid:
        more_valid.append(first_valid)
        reprs = ", ".join(repr(schema) for schema in more_valid)
        yield ValidationError(f"{instance!r} is valid under each of {reprs}")
|
||||
|
||||
|
||||
def not_(validator, not_schema, instance, schema):
    """Validate ``not``: the instance must NOT satisfy the subschema."""
    if validator.evolve(schema=not_schema).is_valid(instance):
        yield ValidationError(
            f"{instance!r} should not be valid under {not_schema!r}",
        )


def if_(validator, if_schema, instance, schema):
    """Validate ``if``/``then``/``else`` conditional application."""
    if validator.evolve(schema=if_schema).is_valid(instance):
        if "then" in schema:
            yield from validator.descend(
                instance, schema["then"], schema_path="then",
            )
    elif "else" in schema:
        yield from validator.descend(
            instance, schema["else"], schema_path="else",
        )
|
||||
|
||||
|
||||
def unevaluatedItems(validator, unevaluatedItems, instance, schema):
    """Validate ``unevaluatedItems``: items not covered by any applied
    keyword are not allowed."""
    if not validator.is_type(instance, "array"):
        return
    evaluated = find_evaluated_item_indexes_by_schema(
        validator, instance, schema,
    )
    leftovers = [
        item for index, item in enumerate(instance)
        if index not in evaluated
    ]
    if leftovers:
        error = "Unevaluated items are not allowed (%s %s unexpected)"
        yield ValidationError(error % extras_msg(leftovers))
|
||||
|
||||
|
||||
def unevaluatedProperties(validator, unevaluatedProperties, instance, schema):
    """Validate ``unevaluatedProperties``: keys not covered by any applied
    keyword must satisfy this subschema (or be absent when it is false)."""
    if not validator.is_type(instance, "object"):
        return
    evaluated = find_evaluated_property_keys_by_schema(
        validator, instance, schema,
    )
    failing = []
    for key in instance:
        if key not in evaluated:
            for _ in validator.descend(
                instance[key],
                unevaluatedProperties,
                path=key,
                schema_path=key,
            ):
                # FIXME: Include context for each unevaluated property
                #        indicating why it's invalid under the subschema.
                failing.append(key)  # noqa: PERF401

    if failing:
        if unevaluatedProperties is False:
            error = "Unevaluated properties are not allowed (%s %s unexpected)"
            yield ValidationError(error % extras_msg(sorted(failing, key=str)))
        else:
            error = (
                "Unevaluated properties are not valid under "
                "the given schema (%s %s unevaluated and invalid)"
            )
            yield ValidationError(error % extras_msg(failing))
|
||||
|
||||
|
||||
def prefixItems(validator, prefixItems, instance, schema):
    """Validate ``prefixItems``: positional subschemas for leading items."""
    if not validator.is_type(instance, "array"):
        return

    # zip stops at the shorter of instance / prefixItems.
    for (index, item), subschema in zip(enumerate(instance), prefixItems):
        yield from validator.descend(
            instance=item,
            schema=subschema,
            schema_path=index,
            path=index,
        )
|
||||
449
.venv/lib/python3.9/site-packages/jsonschema/_legacy_keywords.py
Normal file
449
.venv/lib/python3.9/site-packages/jsonschema/_legacy_keywords.py
Normal file
@@ -0,0 +1,449 @@
|
||||
import re
|
||||
|
||||
from referencing.jsonschema import lookup_recursive_ref
|
||||
|
||||
from jsonschema import _utils
|
||||
from jsonschema.exceptions import ValidationError
|
||||
|
||||
|
||||
def ignore_ref_siblings(schema):
    """
    Ignore siblings of ``$ref`` if it is present.

    Otherwise, return all keywords.

    Suitable for use with `create`'s ``applicable_validators`` argument.
    """
    ref = schema.get("$ref")
    if ref is None:
        return schema.items()
    return [("$ref", ref)]
|
||||
|
||||
|
||||
def dependencies_draft3(validator, dependencies, instance, schema):
    """Validate draft 3 ``dependencies``: a dependency may be a schema,
    a single property name, or a list of property names."""
    if not validator.is_type(instance, "object"):
        return

    for trigger, dependency in dependencies.items():
        if trigger not in instance:
            continue

        if validator.is_type(dependency, "object"):
            # Schema dependency.
            yield from validator.descend(
                instance, dependency, schema_path=trigger,
            )
        elif validator.is_type(dependency, "string"):
            # Single required property.
            if dependency not in instance:
                yield ValidationError(
                    f"{dependency!r} is a dependency of {trigger!r}",
                )
        else:
            # List of required properties.
            for needed in dependency:
                if needed not in instance:
                    yield ValidationError(
                        f"{needed!r} is a dependency of {trigger!r}",
                    )
|
||||
|
||||
|
||||
def dependencies_draft4_draft6_draft7(
    validator,
    dependencies,
    instance,
    schema,
):
    """
    Support for the ``dependencies`` keyword from pre-draft 2019-09.

    In later drafts, the keyword was split into separate
    ``dependentRequired`` and ``dependentSchemas`` validators.
    """
    if not validator.is_type(instance, "object"):
        return

    for trigger, dependency in dependencies.items():
        if trigger not in instance:
            continue

        if validator.is_type(dependency, "array"):
            # List of required property names.
            for needed in dependency:
                if needed not in instance:
                    yield ValidationError(
                        f"{needed!r} is a dependency of {trigger!r}",
                    )
        else:
            # Schema dependency.
            yield from validator.descend(
                instance, dependency, schema_path=trigger,
            )
|
||||
|
||||
|
||||
def disallow_draft3(validator, disallow, instance, schema):
    """Validate draft 3 ``disallow``: the instance must match none of the
    listed types."""
    for disallowed in _utils.ensure_list(disallow):
        if validator.evolve(schema={"type": [disallowed]}).is_valid(instance):
            yield ValidationError(
                f"{disallowed!r} is disallowed for {instance!r}",
            )


def extends_draft3(validator, extends, instance, schema):
    """Validate draft 3 ``extends``: one schema or a list of schemas that
    must all be satisfied."""
    if validator.is_type(extends, "object"):
        yield from validator.descend(instance, extends)
        return
    for index, subschema in enumerate(extends):
        yield from validator.descend(instance, subschema, schema_path=index)
|
||||
|
||||
|
||||
def items_draft3_draft4(validator, items, instance, schema):
    """Validate draft 3/4 ``items``: one schema for every element, or a
    positional list of schemas."""
    if not validator.is_type(instance, "array"):
        return

    if validator.is_type(items, "object"):
        for index, item in enumerate(instance):
            yield from validator.descend(item, items, path=index)
    else:
        for (index, item), subschema in zip(enumerate(instance), items):
            yield from validator.descend(
                item, subschema, path=index, schema_path=index,
            )
|
||||
|
||||
|
||||
def additionalItems(validator, aI, instance, schema):
    """
    Validate the ``additionalItems`` keyword (drafts 3 through 7).

    Only applies when the instance is an array and ``items`` is an array of
    positional schemas; a single-object ``items`` already covers every item.
    """
    if (
        not validator.is_type(instance, "array")
        or validator.is_type(schema.get("items", {}), "object")
    ):
        return

    # Number of positionally-covered items; anything past it is "additional".
    # (The original recomputed len(schema.get("items", [])) twice below.)
    len_items = len(schema.get("items", []))
    if validator.is_type(aI, "object"):
        # A schema: every trailing item must validate against it.
        for index, item in enumerate(instance[len_items:], start=len_items):
            yield from validator.descend(item, aI, path=index)
    elif not aI and len(instance) > len_items:
        # ``additionalItems: false``: any trailing item is an error.
        error = "Additional items are not allowed (%s %s unexpected)"
        yield ValidationError(
            error % _utils.extras_msg(instance[len_items:]),
        )
|
||||
|
||||
|
||||
def items_draft6_draft7_draft201909(validator, items, instance, schema):
    """Support for ``items`` in drafts 6 through 2019-09."""
    if not validator.is_type(instance, "array"):
        return

    if validator.is_type(items, "array"):
        # Positional schemas: validate pairwise up to the shorter length.
        for position, (element, subschema) in enumerate(zip(instance, items)):
            yield from validator.descend(
                element, subschema, path=position, schema_path=position,
            )
    else:
        # A single schema applied to every element.
        for position, element in enumerate(instance):
            yield from validator.descend(element, items, path=position)
|
||||
|
||||
|
||||
def minimum_draft3_draft4(validator, minimum, instance, schema):
    """Support for ``minimum`` with draft 3/4's boolean ``exclusiveMinimum``."""
    if not validator.is_type(instance, "number"):
        return

    exclusive = schema.get("exclusiveMinimum", False)
    too_small = instance <= minimum if exclusive else instance < minimum

    if too_small:
        comparison = "less than or equal to" if exclusive else "less than"
        yield ValidationError(
            f"{instance!r} is {comparison} the minimum of {minimum!r}",
        )
|
||||
|
||||
|
||||
def maximum_draft3_draft4(validator, maximum, instance, schema):
    """Support for ``maximum`` with draft 3/4's boolean ``exclusiveMaximum``."""
    if not validator.is_type(instance, "number"):
        return

    exclusive = schema.get("exclusiveMaximum", False)
    too_large = instance >= maximum if exclusive else instance > maximum

    if too_large:
        comparison = "greater than or equal to" if exclusive else "greater than"
        yield ValidationError(
            f"{instance!r} is {comparison} the maximum of {maximum!r}",
        )
|
||||
|
||||
|
||||
def properties_draft3(validator, properties, instance, schema):
    """
    Support for draft 3's ``properties`` keyword.

    Unlike later drafts, a draft 3 property subschema may itself carry a
    boolean ``required`` flag, reported here as a synthetic error.
    """
    if not validator.is_type(instance, "object"):
        return

    for property, subschema in properties.items():
        if property in instance:
            yield from validator.descend(
                instance[property],
                subschema,
                path=property,
                schema_path=property,
            )
        elif subschema.get("required", False):
            # Build the error by hand so that its paths point at the missing
            # property and the nested ``required`` flag, not at this keyword.
            error = ValidationError(f"{property!r} is a required property")
            error._set(
                validator="required",
                validator_value=subschema["required"],
                instance=instance,
                schema=schema,
            )
            error.path.appendleft(property)
            error.schema_path.extend([property, "required"])
            yield error
|
||||
|
||||
|
||||
def type_draft3(validator, types, instance, schema):
    """
    Support for draft 3's ``type`` keyword, whose entries may be schemas.

    Validation succeeds (returns without yielding) as soon as the instance
    matches any listed type name or validates against any listed subschema.
    """
    types = _utils.ensure_list(types)

    # Collect errors from schema-valued entries so they can be attached as
    # context if every alternative fails.
    all_errors = []
    for index, type in enumerate(types):
        if validator.is_type(type, "object"):
            errors = list(validator.descend(instance, type, schema_path=index))
            if not errors:
                return
            all_errors.extend(errors)
        elif validator.is_type(instance, type):
            return

    reprs = []
    for type in types:
        try:
            # Schema-valued entries may carry a human-readable "name".
            reprs.append(repr(type["name"]))
        except Exception:  # noqa: BLE001
            reprs.append(repr(type))
    yield ValidationError(
        f"{instance!r} is not of type {', '.join(reprs)}",
        context=all_errors,
    )
|
||||
|
||||
|
||||
def contains_draft6_draft7(validator, contains, instance, schema):
    """
    Support for ``contains`` before draft 2019-09 (no min/maxContains).

    Yields a single error when no element of the array validates against
    the ``contains`` subschema.
    """
    if not validator.is_type(instance, "array"):
        return

    # Hoist the subvalidator: the original re-ran validator.evolve(...) for
    # every element inside the any(...) generator expression.
    matcher = validator.evolve(schema=contains)
    if not any(matcher.is_valid(element) for element in instance):
        yield ValidationError(
            f"None of {instance!r} are valid under the given schema",
        )
|
||||
|
||||
|
||||
def recursiveRef(validator, recursiveRef, instance, schema):
    """Support for draft 2019-09's ``$recursiveRef`` keyword."""
    resolved = lookup_recursive_ref(validator._resolver)
    # Descend with the resolved target's own resolver so nested references
    # resolve relative to it.
    yield from validator.descend(
        instance,
        resolved.contents,
        resolver=resolved.resolver,
    )
|
||||
|
||||
|
||||
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
    """
    Get all indexes of items that get evaluated under the current schema.

    Covers all keywords related to unevaluatedItems: items, prefixItems, if,
    then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_indexes = []

    # Indexes evaluated via a $ref'd schema count as evaluated here too.
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    if "$recursiveRef" in schema:
        resolved = lookup_recursive_ref(validator._resolver)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    if "items" in schema:
        # With additionalItems present, every item is covered one way or the
        # other; likewise when ``items`` is a single (object) schema.
        if "additionalItems" in schema:
            return list(range(len(instance)))

        if validator.is_type(schema["items"], "object"):
            return list(range(len(instance)))
        # Positional ``items``: only the covered prefix is evaluated.
        evaluated_indexes += list(range(len(schema["items"])))

    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_indexes += find_evaluated_item_indexes_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["else"],
            )

    # These keywords evaluate exactly the elements they successfully match.
    for keyword in ["contains", "unevaluatedItems"]:
        if keyword in schema:
            for k, v in enumerate(instance):
                if validator.evolve(schema=schema[keyword]).is_valid(v):
                    evaluated_indexes.append(k)

    # Combinators contribute only when the subschema validates cleanly.
    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = next(validator.descend(instance, subschema), None)
                if errs is None:
                    evaluated_indexes += find_evaluated_item_indexes_by_schema(
                        validator, instance, subschema,
                    )

    return evaluated_indexes
|
||||
|
||||
|
||||
def unevaluatedItems_draft2019(validator, unevaluatedItems, instance, schema):
    """Support for draft 2019-09's ``unevaluatedItems`` keyword."""
    if not validator.is_type(instance, "array"):
        return
    evaluated = find_evaluated_item_indexes_by_schema(
        validator, instance, schema,
    )
    leftovers = [
        element
        for position, element in enumerate(instance)
        if position not in evaluated
    ]
    if leftovers:
        error = "Unevaluated items are not allowed (%s %s unexpected)"
        yield ValidationError(error % _utils.extras_msg(leftovers))
|
||||
|
||||
|
||||
def find_evaluated_property_keys_by_schema(validator, instance, schema):
    """
    Get all property names that get evaluated under the current schema.

    Covers the keywords relevant to ``unevaluatedProperties``: properties,
    additionalProperties, unevaluatedProperties, patternProperties,
    dependentSchemas, allOf, oneOf, anyOf, if, then, else, plus $ref and
    $recursiveRef indirection.
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_keys = []

    # Keys evaluated through a $ref'd schema count as evaluated here too.
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    if "$recursiveRef" in schema:
        resolved = lookup_recursive_ref(validator._resolver)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    for keyword in [
        "properties", "additionalProperties", "unevaluatedProperties",
    ]:
        if keyword in schema:
            schema_value = schema[keyword]
            # A literal ``true`` evaluates every property of the instance.
            if validator.is_type(schema_value, "boolean") and schema_value:
                evaluated_keys += instance.keys()

            elif validator.is_type(schema_value, "object"):
                for property in schema_value:
                    if property in instance:
                        evaluated_keys.append(property)

    if "patternProperties" in schema:
        for property in instance:
            for pattern in schema["patternProperties"]:
                if re.search(pattern, property):
                    evaluated_keys.append(property)

    if "dependentSchemas" in schema:
        for property, subschema in schema["dependentSchemas"].items():
            if property not in instance:
                continue
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, subschema,
            )

    # Combinators contribute only when the subschema validates cleanly.
    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = next(validator.descend(instance, subschema), None)
                if errs is None:
                    evaluated_keys += find_evaluated_property_keys_by_schema(
                        validator, instance, subschema,
                    )

    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_keys += find_evaluated_property_keys_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["else"],
            )

    return evaluated_keys
|
||||
|
||||
|
||||
def unevaluatedProperties_draft2019(validator, uP, instance, schema):
    """
    Support for draft 2019-09's ``unevaluatedProperties`` keyword.

    Properties not evaluated elsewhere in the schema are validated against
    ``uP``; those that fail are reported together in a single error.
    """
    if not validator.is_type(instance, "object"):
        return
    evaluated_keys = find_evaluated_property_keys_by_schema(
        validator, instance, schema,
    )
    unevaluated_keys = []
    for property in instance:
        if property not in evaluated_keys:
            # A non-empty descend means the property failed the subschema.
            for _ in validator.descend(
                instance[property],
                uP,
                path=property,
                schema_path=property,
            ):
                # FIXME: Include context for each unevaluated property
                # indicating why it's invalid under the subschema.
                unevaluated_keys.append(property)  # noqa: PERF401

    if unevaluated_keys:
        if uP is False:
            error = "Unevaluated properties are not allowed (%s %s unexpected)"
            # Sort for a deterministic message even with mixed-type keys.
            extras = sorted(unevaluated_keys, key=str)
            yield ValidationError(error % _utils.extras_msg(extras))
        else:
            error = (
                "Unevaluated properties are not valid under "
                "the given schema (%s %s unevaluated and invalid)"
            )
            yield ValidationError(error % _utils.extras_msg(unevaluated_keys))
|
||||
204
.venv/lib/python3.9/site-packages/jsonschema/_types.py
Normal file
204
.venv/lib/python3.9/site-packages/jsonschema/_types.py
Normal file
@@ -0,0 +1,204 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
import numbers
|
||||
|
||||
from attrs import evolve, field, frozen
|
||||
from rpds import HashTrieMap
|
||||
|
||||
from jsonschema.exceptions import UndefinedTypeCheck
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Mapping
|
||||
from typing import Any, Callable
|
||||
|
||||
|
||||
# unfortunately, the type of HashTrieMap is generic, and if used as an attrs
|
||||
# converter, the generic type is presented to mypy, which then fails to match
|
||||
# the concrete type of a type checker mapping
|
||||
# this "do nothing" wrapper presents the correct information to mypy
|
||||
def _typed_map_converter(
    init_val: Mapping[str, Callable[[TypeChecker, Any], bool]],
) -> HashTrieMap[str, Callable[[TypeChecker, Any], bool]]:
    """Convert a plain mapping into an immutable ``HashTrieMap``."""
    return HashTrieMap.convert(init_val)
|
||||
|
||||
|
||||
def is_array(checker, instance):
    """JSON ``array`` maps to the Python ``list`` type."""
    return isinstance(instance, list)
|
||||
|
||||
|
||||
def is_bool(checker, instance):
    """JSON ``boolean`` maps to the Python ``bool`` type."""
    return isinstance(instance, bool)
|
||||
|
||||
|
||||
def is_integer(checker, instance):
    """JSON ``integer``: a Python ``int``, excluding ``bool`` (a subclass)."""
    return isinstance(instance, int) and not isinstance(instance, bool)
|
||||
|
||||
|
||||
def is_null(checker, instance):
    """JSON ``null`` maps to Python's ``None``."""
    return instance is None
|
||||
|
||||
|
||||
def is_number(checker, instance):
    """JSON ``number``: any ``numbers.Number``, excluding ``bool``."""
    return not isinstance(instance, bool) and isinstance(
        instance, numbers.Number,
    )
|
||||
|
||||
|
||||
def is_object(checker, instance):
    """JSON ``object`` maps to the Python ``dict`` type."""
    return isinstance(instance, dict)
|
||||
|
||||
|
||||
def is_string(checker, instance):
    """JSON ``string`` maps to the Python ``str`` type."""
    return isinstance(instance, str)
|
||||
|
||||
|
||||
def is_any(checker, instance):
    """Draft 3's ``any`` type: everything matches."""
    return True
|
||||
|
||||
|
||||
@frozen(repr=False)
class TypeChecker:
    """
    A :kw:`type` property checker.

    A `TypeChecker` performs type checking for a `Validator`, converting
    between the defined JSON Schema types and some associated Python types or
    objects.

    Modifying the behavior just mentioned by redefining which Python objects
    are considered to be of which JSON Schema types can be done using
    `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be
    removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`.

    Arguments:

        type_checkers:

            The initial mapping of types to their checking functions.

    """

    # Immutable mapping of JSON Schema type name -> predicate taking
    # (checker, instance) and returning a bool.
    _type_checkers: HashTrieMap[
        str, Callable[[TypeChecker, Any], bool],
    ] = field(default=HashTrieMap(), converter=_typed_map_converter)

    def __repr__(self):
        # Sorted for a deterministic repr regardless of insertion order.
        types = ", ".join(repr(k) for k in sorted(self._type_checkers))
        return f"<{self.__class__.__name__} types={{{types}}}>"

    def is_type(self, instance, type: str) -> bool:
        """
        Check if the instance is of the appropriate type.

        Arguments:

            instance:

                The instance to check

            type:

                The name of the type that is expected.

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if ``type`` is unknown to this object.

        """
        try:
            fn = self._type_checkers[type]
        except KeyError:
            raise UndefinedTypeCheck(type) from None

        return fn(self, instance)

    def redefine(self, type: str, fn) -> TypeChecker:
        """
        Produce a new checker with the given type redefined.

        Arguments:

            type:

                The name of the type to check.

            fn (collections.abc.Callable):

                A callable taking exactly two parameters - the type
                checker calling the function and the instance to check.
                The function should return true if instance is of this
                type and false otherwise.

        """
        return self.redefine_many({type: fn})

    def redefine_many(self, definitions=()) -> TypeChecker:
        """
        Produce a new checker with the given types redefined.

        Arguments:

            definitions (dict):

                A dictionary mapping types to their checking functions.

        """
        # HashTrieMap.update returns a new map; self is never mutated.
        type_checkers = self._type_checkers.update(definitions)
        return evolve(self, type_checkers=type_checkers)

    def remove(self, *types) -> TypeChecker:
        """
        Produce a new checker with the given types forgotten.

        Arguments:

            types:

                the names of the types to remove.

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if any given type is unknown to this object

        """
        type_checkers = self._type_checkers
        for each in types:
            try:
                type_checkers = type_checkers.remove(each)
            except KeyError:
                raise UndefinedTypeCheck(each) from None
        return evolve(self, type_checkers=type_checkers)
|
||||
|
||||
|
||||
# Draft 3 defines all eight type names, including the "any" type.
draft3_type_checker = TypeChecker(
    {
        "any": is_any,
        "array": is_array,
        "boolean": is_bool,
        "integer": is_integer,
        "object": is_object,
        "null": is_null,
        "number": is_number,
        "string": is_string,
    },
)
# Draft 4 dropped "any".
draft4_type_checker = draft3_type_checker.remove("any")
# Draft 6 additionally accepts floats with zero fractional part as integers.
draft6_type_checker = draft4_type_checker.redefine(
    "integer",
    lambda checker, instance: (
        is_integer(checker, instance)
        or (isinstance(instance, float) and instance.is_integer())
    ),
)
# Later drafts reuse draft 6's type semantics unchanged.
draft7_type_checker = draft6_type_checker
draft201909_type_checker = draft7_type_checker
draft202012_type_checker = draft201909_type_checker
|
||||
29
.venv/lib/python3.9/site-packages/jsonschema/_typing.py
Normal file
29
.venv/lib/python3.9/site-packages/jsonschema/_typing.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""
|
||||
Some (initially private) typing helpers for jsonschema's types.
|
||||
"""
|
||||
from collections.abc import Iterable
|
||||
from typing import Any, Callable, Protocol, Union
|
||||
|
||||
import referencing.jsonschema
|
||||
|
||||
from jsonschema.protocols import Validator
|
||||
|
||||
|
||||
class SchemaKeywordValidator(Protocol):
    """Callback signature for validating a single schema keyword."""

    def __call__(
        self,
        validator: Validator,
        value: Any,
        instance: Any,
        schema: referencing.jsonschema.Schema,
    ) -> None:
        ...
|
||||
|
||||
|
||||
# A callable mapping a schema to a string (presumably its "$id"/URI) or
# None — NOTE(review): confirm against call sites.
id_of = Callable[[referencing.jsonschema.Schema], Union[str, None]]


# A callable mapping a schema to an iterable of (keyword, value) pairs.
ApplicableValidators = Callable[
    [referencing.jsonschema.Schema],
    Iterable[tuple[str, Any]],
]
|
||||
355
.venv/lib/python3.9/site-packages/jsonschema/_utils.py
Normal file
355
.venv/lib/python3.9/site-packages/jsonschema/_utils.py
Normal file
@@ -0,0 +1,355 @@
|
||||
from collections.abc import Mapping, MutableMapping, Sequence
|
||||
from urllib.parse import urlsplit
|
||||
import itertools
|
||||
import re
|
||||
|
||||
|
||||
class URIDict(MutableMapping):
    """
    Dictionary which uses normalized URIs as keys.
    """

    def normalize(self, uri):
        # Canonicalize by splitting and re-joining the URI.
        return urlsplit(uri).geturl()

    def __init__(self, *args, **kwargs):
        # NOTE(review): initial items are written to the backing dict
        # directly and so bypass normalize() — confirm that is intentional.
        self.store = dict()
        self.store.update(*args, **kwargs)

    def __getitem__(self, uri):
        return self.store[self.normalize(uri)]

    def __setitem__(self, uri, value):
        self.store[self.normalize(uri)] = value

    def __delitem__(self, uri):
        del self.store[self.normalize(uri)]

    def __iter__(self):
        return iter(self.store)

    def __len__(self):  # pragma: no cover -- untested, but to be removed
        return len(self.store)

    def __repr__(self):  # pragma: no cover -- untested, but to be removed
        return repr(self.store)
|
||||
|
||||
|
||||
class Unset:
    """
    An as-of-yet unset attribute or unprovided default parameter.

    Intended as a sentinel for places where ``None`` is a meaningful value.
    """

    def __repr__(self):  # pragma: no cover
        return "<unset>"
|
||||
|
||||
|
||||
def format_as_index(container, indices):
    """
    Construct a single string containing indexing operations for the indices.

    For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"]

    Arguments:

        container (str):

            A word to use for the thing being indexed

        indices (sequence):

            The indices to format.

    """
    if not indices:
        return container
    joined = "][".join(repr(index) for index in indices)
    return f"{container}[{joined}]"
|
||||
|
||||
|
||||
def find_additional_properties(instance, schema):
    """
    Return the set of additional properties for the given ``instance``.

    Weeds out properties that should have been validated by ``properties`` and
    / or ``patternProperties``.

    Assumes ``instance`` is dict-like already.
    """
    known = schema.get("properties", {})
    # All patterns combined into one alternation; empty string if none.
    combined = "|".join(schema.get("patternProperties", {}))
    for name in instance:
        if name in known:
            continue
        if combined and re.search(combined, name):
            continue
        yield name
|
||||
|
||||
|
||||
def extras_msg(extras):
    """
    Create an error message for extra items or properties.
    """
    listed = ", ".join(repr(extra) for extra in extras)
    verb = "were" if len(extras) != 1 else "was"
    return listed, verb
|
||||
|
||||
|
||||
def ensure_list(thing):
    """
    Wrap ``thing`` in a list if it's a single str.

    Otherwise, return it unchanged.
    """
    return [thing] if isinstance(thing, str) else thing
|
||||
|
||||
|
||||
def _mapping_equal(one, two):
|
||||
"""
|
||||
Check if two mappings are equal using the semantics of `equal`.
|
||||
"""
|
||||
if len(one) != len(two):
|
||||
return False
|
||||
return all(
|
||||
key in two and equal(value, two[key])
|
||||
for key, value in one.items()
|
||||
)
|
||||
|
||||
|
||||
def _sequence_equal(one, two):
|
||||
"""
|
||||
Check if two sequences are equal using the semantics of `equal`.
|
||||
"""
|
||||
if len(one) != len(two):
|
||||
return False
|
||||
return all(equal(i, j) for i, j in zip(one, two))
|
||||
|
||||
|
||||
def equal(one, two):
    """
    Check if two things are equal evading some Python type hierarchy semantics.

    Specifically in JSON Schema, evade `bool` inheriting from `int`,
    recursing into sequences to do the same.
    """
    if one is two:
        return True
    # Strings are Sequences too — compare directly before the sequence
    # branch can recurse character-by-character.
    if isinstance(one, str) or isinstance(two, str):
        return one == two
    if isinstance(one, Sequence) and isinstance(two, Sequence):
        return _sequence_equal(one, two)
    if isinstance(one, Mapping) and isinstance(two, Mapping):
        return _mapping_equal(one, two)
    # unbool keeps True != 1 and False != 0 here.
    return unbool(one) == unbool(two)
|
||||
|
||||
|
||||
def unbool(element, true=object(), false=object()):
    """
    A hack to make True and 1 and False and 0 unique for ``uniq``.
    """
    # The default sentinels are created once at definition time, so every
    # call maps True/False to the same stand-in objects.
    if element is True:
        return true
    if element is False:
        return false
    return element
|
||||
|
||||
|
||||
def uniq(container):
    """
    Check if all of a container's elements are unique.

    Tries to rely on the container being recursively sortable, or otherwise
    falls back on (slow) brute force.
    """
    try:
        sort = sorted(unbool(i) for i in container)
        sliced = itertools.islice(sort, 1, None)

        # After sorting, duplicates are adjacent; compare neighbor pairs.
        for i, j in zip(sort, sliced):
            if equal(i, j):
                return False

    except (NotImplementedError, TypeError):
        # Unsortable (mixed/unorderable types): O(n^2) pairwise comparison.
        seen = []
        for e in container:
            e = unbool(e)

            for i in seen:
                if equal(i, e):
                    return False

            seen.append(e)
    return True
|
||||
|
||||
|
||||
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
    """
    Get all indexes of items that get evaluated under the current schema.

    Covers all keywords related to unevaluatedItems: items, prefixItems, if,
    then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_indexes = []

    if "items" in schema:
        # 2020-12 ``items`` applies to every element beyond ``prefixItems``,
        # so the whole instance counts as evaluated.
        return list(range(len(instance)))

    # Indexes evaluated via a $ref'd schema count as evaluated here too.
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    dynamicRef = schema.get("$dynamicRef")
    if dynamicRef is not None:
        resolved = validator._resolver.lookup(dynamicRef)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    if "prefixItems" in schema:
        # Positional schemas cover exactly their own length of the instance.
        evaluated_indexes += list(range(len(schema["prefixItems"])))

    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_indexes += find_evaluated_item_indexes_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["else"],
            )

    # These keywords evaluate exactly the elements they successfully match.
    for keyword in ["contains", "unevaluatedItems"]:
        if keyword in schema:
            for k, v in enumerate(instance):
                if validator.evolve(schema=schema[keyword]).is_valid(v):
                    evaluated_indexes.append(k)

    # Combinators contribute only when the subschema validates cleanly.
    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = next(validator.descend(instance, subschema), None)
                if errs is None:
                    evaluated_indexes += find_evaluated_item_indexes_by_schema(
                        validator, instance, subschema,
                    )

    return evaluated_indexes
|
||||
|
||||
|
||||
def find_evaluated_property_keys_by_schema(validator, instance, schema):
    """
    Get all keys of items that get evaluated under the current schema.

    Covers all keywords related to unevaluatedProperties: properties,
    additionalProperties, unevaluatedProperties, patternProperties,
    dependentSchemas, allOf, oneOf, anyOf, if, then, else
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_keys = []

    # Keys evaluated via a $ref'd schema count as evaluated here too.
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    dynamicRef = schema.get("$dynamicRef")
    if dynamicRef is not None:
        resolved = validator._resolver.lookup(dynamicRef)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    properties = schema.get("properties")
    if validator.is_type(properties, "object"):
        # Only names present in both the schema and the instance.
        evaluated_keys += properties.keys() & instance.keys()

    for keyword in ["additionalProperties", "unevaluatedProperties"]:
        if (subschema := schema.get(keyword)) is None:
            continue
        # A property is evaluated by these keywords only if it validates.
        evaluated_keys += (
            key
            for key, value in instance.items()
            if is_valid(validator.descend(value, subschema))
        )

    if "patternProperties" in schema:
        for property in instance:
            for pattern in schema["patternProperties"]:
                if re.search(pattern, property):
                    evaluated_keys.append(property)

    if "dependentSchemas" in schema:
        for property, subschema in schema["dependentSchemas"].items():
            if property not in instance:
                continue
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, subschema,
            )

    # Combinators contribute only when the subschema validates cleanly.
    for keyword in ["allOf", "oneOf", "anyOf"]:
        for subschema in schema.get(keyword, []):
            if not is_valid(validator.descend(instance, subschema)):
                continue
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, subschema,
            )

    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_keys += find_evaluated_property_keys_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["else"],
            )

    return evaluated_keys
|
||||
|
||||
|
||||
def is_valid(errs_it):
    """Whether there are no errors in the given iterator."""
    first_error = next(errs_it, None)
    return first_error is None
|
||||
@@ -0,0 +1,5 @@
|
||||
"""
|
||||
Benchmarks for validation.
|
||||
|
||||
This package is *not* public API.
|
||||
"""
|
||||
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
A benchmark for comparing equivalent validation of `const` and `enum`.
|
||||
"""
|
||||
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema import Draft202012Validator
|
||||
|
||||
value = [37] * 100
|
||||
const_schema = {"const": list(value)}
|
||||
enum_schema = {"enum": [list(value)]}
|
||||
|
||||
valid = list(value)
|
||||
invalid = [*valid, 73]
|
||||
|
||||
const = Draft202012Validator(const_schema)
|
||||
enum = Draft202012Validator(enum_schema)
|
||||
|
||||
assert const.is_valid(valid)
|
||||
assert enum.is_valid(valid)
|
||||
assert not const.is_valid(invalid)
|
||||
assert not enum.is_valid(invalid)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
runner = Runner()
|
||||
runner.bench_func("const valid", lambda: const.is_valid(valid))
|
||||
runner.bench_func("const invalid", lambda: const.is_valid(invalid))
|
||||
runner.bench_func("enum valid", lambda: enum.is_valid(valid))
|
||||
runner.bench_func("enum invalid", lambda: enum.is_valid(invalid))
|
||||
@@ -0,0 +1,28 @@
|
||||
"""
|
||||
A benchmark for validation of the `contains` keyword.
|
||||
"""
|
||||
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema import Draft202012Validator
|
||||
|
||||
schema = {
|
||||
"type": "array",
|
||||
"contains": {"const": 37},
|
||||
}
|
||||
validator = Draft202012Validator(schema)
|
||||
|
||||
size = 1000
|
||||
beginning = [37] + [0] * (size - 1)
|
||||
middle = [0] * (size // 2) + [37] + [0] * (size // 2)
|
||||
end = [0] * (size - 1) + [37]
|
||||
invalid = [0] * size
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
runner = Runner()
|
||||
runner.bench_func("baseline", lambda: validator.is_valid([]))
|
||||
runner.bench_func("beginning", lambda: validator.is_valid(beginning))
|
||||
runner.bench_func("middle", lambda: validator.is_valid(middle))
|
||||
runner.bench_func("end", lambda: validator.is_valid(end))
|
||||
runner.bench_func("invalid", lambda: validator.is_valid(invalid))
|
||||
@@ -0,0 +1,25 @@
|
||||
"""
|
||||
A performance benchmark using the example from issue #232.
|
||||
|
||||
See https://github.com/python-jsonschema/jsonschema/pull/232.
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
from pyperf import Runner
|
||||
from referencing import Registry
|
||||
|
||||
from jsonschema.tests._suite import Version
|
||||
import jsonschema
|
||||
|
||||
issue232 = Version(
|
||||
path=Path(__file__).parent / "issue232",
|
||||
remotes=Registry(),
|
||||
name="issue232",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
issue232.benchmark(
|
||||
runner=Runner(),
|
||||
Validator=jsonschema.Draft4Validator,
|
||||
)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
"""
|
||||
A performance benchmark using the official test suite.
|
||||
|
||||
This benchmarks jsonschema using every valid example in the
|
||||
JSON-Schema-Test-Suite. It will take some time to complete.
|
||||
"""
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema.tests._suite import Suite
|
||||
|
||||
if __name__ == "__main__":
|
||||
Suite().benchmark(runner=Runner())
|
||||
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Validating highly nested schemas shouldn't cause exponential time blowups.
|
||||
|
||||
See https://github.com/python-jsonschema/jsonschema/issues/1097.
|
||||
"""
|
||||
from itertools import cycle
|
||||
|
||||
from jsonschema.validators import validator_for
|
||||
|
||||
metaschemaish = {
|
||||
"$id": "https://example.com/draft/2020-12/schema/strict",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": True,
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": True,
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
|
||||
"$ref": "https://json-schema.org/draft/2020-12/schema",
|
||||
"unevaluatedProperties": False,
|
||||
}
|
||||
|
||||
|
||||
def nested_schema(levels):
|
||||
"""
|
||||
Produce a schema which validates deeply nested objects and arrays.
|
||||
"""
|
||||
|
||||
names = cycle(["foo", "bar", "baz", "quux", "spam", "eggs"])
|
||||
schema = {"type": "object", "properties": {"ham": {"type": "string"}}}
|
||||
for _, name in zip(range(levels - 1), names):
|
||||
schema = {"type": "object", "properties": {name: schema}}
|
||||
return schema
|
||||
|
||||
|
||||
validator = validator_for(metaschemaish)(metaschemaish)
|
||||
|
||||
if __name__ == "__main__":
|
||||
from pyperf import Runner
|
||||
runner = Runner()
|
||||
|
||||
not_nested = nested_schema(levels=1)
|
||||
runner.bench_func("not nested", lambda: validator.is_valid(not_nested))
|
||||
|
||||
for levels in range(1, 11, 3):
|
||||
schema = nested_schema(levels=levels)
|
||||
runner.bench_func(
|
||||
f"nested * {levels}",
|
||||
lambda schema=schema: validator.is_valid(schema),
|
||||
)
|
||||
@@ -0,0 +1,42 @@
|
||||
"""
|
||||
A benchmark which tries to compare the possible slow subparts of validation.
|
||||
"""
|
||||
from referencing import Registry
|
||||
from referencing.jsonschema import DRAFT202012
|
||||
from rpds import HashTrieMap, HashTrieSet
|
||||
|
||||
from jsonschema import Draft202012Validator
|
||||
|
||||
schema = {
|
||||
"type": "array",
|
||||
"minLength": 1,
|
||||
"maxLength": 1,
|
||||
"items": {"type": "integer"},
|
||||
}
|
||||
|
||||
hmap = HashTrieMap()
|
||||
hset = HashTrieSet()
|
||||
|
||||
registry = Registry()
|
||||
|
||||
v = Draft202012Validator(schema)
|
||||
|
||||
|
||||
def registry_data_structures():
|
||||
return hmap.insert("foo", "bar"), hset.insert("foo")
|
||||
|
||||
|
||||
def registry_add():
|
||||
resource = DRAFT202012.create_resource(schema)
|
||||
return registry.with_resource(uri="urn:example", resource=resource)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from pyperf import Runner
|
||||
runner = Runner()
|
||||
|
||||
runner.bench_func("HashMap/HashSet insertion", registry_data_structures)
|
||||
runner.bench_func("Registry insertion", registry_add)
|
||||
runner.bench_func("Success", lambda: v.is_valid([1]))
|
||||
runner.bench_func("Failure", lambda: v.is_valid(["foo"]))
|
||||
runner.bench_func("Metaschema validation", lambda: v.check_schema(schema))
|
||||
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
An unused schema registry should not cause slower validation.
|
||||
|
||||
"Unused" here means one where no reference resolution is occurring anyhow.
|
||||
|
||||
See https://github.com/python-jsonschema/jsonschema/issues/1088.
|
||||
"""
|
||||
from pyperf import Runner
|
||||
from referencing import Registry
|
||||
from referencing.jsonschema import DRAFT201909
|
||||
|
||||
from jsonschema import Draft201909Validator
|
||||
|
||||
registry = Registry().with_resource(
|
||||
"urn:example:foo",
|
||||
DRAFT201909.create_resource({}),
|
||||
)
|
||||
|
||||
schema = {"$ref": "https://json-schema.org/draft/2019-09/schema"}
|
||||
instance = {"maxLength": 4}
|
||||
|
||||
no_registry = Draft201909Validator(schema)
|
||||
with_useless_registry = Draft201909Validator(schema, registry=registry)
|
||||
|
||||
if __name__ == "__main__":
|
||||
runner = Runner()
|
||||
|
||||
runner.bench_func(
|
||||
"no registry",
|
||||
lambda: no_registry.is_valid(instance),
|
||||
)
|
||||
runner.bench_func(
|
||||
"useless registry",
|
||||
lambda: with_useless_registry.is_valid(instance),
|
||||
)
|
||||
@@ -0,0 +1,106 @@
|
||||
|
||||
"""
|
||||
A benchmark for validation of applicators containing lots of useless schemas.
|
||||
|
||||
Signals a small possible optimization to remove all such schemas ahead of time.
|
||||
"""
|
||||
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema import Draft202012Validator as Validator
|
||||
|
||||
NUM_USELESS = 100000
|
||||
|
||||
subschema = {"const": 37}
|
||||
|
||||
valid = 37
|
||||
invalid = 12
|
||||
|
||||
baseline = Validator(subschema)
|
||||
|
||||
|
||||
# These should be indistinguishable from just `subschema`
|
||||
by_name = {
|
||||
"single subschema": {
|
||||
"anyOf": Validator({"anyOf": [subschema]}),
|
||||
"allOf": Validator({"allOf": [subschema]}),
|
||||
"oneOf": Validator({"oneOf": [subschema]}),
|
||||
},
|
||||
"redundant subschemas": {
|
||||
"anyOf": Validator({"anyOf": [subschema] * NUM_USELESS}),
|
||||
"allOf": Validator({"allOf": [subschema] * NUM_USELESS}),
|
||||
},
|
||||
"useless successful subschemas (beginning)": {
|
||||
"anyOf": Validator({"anyOf": [subschema, *[True] * NUM_USELESS]}),
|
||||
"allOf": Validator({"allOf": [subschema, *[True] * NUM_USELESS]}),
|
||||
},
|
||||
"useless successful subschemas (middle)": {
|
||||
"anyOf": Validator(
|
||||
{
|
||||
"anyOf": [
|
||||
*[True] * (NUM_USELESS // 2),
|
||||
subschema,
|
||||
*[True] * (NUM_USELESS // 2),
|
||||
],
|
||||
},
|
||||
),
|
||||
"allOf": Validator(
|
||||
{
|
||||
"allOf": [
|
||||
*[True] * (NUM_USELESS // 2),
|
||||
subschema,
|
||||
*[True] * (NUM_USELESS // 2),
|
||||
],
|
||||
},
|
||||
),
|
||||
},
|
||||
"useless successful subschemas (end)": {
|
||||
"anyOf": Validator({"anyOf": [*[True] * NUM_USELESS, subschema]}),
|
||||
"allOf": Validator({"allOf": [*[True] * NUM_USELESS, subschema]}),
|
||||
},
|
||||
"useless failing subschemas (beginning)": {
|
||||
"anyOf": Validator({"anyOf": [subschema, *[False] * NUM_USELESS]}),
|
||||
"oneOf": Validator({"oneOf": [subschema, *[False] * NUM_USELESS]}),
|
||||
},
|
||||
"useless failing subschemas (middle)": {
|
||||
"anyOf": Validator(
|
||||
{
|
||||
"anyOf": [
|
||||
*[False] * (NUM_USELESS // 2),
|
||||
subschema,
|
||||
*[False] * (NUM_USELESS // 2),
|
||||
],
|
||||
},
|
||||
),
|
||||
"oneOf": Validator(
|
||||
{
|
||||
"oneOf": [
|
||||
*[False] * (NUM_USELESS // 2),
|
||||
subschema,
|
||||
*[False] * (NUM_USELESS // 2),
|
||||
],
|
||||
},
|
||||
),
|
||||
},
|
||||
"useless failing subschemas (end)": {
|
||||
"anyOf": Validator({"anyOf": [*[False] * NUM_USELESS, subschema]}),
|
||||
"oneOf": Validator({"oneOf": [*[False] * NUM_USELESS, subschema]}),
|
||||
},
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
runner = Runner()
|
||||
|
||||
runner.bench_func("baseline valid", lambda: baseline.is_valid(valid))
|
||||
runner.bench_func("baseline invalid", lambda: baseline.is_valid(invalid))
|
||||
|
||||
for group, applicators in by_name.items():
|
||||
for applicator, validator in applicators.items():
|
||||
runner.bench_func(
|
||||
f"{group}: {applicator} valid",
|
||||
lambda validator=validator: validator.is_valid(valid),
|
||||
)
|
||||
runner.bench_func(
|
||||
f"{group}: {applicator} invalid",
|
||||
lambda validator=validator: validator.is_valid(invalid),
|
||||
)
|
||||
@@ -0,0 +1,32 @@
|
||||
"""
|
||||
A benchmark for validation of schemas containing lots of useless keywords.
|
||||
|
||||
Checks we filter them out once, ahead of time.
|
||||
"""
|
||||
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema import Draft202012Validator
|
||||
|
||||
NUM_USELESS = 100000
|
||||
schema = dict(
|
||||
[
|
||||
("not", {"const": 42}),
|
||||
*((str(i), i) for i in range(NUM_USELESS)),
|
||||
("type", "integer"),
|
||||
*((str(i), i) for i in range(NUM_USELESS, NUM_USELESS)),
|
||||
("minimum", 37),
|
||||
],
|
||||
)
|
||||
validator = Draft202012Validator(schema)
|
||||
|
||||
valid = 3737
|
||||
invalid = 12
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
runner = Runner()
|
||||
runner.bench_func("beginning of schema", lambda: validator.is_valid(42))
|
||||
runner.bench_func("middle of schema", lambda: validator.is_valid("foo"))
|
||||
runner.bench_func("end of schema", lambda: validator.is_valid(12))
|
||||
runner.bench_func("valid", lambda: validator.is_valid(3737))
|
||||
@@ -0,0 +1,14 @@
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema import Draft202012Validator
|
||||
|
||||
schema = {
|
||||
"type": "array",
|
||||
"minLength": 1,
|
||||
"maxLength": 1,
|
||||
"items": {"type": "integer"},
|
||||
}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
Runner().bench_func("validator creation", Draft202012Validator, schema)
|
||||
292
.venv/lib/python3.9/site-packages/jsonschema/cli.py
Normal file
292
.venv/lib/python3.9/site-packages/jsonschema/cli.py
Normal file
@@ -0,0 +1,292 @@
|
||||
"""
|
||||
The ``jsonschema`` command line.
|
||||
"""
|
||||
|
||||
from importlib import metadata
|
||||
from json import JSONDecodeError
|
||||
from pkgutil import resolve_name
|
||||
from textwrap import dedent
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
from attrs import define, field
|
||||
|
||||
from jsonschema.exceptions import SchemaError
|
||||
from jsonschema.validators import _RefResolver, validator_for
|
||||
|
||||
warnings.warn(
|
||||
(
|
||||
"The jsonschema CLI is deprecated and will be removed in a future "
|
||||
"version. Please use check-jsonschema instead, which can be installed "
|
||||
"from https://pypi.org/project/check-jsonschema/"
|
||||
),
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
|
||||
class _CannotLoadFile(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@define
|
||||
class _Outputter:
|
||||
|
||||
_formatter = field()
|
||||
_stdout = field()
|
||||
_stderr = field()
|
||||
|
||||
@classmethod
|
||||
def from_arguments(cls, arguments, stdout, stderr):
|
||||
if arguments["output"] == "plain":
|
||||
formatter = _PlainFormatter(arguments["error_format"])
|
||||
elif arguments["output"] == "pretty":
|
||||
formatter = _PrettyFormatter()
|
||||
return cls(formatter=formatter, stdout=stdout, stderr=stderr)
|
||||
|
||||
def load(self, path):
|
||||
try:
|
||||
file = open(path) # noqa: SIM115, PTH123
|
||||
except FileNotFoundError as error:
|
||||
self.filenotfound_error(path=path, exc_info=sys.exc_info())
|
||||
raise _CannotLoadFile() from error
|
||||
|
||||
with file:
|
||||
try:
|
||||
return json.load(file)
|
||||
except JSONDecodeError as error:
|
||||
self.parsing_error(path=path, exc_info=sys.exc_info())
|
||||
raise _CannotLoadFile() from error
|
||||
|
||||
def filenotfound_error(self, **kwargs):
|
||||
self._stderr.write(self._formatter.filenotfound_error(**kwargs))
|
||||
|
||||
def parsing_error(self, **kwargs):
|
||||
self._stderr.write(self._formatter.parsing_error(**kwargs))
|
||||
|
||||
def validation_error(self, **kwargs):
|
||||
self._stderr.write(self._formatter.validation_error(**kwargs))
|
||||
|
||||
def validation_success(self, **kwargs):
|
||||
self._stdout.write(self._formatter.validation_success(**kwargs))
|
||||
|
||||
|
||||
@define
|
||||
class _PrettyFormatter:
|
||||
|
||||
_ERROR_MSG = dedent(
|
||||
"""\
|
||||
===[{type}]===({path})===
|
||||
|
||||
{body}
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
_SUCCESS_MSG = "===[SUCCESS]===({path})===\n"
|
||||
|
||||
def filenotfound_error(self, path, exc_info):
|
||||
return self._ERROR_MSG.format(
|
||||
path=path,
|
||||
type="FileNotFoundError",
|
||||
body=f"{path!r} does not exist.",
|
||||
)
|
||||
|
||||
def parsing_error(self, path, exc_info):
|
||||
exc_type, exc_value, exc_traceback = exc_info
|
||||
exc_lines = "".join(
|
||||
traceback.format_exception(exc_type, exc_value, exc_traceback),
|
||||
)
|
||||
return self._ERROR_MSG.format(
|
||||
path=path,
|
||||
type=exc_type.__name__,
|
||||
body=exc_lines,
|
||||
)
|
||||
|
||||
def validation_error(self, instance_path, error):
|
||||
return self._ERROR_MSG.format(
|
||||
path=instance_path,
|
||||
type=error.__class__.__name__,
|
||||
body=error,
|
||||
)
|
||||
|
||||
def validation_success(self, instance_path):
|
||||
return self._SUCCESS_MSG.format(path=instance_path)
|
||||
|
||||
|
||||
@define
|
||||
class _PlainFormatter:
|
||||
|
||||
_error_format = field()
|
||||
|
||||
def filenotfound_error(self, path, exc_info):
|
||||
return f"{path!r} does not exist.\n"
|
||||
|
||||
def parsing_error(self, path, exc_info):
|
||||
return "Failed to parse {}: {}\n".format(
|
||||
"<stdin>" if path == "<stdin>" else repr(path),
|
||||
exc_info[1],
|
||||
)
|
||||
|
||||
def validation_error(self, instance_path, error):
|
||||
return self._error_format.format(file_name=instance_path, error=error)
|
||||
|
||||
def validation_success(self, instance_path):
|
||||
return ""
|
||||
|
||||
|
||||
def _resolve_name_with_default(name):
|
||||
if "." not in name:
|
||||
name = "jsonschema." + name
|
||||
return resolve_name(name)
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="JSON Schema Validation CLI",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i", "--instance",
|
||||
action="append",
|
||||
dest="instances",
|
||||
help="""
|
||||
a path to a JSON instance (i.e. filename.json) to validate (may
|
||||
be specified multiple times). If no instances are provided via this
|
||||
option, one will be expected on standard input.
|
||||
""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-F", "--error-format",
|
||||
help="""
|
||||
the format to use for each validation error message, specified
|
||||
in a form suitable for str.format. This string will be passed
|
||||
one formatted object named 'error' for each ValidationError.
|
||||
Only provide this option when using --output=plain, which is the
|
||||
default. If this argument is unprovided and --output=plain is
|
||||
used, a simple default representation will be used.
|
||||
""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-o", "--output",
|
||||
choices=["plain", "pretty"],
|
||||
default="plain",
|
||||
help="""
|
||||
an output format to use. 'plain' (default) will produce minimal
|
||||
text with one line for each error, while 'pretty' will produce
|
||||
more detailed human-readable output on multiple lines.
|
||||
""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-V", "--validator",
|
||||
type=_resolve_name_with_default,
|
||||
help="""
|
||||
the fully qualified object name of a validator to use, or, for
|
||||
validators that are registered with jsonschema, simply the name
|
||||
of the class.
|
||||
""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--base-uri",
|
||||
help="""
|
||||
a base URI to assign to the provided schema, even if it does not
|
||||
declare one (via e.g. $id). This option can be used if you wish to
|
||||
resolve relative references to a particular URI (or local path)
|
||||
""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="version",
|
||||
version=metadata.version("jsonschema"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"schema",
|
||||
help="the path to a JSON Schema to validate with (i.e. schema.json)",
|
||||
)
|
||||
|
||||
|
||||
def parse_args(args): # noqa: D103
|
||||
arguments = vars(parser.parse_args(args=args or ["--help"]))
|
||||
if arguments["output"] != "plain" and arguments["error_format"]:
|
||||
raise parser.error(
|
||||
"--error-format can only be used with --output plain",
|
||||
)
|
||||
if arguments["output"] == "plain" and arguments["error_format"] is None:
|
||||
arguments["error_format"] = "{error.instance}: {error.message}\n"
|
||||
return arguments
|
||||
|
||||
|
||||
def _validate_instance(instance_path, instance, validator, outputter):
|
||||
invalid = False
|
||||
for error in validator.iter_errors(instance):
|
||||
invalid = True
|
||||
outputter.validation_error(instance_path=instance_path, error=error)
|
||||
|
||||
if not invalid:
|
||||
outputter.validation_success(instance_path=instance_path)
|
||||
return invalid
|
||||
|
||||
|
||||
def main(args=sys.argv[1:]): # noqa: D103
|
||||
sys.exit(run(arguments=parse_args(args=args)))
|
||||
|
||||
|
||||
def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): # noqa: D103
|
||||
outputter = _Outputter.from_arguments(
|
||||
arguments=arguments,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
|
||||
try:
|
||||
schema = outputter.load(arguments["schema"])
|
||||
except _CannotLoadFile:
|
||||
return 1
|
||||
|
||||
Validator = arguments["validator"]
|
||||
if Validator is None:
|
||||
Validator = validator_for(schema)
|
||||
|
||||
try:
|
||||
Validator.check_schema(schema)
|
||||
except SchemaError as error:
|
||||
outputter.validation_error(
|
||||
instance_path=arguments["schema"],
|
||||
error=error,
|
||||
)
|
||||
return 1
|
||||
|
||||
if arguments["instances"]:
|
||||
load, instances = outputter.load, arguments["instances"]
|
||||
else:
|
||||
def load(_):
|
||||
try:
|
||||
return json.load(stdin)
|
||||
except JSONDecodeError as error:
|
||||
outputter.parsing_error(
|
||||
path="<stdin>", exc_info=sys.exc_info(),
|
||||
)
|
||||
raise _CannotLoadFile() from error
|
||||
instances = ["<stdin>"]
|
||||
|
||||
resolver = _RefResolver(
|
||||
base_uri=arguments["base_uri"],
|
||||
referrer=schema,
|
||||
) if arguments["base_uri"] is not None else None
|
||||
|
||||
validator = Validator(schema, resolver=resolver)
|
||||
exit_code = 0
|
||||
for each in instances:
|
||||
try:
|
||||
instance = load(each)
|
||||
except _CannotLoadFile:
|
||||
exit_code = 1
|
||||
else:
|
||||
exit_code |= _validate_instance(
|
||||
instance_path=each,
|
||||
instance=instance,
|
||||
validator=validator,
|
||||
outputter=outputter,
|
||||
)
|
||||
|
||||
return exit_code
|
||||
490
.venv/lib/python3.9/site-packages/jsonschema/exceptions.py
Normal file
490
.venv/lib/python3.9/site-packages/jsonschema/exceptions.py
Normal file
@@ -0,0 +1,490 @@
|
||||
"""
|
||||
Validation errors, and some surrounding helpers.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict, deque
|
||||
from pprint import pformat
|
||||
from textwrap import dedent, indent
|
||||
from typing import TYPE_CHECKING, Any, ClassVar
|
||||
import heapq
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from attrs import define
|
||||
from referencing.exceptions import Unresolvable as _Unresolvable
|
||||
|
||||
from jsonschema import _utils
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable, Mapping, MutableMapping, Sequence
|
||||
|
||||
from jsonschema import _types
|
||||
|
||||
WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"])
|
||||
STRONG_MATCHES: frozenset[str] = frozenset()
|
||||
|
||||
_JSON_PATH_COMPATIBLE_PROPERTY_PATTERN = re.compile("^[a-zA-Z][a-zA-Z0-9_]*$")
|
||||
|
||||
_unset = _utils.Unset()
|
||||
|
||||
|
||||
def _pretty(thing: Any, prefix: str):
|
||||
"""
|
||||
Format something for an error message as prettily as we currently can.
|
||||
"""
|
||||
return indent(pformat(thing, width=72, sort_dicts=False), prefix).lstrip()
|
||||
|
||||
|
||||
def __getattr__(name):
|
||||
if name == "RefResolutionError":
|
||||
warnings.warn(
|
||||
_RefResolutionError._DEPRECATION_MESSAGE,
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _RefResolutionError
|
||||
raise AttributeError(f"module {__name__} has no attribute {name}")
|
||||
|
||||
|
||||
class _Error(Exception):
|
||||
|
||||
_word_for_schema_in_error_message: ClassVar[str]
|
||||
_word_for_instance_in_error_message: ClassVar[str]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
validator: str = _unset, # type: ignore[assignment]
|
||||
path: Iterable[str | int] = (),
|
||||
cause: Exception | None = None,
|
||||
context=(),
|
||||
validator_value: Any = _unset,
|
||||
instance: Any = _unset,
|
||||
schema: Mapping[str, Any] | bool = _unset, # type: ignore[assignment]
|
||||
schema_path: Iterable[str | int] = (),
|
||||
parent: _Error | None = None,
|
||||
type_checker: _types.TypeChecker = _unset, # type: ignore[assignment]
|
||||
) -> None:
|
||||
super().__init__(
|
||||
message,
|
||||
validator,
|
||||
path,
|
||||
cause,
|
||||
context,
|
||||
validator_value,
|
||||
instance,
|
||||
schema,
|
||||
schema_path,
|
||||
parent,
|
||||
)
|
||||
self.message = message
|
||||
self.path = self.relative_path = deque(path)
|
||||
self.schema_path = self.relative_schema_path = deque(schema_path)
|
||||
self.context = list(context)
|
||||
self.cause = self.__cause__ = cause
|
||||
self.validator = validator
|
||||
self.validator_value = validator_value
|
||||
self.instance = instance
|
||||
self.schema = schema
|
||||
self.parent = parent
|
||||
self._type_checker = type_checker
|
||||
|
||||
for error in context:
|
||||
error.parent = self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}: {self.message!r}>"
|
||||
|
||||
def __str__(self) -> str:
|
||||
essential_for_verbose = (
|
||||
self.validator, self.validator_value, self.instance, self.schema,
|
||||
)
|
||||
if any(m is _unset for m in essential_for_verbose):
|
||||
return self.message
|
||||
|
||||
schema_path = _utils.format_as_index(
|
||||
container=self._word_for_schema_in_error_message,
|
||||
indices=list(self.relative_schema_path)[:-1],
|
||||
)
|
||||
instance_path = _utils.format_as_index(
|
||||
container=self._word_for_instance_in_error_message,
|
||||
indices=self.relative_path,
|
||||
)
|
||||
prefix = 16 * " "
|
||||
|
||||
return dedent(
|
||||
f"""\
|
||||
{self.message}
|
||||
|
||||
Failed validating {self.validator!r} in {schema_path}:
|
||||
{_pretty(self.schema, prefix=prefix)}
|
||||
|
||||
On {instance_path}:
|
||||
{_pretty(self.instance, prefix=prefix)}
|
||||
""".rstrip(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from(cls, other: _Error):
|
||||
return cls(**other._contents())
|
||||
|
||||
@property
|
||||
def absolute_path(self) -> Sequence[str | int]:
|
||||
parent = self.parent
|
||||
if parent is None:
|
||||
return self.relative_path
|
||||
|
||||
path = deque(self.relative_path)
|
||||
path.extendleft(reversed(parent.absolute_path))
|
||||
return path
|
||||
|
||||
@property
|
||||
def absolute_schema_path(self) -> Sequence[str | int]:
|
||||
parent = self.parent
|
||||
if parent is None:
|
||||
return self.relative_schema_path
|
||||
|
||||
path = deque(self.relative_schema_path)
|
||||
path.extendleft(reversed(parent.absolute_schema_path))
|
||||
return path
|
||||
|
||||
@property
|
||||
def json_path(self) -> str:
|
||||
path = "$"
|
||||
for elem in self.absolute_path:
|
||||
if isinstance(elem, int):
|
||||
path += "[" + str(elem) + "]"
|
||||
elif _JSON_PATH_COMPATIBLE_PROPERTY_PATTERN.match(elem):
|
||||
path += "." + elem
|
||||
else:
|
||||
escaped_elem = elem.replace("\\", "\\\\").replace("'", r"\'")
|
||||
path += "['" + escaped_elem + "']"
|
||||
return path
|
||||
|
||||
def _set(
|
||||
self,
|
||||
type_checker: _types.TypeChecker | None = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
if type_checker is not None and self._type_checker is _unset:
|
||||
self._type_checker = type_checker
|
||||
|
||||
for k, v in kwargs.items():
|
||||
if getattr(self, k) is _unset:
|
||||
setattr(self, k, v)
|
||||
|
||||
def _contents(self):
|
||||
attrs = (
|
||||
"message", "cause", "context", "validator", "validator_value",
|
||||
"path", "schema_path", "instance", "schema", "parent",
|
||||
)
|
||||
return {attr: getattr(self, attr) for attr in attrs}
|
||||
|
||||
def _matches_type(self) -> bool:
|
||||
try:
|
||||
# We ignore this as we want to simply crash if this happens
|
||||
expected = self.schema["type"] # type: ignore[index]
|
||||
except (KeyError, TypeError):
|
||||
return False
|
||||
|
||||
if isinstance(expected, str):
|
||||
return self._type_checker.is_type(self.instance, expected)
|
||||
|
||||
return any(
|
||||
self._type_checker.is_type(self.instance, expected_type)
|
||||
for expected_type in expected
|
||||
)
|
||||
|
||||
|
||||
class ValidationError(_Error):
|
||||
"""
|
||||
An instance was invalid under a provided schema.
|
||||
"""
|
||||
|
||||
_word_for_schema_in_error_message = "schema"
|
||||
_word_for_instance_in_error_message = "instance"
|
||||
|
||||
|
||||
class SchemaError(_Error):
|
||||
"""
|
||||
A schema was invalid under its corresponding metaschema.
|
||||
"""
|
||||
|
||||
_word_for_schema_in_error_message = "metaschema"
|
||||
_word_for_instance_in_error_message = "schema"
|
||||
|
||||
|
||||
@define(slots=False)
|
||||
class _RefResolutionError(Exception): # noqa: PLW1641
|
||||
"""
|
||||
A ref could not be resolved.
|
||||
"""
|
||||
|
||||
_DEPRECATION_MESSAGE = (
|
||||
"jsonschema.exceptions.RefResolutionError is deprecated as of version "
|
||||
"4.18.0. If you wish to catch potential reference resolution errors, "
|
||||
"directly catch referencing.exceptions.Unresolvable."
|
||||
)
|
||||
|
||||
_cause: Exception
|
||||
|
||||
def __eq__(self, other):
|
||||
if self.__class__ is not other.__class__:
|
||||
return NotImplemented # pragma: no cover -- uncovered but deprecated # noqa: E501
|
||||
return self._cause == other._cause
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self._cause)
|
||||
|
||||
|
||||
class _WrappedReferencingError(_RefResolutionError, _Unresolvable): # pragma: no cover -- partially uncovered but to be removed # noqa: E501
|
||||
def __init__(self, cause: _Unresolvable):
|
||||
object.__setattr__(self, "_wrapped", cause)
|
||||
|
||||
def __eq__(self, other):
|
||||
if other.__class__ is self.__class__:
|
||||
return self._wrapped == other._wrapped
|
||||
elif other.__class__ is self._wrapped.__class__:
|
||||
return self._wrapped == other
|
||||
return NotImplemented
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self._wrapped, attr)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._wrapped)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<WrappedReferencingError {self._wrapped!r}>"
|
||||
|
||||
def __str__(self):
|
||||
return f"{self._wrapped.__class__.__name__}: {self._wrapped}"
|
||||
|
||||
|
||||
class UndefinedTypeCheck(Exception):
|
||||
"""
|
||||
A type checker was asked to check a type it did not have registered.
|
||||
"""
|
||||
|
||||
def __init__(self, type: str) -> None:
|
||||
self.type = type
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Type {self.type!r} is unknown to this type checker"
|
||||
|
||||
|
||||
class UnknownType(Exception):
|
||||
"""
|
||||
A validator was asked to validate an instance against an unknown type.
|
||||
"""
|
||||
|
||||
def __init__(self, type, instance, schema):
|
||||
self.type = type
|
||||
self.instance = instance
|
||||
self.schema = schema
|
||||
|
||||
def __str__(self):
|
||||
prefix = 16 * " "
|
||||
|
||||
return dedent(
|
||||
f"""\
|
||||
Unknown type {self.type!r} for validator with schema:
|
||||
{_pretty(self.schema, prefix=prefix)}
|
||||
|
||||
While checking instance:
|
||||
{_pretty(self.instance, prefix=prefix)}
|
||||
""".rstrip(),
|
||||
)
|
||||
|
||||
|
||||
class FormatError(Exception):
|
||||
"""
|
||||
Validating a format failed.
|
||||
"""
|
||||
|
||||
def __init__(self, message, cause=None):
|
||||
super().__init__(message, cause)
|
||||
self.message = message
|
||||
self.cause = self.__cause__ = cause
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
|
||||
class ErrorTree:
|
||||
"""
|
||||
ErrorTrees make it easier to check which validations failed.
|
||||
"""
|
||||
|
||||
_instance = _unset
|
||||
|
||||
def __init__(self, errors: Iterable[ValidationError] = ()):
|
||||
self.errors: MutableMapping[str, ValidationError] = {}
|
||||
self._contents: Mapping[str, ErrorTree] = defaultdict(self.__class__)
|
||||
|
||||
for error in errors:
|
||||
container = self
|
||||
for element in error.path:
|
||||
container = container[element]
|
||||
container.errors[error.validator] = error
|
||||
|
||||
container._instance = error.instance
|
||||
|
||||
def __contains__(self, index: str | int):
|
||||
"""
|
||||
Check whether ``instance[index]`` has any errors.
|
||||
"""
|
||||
return index in self._contents
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Retrieve the child tree one level down at the given ``index``.
|
||||
|
||||
If the index is not in the instance that this tree corresponds
|
||||
to and is not known by this tree, whatever error would be raised
|
||||
by ``instance.__getitem__`` will be propagated (usually this is
|
||||
some subclass of `LookupError`.
|
||||
"""
|
||||
if self._instance is not _unset and index not in self:
|
||||
self._instance[index]
|
||||
return self._contents[index]
|
||||
|
||||
def __setitem__(self, index: str | int, value: ErrorTree):
|
||||
"""
|
||||
Add an error to the tree at the given ``index``.
|
||||
|
||||
.. deprecated:: v4.20.0
|
||||
|
||||
Setting items on an `ErrorTree` is deprecated without replacement.
|
||||
To populate a tree, provide all of its sub-errors when you
|
||||
construct the tree.
|
||||
"""
|
||||
warnings.warn(
|
||||
"ErrorTree.__setitem__ is deprecated without replacement.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self._contents[index] = value # type: ignore[index]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterate (non-recursively) over the indices in the instance with errors.
|
||||
"""
|
||||
return iter(self._contents)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Return the `total_errors`.
|
||||
"""
|
||||
return self.total_errors
|
||||
|
||||
def __repr__(self):
|
||||
total = len(self)
|
||||
errors = "error" if total == 1 else "errors"
|
||||
return f"<{self.__class__.__name__} ({total} total {errors})>"
|
||||
|
||||
@property
|
||||
def total_errors(self):
|
||||
"""
|
||||
The total number of errors in the entire tree, including children.
|
||||
"""
|
||||
child_errors = sum(len(tree) for _, tree in self._contents.items())
|
||||
return len(self.errors) + child_errors
|
||||
|
||||
|
||||
def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
    """
    Create a key function that can be used to sort errors by relevance.

    Arguments:

        weak (set):

            a collection of validation keywords to consider to be
            "weak". If there are two errors at the same level of the
            instance and one is in the set of weak validation keywords,
            the other error will take priority. By default, :kw:`anyOf`
            and :kw:`oneOf` are considered weak keywords and will be
            superseded by other same-level validation errors.

        strong (set):

            a collection of validation keywords to consider to be
            "strong"

    """

    def relevance(error):
        keyword = error.validator
        # Build a tuple so that comparing two errors prefers, in order:
        # deeper (more specific) paths, earlier siblings, non-weak
        # keywords, strong keywords, and finally errors whose instance
        # at least matched the expected type.
        return (
            -len(error.path),
            error.path,
            keyword not in weak,
            keyword in strong,
            not error._matches_type(),
        )

    return relevance
|
||||
|
||||
|
||||
# Module-level default key, built with the default weak/strong keyword sets.
relevance = by_relevance()
"""
A key function (e.g. to use with `sorted`) which sorts errors by relevance.

Example:

.. code:: python

    sorted(validator.iter_errors(12), key=jsonschema.exceptions.relevance)
"""
|
||||
|
||||
|
||||
def best_match(errors, key=relevance):
    """
    Try to find an error that appears to be the best match among given errors.

    In general, errors that are higher up in the instance (i.e. for which
    `ValidationError.path` is shorter) are considered better matches,
    since they indicate "more" is wrong with the instance.

    If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the
    *opposite* assumption is made -- i.e. the deepest error is picked,
    since these keywords only need to match once, and any other errors
    may not be relevant.

    Arguments:

        errors (collections.abc.Iterable):

            the errors to select from. Do not provide a mixture of
            errors from different validation attempts (i.e. from
            different instances or schemas), since it won't produce
            sensical output.

        key (collections.abc.Callable):

            the key to use when sorting errors. See `relevance` and
            transitively `by_relevance` for more details (the default is
            to sort with the defaults of that function). Changing the
            default is only useful if you want to change the function
            that rates errors but still want the error context descent
            done by this function.

    Returns:

        the best matching error, or ``None`` if the iterable was empty

    .. note::

        This function is a heuristic. Its return value may change for a given
        set of inputs from version to version if better heuristics are added.

    """
    current = max(errors, key=key, default=None)
    if current is None:
        return None

    while current.context:
        # Take the two smallest via nsmallest so we can tell whether the
        # minimum is unique: we don't recurse further when all nested
        # errors rate equally relevant (i.e. min == max == all of them).
        top_two = heapq.nsmallest(2, current.context, key=key)
        tied = len(top_two) == 2 and key(top_two[0]) == key(top_two[1])
        if tied:
            break
        current = top_two[0]
    return current
|
||||
230
.venv/lib/python3.9/site-packages/jsonschema/protocols.py
Normal file
230
.venv/lib/python3.9/site-packages/jsonschema/protocols.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""
|
||||
typing.Protocol classes for jsonschema interfaces.
|
||||
"""
|
||||
|
||||
# for reference material on Protocols, see
|
||||
# https://www.python.org/dev/peps/pep-0544/
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, ClassVar, Protocol, runtime_checkable
|
||||
|
||||
# in order for Sphinx to resolve references accurately from type annotations,
|
||||
# it needs to see names like `jsonschema.TypeChecker`
|
||||
# therefore, only import at type-checking time (to avoid circular references),
|
||||
# but use `jsonschema` for any types which will otherwise not be resolvable
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable, Mapping
|
||||
|
||||
import referencing.jsonschema
|
||||
|
||||
from jsonschema import _typing
|
||||
from jsonschema.exceptions import ValidationError
|
||||
import jsonschema
|
||||
import jsonschema.validators
|
||||
|
||||
# For code authors working on the validator protocol, these are the three
|
||||
# use-cases which should be kept in mind:
|
||||
#
|
||||
# 1. As a protocol class, it can be used in type annotations to describe the
|
||||
# available methods and attributes of a validator
|
||||
# 2. It is the source of autodoc for the validator documentation
|
||||
# 3. It is runtime_checkable, meaning that it can be used in isinstance()
|
||||
# checks.
|
||||
#
|
||||
# Since protocols are not base classes, isinstance() checking is limited in
|
||||
# its capabilities. See docs on runtime_checkable for detail
|
||||
|
||||
|
||||
@runtime_checkable
class Validator(Protocol):
    """
    The protocol to which all validator classes adhere.

    Arguments:

        schema:

            The schema that the validator object will validate with.
            It is assumed to be valid, and providing
            an invalid schema can lead to undefined behavior. See
            `Validator.check_schema` to validate a schema first.

        registry:

            a schema registry that will be used for looking up JSON references

        resolver:

            a resolver that will be used to resolve :kw:`$ref`
            properties (JSON references). If unprovided, one will be created.

            .. deprecated:: v4.18.0

                `RefResolver <_RefResolver>` has been deprecated in favor of
                `referencing`, and with it, this argument.

        format_checker:

            if provided, a checker which will be used to assert about
            :kw:`format` properties present in the schema. If unprovided,
            *no* format validation is done, and the presence of format
            within schemas is strictly informational. Certain formats
            require additional packages to be installed in order to assert
            against instances. Ensure you've installed `jsonschema` with
            its `extra (optional) dependencies <index:extras>` when
            invoking ``pip``.

    .. deprecated:: v4.12.0

        Subclassing validator classes now explicitly warns this is not part of
        their public API.

    """

    #: An object representing the validator's meta schema (the schema that
    #: describes valid schemas in the given version).
    META_SCHEMA: ClassVar[Mapping]

    #: A mapping of validation keywords (`str`\s) to functions that
    #: validate the keyword with that name. For more information see
    #: `creating-validators`.
    VALIDATORS: ClassVar[Mapping]

    #: A `jsonschema.TypeChecker` that will be used when validating
    #: :kw:`type` keywords in JSON schemas.
    TYPE_CHECKER: ClassVar[jsonschema.TypeChecker]

    #: A `jsonschema.FormatChecker` that will be used when validating
    #: :kw:`format` keywords in JSON schemas.
    FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker]

    #: A function which given a schema returns its ID.
    ID_OF: _typing.id_of

    #: The schema that will be used to validate instances
    schema: Mapping | bool

    def __init__(
        self,
        schema: Mapping | bool,
        resolver: Any = None,  # deprecated
        format_checker: jsonschema.FormatChecker | None = None,
        *,
        registry: referencing.jsonschema.SchemaRegistry = ...,
    ) -> None: ...

    @classmethod
    def check_schema(cls, schema: Mapping | bool) -> None:
        """
        Validate the given schema against the validator's `META_SCHEMA`.

        Raises:

            `jsonschema.exceptions.SchemaError`:

                if the schema is invalid

        """

    def is_type(self, instance: Any, type: str) -> bool:
        """
        Check if the instance is of the given (JSON Schema) type.

        Arguments:

            instance:

                the value to check

            type:

                the name of a known (JSON Schema) type

        Returns:

            whether the instance is of the given type

        Raises:

            `jsonschema.exceptions.UnknownType`:

                if ``type`` is not a known type

        """

    def is_valid(self, instance: Any) -> bool:
        """
        Check if the instance is valid under the current `schema`.

        Returns:

            whether the instance is valid or not

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).is_valid([2, 3, 4])
        False

        """

    def iter_errors(self, instance: Any) -> Iterable[ValidationError]:
        r"""
        Lazily yield each of the validation errors in the given instance.

        >>> schema = {
        ...     "type" : "array",
        ...     "items" : {"enum" : [1, 2, 3]},
        ...     "maxItems" : 2,
        ... }
        >>> v = Draft202012Validator(schema)
        >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str):
        ...     print(error.message)
        4 is not one of [1, 2, 3]
        [2, 3, 4] is too long

        .. deprecated:: v4.0.0

            Calling this function with a second schema argument is deprecated.
            Use `Validator.evolve` instead.
        """

    def validate(self, instance: Any) -> None:
        """
        Check if the instance is valid under the current `schema`.

        Raises:

            `jsonschema.exceptions.ValidationError`:

                if the instance is invalid

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).validate([2, 3, 4])
        Traceback (most recent call last):
            ...
        ValidationError: [2, 3, 4] is too long

        """

    def evolve(self, **kwargs) -> Validator:
        """
        Create a new validator like this one, but with given changes.

        Preserves all other attributes, so can be used to e.g. create a
        validator with a different schema but with the same :kw:`$ref`
        resolution behavior.

        >>> validator = Draft202012Validator({})
        >>> validator.evolve(schema={"type": "number"})
        Draft202012Validator(schema={'type': 'number'}, format_checker=None)

        The returned object satisfies the validator protocol, but may not
        be of the same concrete class! In particular this occurs
        when a :kw:`$ref` occurs to a schema with a different
        :kw:`$schema` than this one (i.e. for a different draft).

        >>> validator.evolve(
        ...     schema={"$schema": Draft7Validator.META_SCHEMA["$id"]}
        ... )
        Draft7Validator(schema=..., format_checker=None)
        """
|
||||
285
.venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py
Normal file
285
.venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py
Normal file
@@ -0,0 +1,285 @@
|
||||
"""
|
||||
Python representations of the JSON Schema Test Suite tests.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from attrs import field, frozen
|
||||
from referencing import Registry
|
||||
import referencing.jsonschema
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable, Mapping, Sequence
|
||||
|
||||
from referencing.jsonschema import Schema
|
||||
import pyperf
|
||||
|
||||
from jsonschema.validators import _VALIDATORS
|
||||
import jsonschema
|
||||
|
||||
MAGIC_REMOTE_URL = "http://localhost:1234"
|
||||
|
||||
_DELIMITERS = re.compile(r"[\W\- ]+")
|
||||
|
||||
|
||||
def _find_suite():
|
||||
root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
|
||||
if root is not None:
|
||||
return Path(root)
|
||||
|
||||
root = Path(jsonschema.__file__).parent.parent / "json"
|
||||
if not root.is_dir(): # pragma: no cover
|
||||
raise ValueError(
|
||||
(
|
||||
"Can't find the JSON-Schema-Test-Suite directory. "
|
||||
"Set the 'JSON_SCHEMA_TEST_SUITE' environment "
|
||||
"variable or run the tests from alongside a checkout "
|
||||
"of the suite."
|
||||
),
|
||||
)
|
||||
return root
|
||||
|
||||
|
||||
@frozen
class Suite:
    """A checkout of the official JSON Schema Test Suite."""

    # Root directory of the suite checkout; auto-discovered when not given.
    _root: Path = field(factory=_find_suite)

    def benchmark(self, runner: pyperf.Runner):  # pragma: no cover
        """Run the benchmarks for every registered validator class."""
        for name, Validator in _VALIDATORS.items():
            self.version(name=name).benchmark(
                runner=runner,
                Validator=Validator,
            )

    def version(self, name) -> Version:
        """
        Return the `Version` of the suite for the draft with the given name.

        Builds a `referencing.Registry` containing the remote schemas that
        this draft's tests reference.
        """
        Validator = _VALIDATORS[name]
        uri: str = Validator.ID_OF(Validator.META_SCHEMA)  # type: ignore[assignment]
        specification = referencing.jsonschema.specification_with(uri)

        registry = Registry().with_contents(
            remotes_in(root=self._root / "remotes", name=name, uri=uri),
            default_specification=specification,
        )
        return Version(
            name=name,
            path=self._root / "tests" / name,
            remotes=registry,
        )
|
||||
|
||||
|
||||
@frozen
class Version:
    """A single draft's collection of test cases within the suite."""

    # Directory containing this draft's test JSON files.
    _path: Path
    # Registry of the remote schemas referenced by this draft's tests.
    _remotes: referencing.jsonschema.SchemaRegistry

    # The draft's name (the suite's directory name for it).
    name: str

    def benchmark(self, **kwargs):  # pragma: no cover
        """Benchmark every case belonging to this draft."""
        for case in self.cases():
            case.benchmark(**kwargs)

    def cases(self) -> Iterable[_Case]:
        """The required (top-level, non-optional) test cases."""
        return self._cases_in(paths=self._path.glob("*.json"))

    def format_cases(self) -> Iterable[_Case]:
        """The optional :kw:`format` test cases."""
        return self._cases_in(paths=self._path.glob("optional/format/*.json"))

    def optional_cases_of(self, name: str) -> Iterable[_Case]:
        """The optional test cases from the file with the given name."""
        return self._cases_in(paths=[self._path / "optional" / f"{name}.json"])

    def to_unittest_testcase(self, *groups, **kwargs):
        """
        Bundle the given groups of cases into a ``unittest.TestCase`` subclass.

        Each individual test becomes one test method on the generated class.
        """
        name = kwargs.pop("name", "Test" + self.name.title().replace("-", ""))
        methods = {
            method.__name__: method
            for method in (
                test.to_unittest_method(**kwargs)
                for group in groups
                for case in group
                for test in case.tests
            )
        }
        cls = type(name, (unittest.TestCase,), methods)

        # We're doing crazy things, so if they go wrong, like a function
        # behaving differently on some other interpreter, just make them
        # not happen.
        with suppress(Exception):
            cls.__module__ = _someone_save_us_the_module_of_the_caller()

        return cls

    def _cases_in(self, paths: Iterable[Path]) -> Iterable[_Case]:
        # Each suite file is a JSON array of case dictionaries.
        for path in paths:
            for case in json.loads(path.read_text(encoding="utf-8")):
                yield _Case.from_dict(
                    case,
                    version=self,
                    subject=path.stem,
                    remotes=self._remotes,
                )
|
||||
|
||||
|
||||
@frozen
class _Case:
    """One suite test case: a schema together with its individual tests."""

    version: Version

    # The stem of the file this case came from.
    subject: str
    description: str
    schema: Mapping[str, Any] | bool
    tests: list[_Test]
    comment: str | None = None
    specification: Sequence[dict[str, str]] = ()

    @classmethod
    def from_dict(cls, data, remotes, **kwargs):
        """
        Build a `_Case` (and its `_Test`\\ s) from a raw suite dictionary.

        ``kwargs`` are merged into ``data`` before construction; each entry
        of ``data["tests"]`` becomes a `_Test`.
        """
        data.update(kwargs)
        tests = [
            _Test(
                version=data["version"],
                subject=data["subject"],
                case_description=data["description"],
                schema=data["schema"],
                remotes=remotes,
                **test,
            ) for test in data.pop("tests")
        ]
        return cls(tests=tests, **data)

    def benchmark(self, runner: pyperf.Runner, **kwargs):  # pragma: no cover
        """Register each of this case's tests with the pyperf runner."""
        for test in self.tests:
            runner.bench_func(
                test.fully_qualified_name,
                partial(test.validate_ignoring_errors, **kwargs),
            )
|
||||
|
||||
|
||||
def remotes_in(
    root: Path,
    name: str,
    uri: str,
) -> Iterable[tuple[str, Schema]]:
    """
    Yield ``(url, schema)`` pairs for the remote schemas the suite serves.

    This messy logic is because the test suite is terrible at indicating
    what remotes are needed for what drafts, and mixes in schemas which
    have no $schema and which are invalid under earlier versions, in with
    other schemas which are needed for tests.
    """
    for path in root.rglob("*.json"):
        contents = json.loads(path.read_text())

        relative = str(path.relative_to(root)).replace("\\", "/")

        # Boolean-schema remote which is invalid under drafts 3/4.
        invalid_boolean = name in {"draft3", "draft4"} and path.stem == "tree"
        # A draft<SomeOtherDialect>/*.json remote not meant for this draft.
        other_dialect = (
            "$schema" not in contents
            and relative.startswith("draft")
            and not relative.startswith(name)
        )
        if invalid_boolean or other_dialect:
            continue
        yield f"{MAGIC_REMOTE_URL}/{relative}", contents
|
||||
|
||||
|
||||
@frozen(repr=False)
class _Test:
    """One individual test: an instance validated against its case's schema."""

    version: Version

    subject: str
    case_description: str
    description: str

    # The instance to validate.
    data: Any
    schema: Mapping[str, Any] | bool

    # Whether the instance is expected to validate successfully.
    valid: bool

    _remotes: referencing.jsonschema.SchemaRegistry

    comment: str | None = None

    def __repr__(self):  # pragma: no cover
        return f"<Test {self.fully_qualified_name}>"

    @property
    def fully_qualified_name(self):  # pragma: no cover
        # Human-readable identifier used in benchmark and repr output.
        return " > ".join(  # noqa: FLY002
            [
                self.version.name,
                self.subject,
                self.case_description,
                self.description,
            ],
        )

    def to_unittest_method(self, skip=lambda test: None, **kwargs):
        """
        Convert this test into a ``unittest`` test method.

        ``skip`` may return a reason string to skip the test; the
        ``JSON_SCHEMA_DEBUG`` / ``JSON_SCHEMA_EXPECTED_FAILURES``
        environment variables override the skipping behavior.
        """
        if self.valid:
            def fn(this):
                self.validate(**kwargs)
        else:
            def fn(this):
                with this.assertRaises(jsonschema.ValidationError):
                    self.validate(**kwargs)

        # Build a valid Python identifier out of the test's description.
        fn.__name__ = "_".join(
            [
                "test",
                _DELIMITERS.sub("_", self.subject),
                _DELIMITERS.sub("_", self.case_description),
                _DELIMITERS.sub("_", self.description),
            ],
        )
        reason = skip(self)
        if reason is None or os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0":
            return fn
        elif os.environ.get("JSON_SCHEMA_EXPECTED_FAILURES", "0") != "0":  # pragma: no cover # noqa: E501
            return unittest.expectedFailure(fn)
        else:
            return unittest.skip(reason)(fn)

    def validate(self, Validator, **kwargs):
        """Check the schema itself, then validate this test's data with it."""
        Validator.check_schema(self.schema)
        validator = Validator(
            schema=self.schema,
            registry=self._remotes,
            **kwargs,
        )
        if os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0":  # pragma: no cover
            breakpoint()  # noqa: T100
        validator.validate(instance=self.data)

    def validate_ignoring_errors(self, Validator):  # pragma: no cover
        """Run `validate`, swallowing any `ValidationError` (for benchmarks)."""
        with suppress(jsonschema.ValidationError):
            self.validate(Validator=Validator)
|
||||
|
||||
|
||||
def _someone_save_us_the_module_of_the_caller():
|
||||
"""
|
||||
The FQON of the module 2nd stack frames up from here.
|
||||
|
||||
This is intended to allow us to dynamically return test case classes that
|
||||
are indistinguishable from being defined in the module that wants them.
|
||||
|
||||
Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
|
||||
the class that really is running.
|
||||
|
||||
Save us all, this is all so so so so so terrible.
|
||||
"""
|
||||
|
||||
return sys._getframe(2).f_globals["__name__"]
|
||||
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Fuzzing setup for OSS-Fuzz.
|
||||
|
||||
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
|
||||
other half of the setup here.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from hypothesis import given, strategies
|
||||
|
||||
import jsonschema
|
||||
|
||||
# Primitive JSON values; NaN/infinity are excluded since they are not
# representable in JSON.
PRIM = strategies.one_of(
    strategies.booleans(),
    strategies.integers(),
    strategies.floats(allow_nan=False, allow_infinity=False),
    strategies.text(),
)
# Arbitrarily nested dictionaries of primitives (or bare booleans), used
# both as fuzzed instances and as fuzzed schemas.
DICT = strategies.recursive(
    base=strategies.one_of(
        strategies.booleans(),
        strategies.dictionaries(strategies.text(), PRIM),
    ),
    extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
|
||||
|
||||
|
||||
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
    """
    Fuzz target: validating an arbitrary instance against an arbitrary
    schema must raise nothing other than the library's own errors.
    """
    try:
        jsonschema.validate(instance=obj1, schema=obj2)
    except (
        jsonschema.exceptions.ValidationError,
        jsonschema.exceptions.SchemaError,
    ):
        # Both are expected outcomes for fuzzed input; any other
        # exception propagates and is reported as a finding.
        pass
|
||||
|
||||
|
||||
def main():
    # NOTE(review): ``atheris`` is imported only inside the
    # ``if __name__ == "__main__"`` guard below, so calling main() from an
    # importing module would raise NameError — confirm this is intentional
    # (it mirrors the standard OSS-Fuzz harness layout).
    atheris.instrument_all()
    atheris.Setup(
        sys.argv,
        # Drive the hypothesis test with fuzzer-provided bytes.
        test_schemas.hypothesis.fuzz_one_input,
        enable_python_coverage=True,
    )
    atheris.Fuzz()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import atheris
|
||||
main()
|
||||
904
.venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py
Normal file
904
.venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py
Normal file
@@ -0,0 +1,904 @@
|
||||
from contextlib import redirect_stderr, redirect_stdout
|
||||
from importlib import metadata
|
||||
from io import StringIO
|
||||
from json import JSONDecodeError
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from unittest import TestCase
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import warnings
|
||||
|
||||
from jsonschema import Draft4Validator, Draft202012Validator
|
||||
from jsonschema.exceptions import (
|
||||
SchemaError,
|
||||
ValidationError,
|
||||
_RefResolutionError,
|
||||
)
|
||||
from jsonschema.validators import _LATEST_VERSION, validate
|
||||
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore")
|
||||
from jsonschema import cli
|
||||
|
||||
|
||||
def fake_validator(*errors):
    """
    Create a stub validator class which "finds" the given groups of errors.

    Each positional argument is the list of errors that one successive
    ``iter_errors`` call should produce.
    """
    pending = list(reversed(errors))

    class FakeValidator:
        def __init__(self, *args, **kwargs):
            pass

        def iter_errors(self, instance):
            # Hand out the next canned group, in the order originally given.
            if pending:
                return pending.pop()
            return []  # pragma: no cover

        @classmethod
        def check_schema(self, schema):
            # Every schema is considered valid by the fake.
            pass

    return FakeValidator
|
||||
|
||||
|
||||
def fake_open(all_contents):
    """
    Build an ``open``-alike backed by the given path → contents mapping.

    Unknown paths raise `FileNotFoundError`, mirroring the builtin.
    """
    def open(path):
        data = all_contents.get(path)
        if data is None:
            raise FileNotFoundError(path)
        return StringIO(data)

    return open
|
||||
|
||||
|
||||
def _message_for(non_json):
|
||||
try:
|
||||
json.loads(non_json)
|
||||
except JSONDecodeError as error:
|
||||
return str(error)
|
||||
else: # pragma: no cover
|
||||
raise RuntimeError("Tried and failed to capture a JSON dump error.")
|
||||
|
||||
|
||||
class TestCLI(TestCase):
|
||||
def run_cli(
|
||||
self, argv, files=None, stdin=StringIO(), exit_code=0, **override,
|
||||
):
|
||||
arguments = cli.parse_args(argv)
|
||||
arguments.update(override)
|
||||
|
||||
self.assertFalse(hasattr(cli, "open"))
|
||||
cli.open = fake_open(files or {})
|
||||
try:
|
||||
stdout, stderr = StringIO(), StringIO()
|
||||
actual_exit_code = cli.run(
|
||||
arguments,
|
||||
stdin=stdin,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
finally:
|
||||
del cli.open
|
||||
|
||||
self.assertEqual(
|
||||
actual_exit_code, exit_code, msg=dedent(
|
||||
f"""
|
||||
Expected an exit code of {exit_code} != {actual_exit_code}.
|
||||
|
||||
stdout: {stdout.getvalue()}
|
||||
|
||||
stderr: {stderr.getvalue()}
|
||||
""",
|
||||
),
|
||||
)
|
||||
return stdout.getvalue(), stderr.getvalue()
|
||||
|
||||
def assertOutputs(self, stdout="", stderr="", **kwargs):
|
||||
self.assertEqual(
|
||||
self.run_cli(**kwargs),
|
||||
(dedent(stdout), dedent(stderr)),
|
||||
)
|
||||
|
||||
def test_invalid_instance(self):
|
||||
error = ValidationError("I am an error!", instance=12)
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_instance=json.dumps(error.instance),
|
||||
),
|
||||
validator=fake_validator([error]),
|
||||
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="12: I am an error!\n",
|
||||
)
|
||||
|
||||
def test_invalid_instance_pretty_output(self):
|
||||
error = ValidationError("I am an error!", instance=12)
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_instance=json.dumps(error.instance),
|
||||
),
|
||||
validator=fake_validator([error]),
|
||||
|
||||
argv=["-i", "some_instance", "--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[ValidationError]===(some_instance)===
|
||||
|
||||
I am an error!
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_invalid_instance_explicit_plain_output(self):
|
||||
error = ValidationError("I am an error!", instance=12)
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_instance=json.dumps(error.instance),
|
||||
),
|
||||
validator=fake_validator([error]),
|
||||
|
||||
argv=["--output", "plain", "-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="12: I am an error!\n",
|
||||
)
|
||||
|
||||
def test_invalid_instance_multiple_errors(self):
|
||||
instance = 12
|
||||
first = ValidationError("First error", instance=instance)
|
||||
second = ValidationError("Second error", instance=instance)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_instance=json.dumps(instance),
|
||||
),
|
||||
validator=fake_validator([first, second]),
|
||||
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
12: First error
|
||||
12: Second error
|
||||
""",
|
||||
)
|
||||
|
||||
def test_invalid_instance_multiple_errors_pretty_output(self):
|
||||
instance = 12
|
||||
first = ValidationError("First error", instance=instance)
|
||||
second = ValidationError("Second error", instance=instance)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_instance=json.dumps(instance),
|
||||
),
|
||||
validator=fake_validator([first, second]),
|
||||
|
||||
argv=["-i", "some_instance", "--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[ValidationError]===(some_instance)===
|
||||
|
||||
First error
|
||||
-----------------------------
|
||||
===[ValidationError]===(some_instance)===
|
||||
|
||||
Second error
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_multiple_invalid_instances(self):
|
||||
first_instance = 12
|
||||
first_errors = [
|
||||
ValidationError("An error", instance=first_instance),
|
||||
ValidationError("Another error", instance=first_instance),
|
||||
]
|
||||
second_instance = "foo"
|
||||
second_errors = [ValidationError("BOOM", instance=second_instance)]
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_first_instance=json.dumps(first_instance),
|
||||
some_second_instance=json.dumps(second_instance),
|
||||
),
|
||||
validator=fake_validator(first_errors, second_errors),
|
||||
|
||||
argv=[
|
||||
"-i", "some_first_instance",
|
||||
"-i", "some_second_instance",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
12: An error
|
||||
12: Another error
|
||||
foo: BOOM
|
||||
""",
|
||||
)
|
||||
|
||||
def test_multiple_invalid_instances_pretty_output(self):
|
||||
first_instance = 12
|
||||
first_errors = [
|
||||
ValidationError("An error", instance=first_instance),
|
||||
ValidationError("Another error", instance=first_instance),
|
||||
]
|
||||
second_instance = "foo"
|
||||
second_errors = [ValidationError("BOOM", instance=second_instance)]
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_first_instance=json.dumps(first_instance),
|
||||
some_second_instance=json.dumps(second_instance),
|
||||
),
|
||||
validator=fake_validator(first_errors, second_errors),
|
||||
|
||||
argv=[
|
||||
"--output", "pretty",
|
||||
"-i", "some_first_instance",
|
||||
"-i", "some_second_instance",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[ValidationError]===(some_first_instance)===
|
||||
|
||||
An error
|
||||
-----------------------------
|
||||
===[ValidationError]===(some_first_instance)===
|
||||
|
||||
Another error
|
||||
-----------------------------
|
||||
===[ValidationError]===(some_second_instance)===
|
||||
|
||||
BOOM
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_custom_error_format(self):
|
||||
first_instance = 12
|
||||
first_errors = [
|
||||
ValidationError("An error", instance=first_instance),
|
||||
ValidationError("Another error", instance=first_instance),
|
||||
]
|
||||
second_instance = "foo"
|
||||
second_errors = [ValidationError("BOOM", instance=second_instance)]
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"does not": "matter since it is stubbed"}',
|
||||
some_first_instance=json.dumps(first_instance),
|
||||
some_second_instance=json.dumps(second_instance),
|
||||
),
|
||||
validator=fake_validator(first_errors, second_errors),
|
||||
|
||||
argv=[
|
||||
"--error-format", ":{error.message}._-_.{error.instance}:",
|
||||
"-i", "some_first_instance",
|
||||
"-i", "some_second_instance",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:",
|
||||
)
|
||||
|
||||
def test_invalid_schema(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema='{"type": 12}'),
|
||||
argv=["some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
12: 12 is not valid under any of the given schemas
|
||||
""",
|
||||
)
|
||||
|
||||
def test_invalid_schema_pretty_output(self):
|
||||
schema = {"type": 12}
|
||||
|
||||
with self.assertRaises(SchemaError) as e:
|
||||
validate(schema=schema, instance="")
|
||||
error = str(e.exception)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=json.dumps(schema)),
|
||||
argv=["--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=(
|
||||
"===[SchemaError]===(some_schema)===\n\n"
|
||||
+ str(error)
|
||||
+ "\n-----------------------------\n"
|
||||
),
|
||||
)
|
||||
|
||||
def test_invalid_schema_multiple_errors(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema='{"type": 12, "items": 57}'),
|
||||
argv=["some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
57: 57 is not of type 'object', 'boolean'
|
||||
""",
|
||||
)
|
||||
|
||||
def test_invalid_schema_multiple_errors_pretty_output(self):
|
||||
schema = {"type": 12, "items": 57}
|
||||
|
||||
with self.assertRaises(SchemaError) as e:
|
||||
validate(schema=schema, instance="")
|
||||
error = str(e.exception)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=json.dumps(schema)),
|
||||
argv=["--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=(
|
||||
"===[SchemaError]===(some_schema)===\n\n"
|
||||
+ str(error)
|
||||
+ "\n-----------------------------\n"
|
||||
),
|
||||
)
|
||||
|
||||
def test_invalid_schema_with_invalid_instance(self):
|
||||
"""
|
||||
"Validating" an instance that's invalid under an invalid schema
|
||||
just shows the schema error.
|
||||
"""
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"type": 12, "minimum": 30}',
|
||||
some_instance="13",
|
||||
),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
12: 12 is not valid under any of the given schemas
|
||||
""",
|
||||
)
|
||||
|
||||
def test_invalid_schema_with_invalid_instance_pretty_output(self):
|
||||
instance, schema = 13, {"type": 12, "minimum": 30}
|
||||
|
||||
with self.assertRaises(SchemaError) as e:
|
||||
validate(schema=schema, instance=instance)
|
||||
error = str(e.exception)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema=json.dumps(schema),
|
||||
some_instance=json.dumps(instance),
|
||||
),
|
||||
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=(
|
||||
"===[SchemaError]===(some_schema)===\n\n"
|
||||
+ str(error)
|
||||
+ "\n-----------------------------\n"
|
||||
),
|
||||
)
|
||||
|
||||
def test_invalid_instance_continues_with_the_rest(self):
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema='{"minimum": 30}',
|
||||
first_instance="not valid JSON!",
|
||||
second_instance="12",
|
||||
),
|
||||
argv=[
|
||||
"-i", "first_instance",
|
||||
"-i", "second_instance",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
Failed to parse 'first_instance': {}
|
||||
12: 12 is less than the minimum of 30
|
||||
""".format(_message_for("not valid JSON!")),
|
||||
)
|
||||
|
||||
def test_custom_error_format_applies_to_schema_errors(self):
|
||||
instance, schema = 13, {"type": 12, "minimum": 30}
|
||||
|
||||
with self.assertRaises(SchemaError):
|
||||
validate(schema=schema, instance=instance)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=json.dumps(schema)),
|
||||
|
||||
argv=[
|
||||
"--error-format", ":{error.message}._-_.{error.instance}:",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr=":12 is not valid under any of the given schemas._-_.12:",
|
||||
)
|
||||
|
||||
def test_instance_is_invalid_JSON(self):
|
||||
instance = "not valid JSON!"
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}", some_instance=instance),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=f"""\
|
||||
Failed to parse 'some_instance': {_message_for(instance)}
|
||||
""",
|
||||
)
|
||||
|
||||
def test_instance_is_invalid_JSON_pretty_output(self):
|
||||
stdout, stderr = self.run_cli(
|
||||
files=dict(
|
||||
some_schema="{}",
|
||||
some_instance="not valid JSON!",
|
||||
),
|
||||
|
||||
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
)
|
||||
self.assertFalse(stdout)
|
||||
self.assertIn(
|
||||
"(some_instance)===\n\nTraceback (most recent call last):\n",
|
||||
stderr,
|
||||
)
|
||||
self.assertNotIn("some_schema", stderr)
|
||||
|
||||
def test_instance_is_invalid_JSON_on_stdin(self):
|
||||
instance = "not valid JSON!"
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}"),
|
||||
stdin=StringIO(instance),
|
||||
|
||||
argv=["some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=f"""\
|
||||
Failed to parse <stdin>: {_message_for(instance)}
|
||||
""",
|
||||
)
|
||||
|
||||
def test_instance_is_invalid_JSON_on_stdin_pretty_output(self):
|
||||
stdout, stderr = self.run_cli(
|
||||
files=dict(some_schema="{}"),
|
||||
stdin=StringIO("not valid JSON!"),
|
||||
|
||||
argv=["--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
)
|
||||
self.assertFalse(stdout)
|
||||
self.assertIn(
|
||||
"(<stdin>)===\n\nTraceback (most recent call last):\n",
|
||||
stderr,
|
||||
)
|
||||
self.assertNotIn("some_schema", stderr)
|
||||
|
||||
def test_schema_is_invalid_JSON(self):
|
||||
schema = "not valid JSON!"
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema),
|
||||
|
||||
argv=["some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=f"""\
|
||||
Failed to parse 'some_schema': {_message_for(schema)}
|
||||
""",
|
||||
)
|
||||
|
||||
def test_schema_is_invalid_JSON_pretty_output(self):
|
||||
stdout, stderr = self.run_cli(
|
||||
files=dict(some_schema="not valid JSON!"),
|
||||
|
||||
argv=["--output", "pretty", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
)
|
||||
self.assertFalse(stdout)
|
||||
self.assertIn(
|
||||
"(some_schema)===\n\nTraceback (most recent call last):\n",
|
||||
stderr,
|
||||
)
|
||||
|
||||
def test_schema_and_instance_are_both_invalid_JSON(self):
|
||||
"""
|
||||
Only the schema error is reported, as we abort immediately.
|
||||
"""
|
||||
schema, instance = "not valid JSON!", "also not valid JSON!"
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema, some_instance=instance),
|
||||
|
||||
argv=["some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr=f"""\
|
||||
Failed to parse 'some_schema': {_message_for(schema)}
|
||||
""",
|
||||
)
|
||||
|
||||
def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self):
|
||||
"""
|
||||
Only the schema error is reported, as we abort immediately.
|
||||
"""
|
||||
stdout, stderr = self.run_cli(
|
||||
files=dict(
|
||||
some_schema="not valid JSON!",
|
||||
some_instance="also not valid JSON!",
|
||||
),
|
||||
|
||||
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
)
|
||||
self.assertFalse(stdout)
|
||||
self.assertIn(
|
||||
"(some_schema)===\n\nTraceback (most recent call last):\n",
|
||||
stderr,
|
||||
)
|
||||
self.assertNotIn("some_instance", stderr)
|
||||
|
||||
def test_instance_does_not_exist(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}"),
|
||||
argv=["-i", "nonexisting_instance", "some_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
'nonexisting_instance' does not exist.
|
||||
""",
|
||||
)
|
||||
|
||||
def test_instance_does_not_exist_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}"),
|
||||
argv=[
|
||||
"--output", "pretty",
|
||||
"-i", "nonexisting_instance",
|
||||
"some_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[FileNotFoundError]===(nonexisting_instance)===
|
||||
|
||||
'nonexisting_instance' does not exist.
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_schema_does_not_exist(self):
|
||||
self.assertOutputs(
|
||||
argv=["nonexisting_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="'nonexisting_schema' does not exist.\n",
|
||||
)
|
||||
|
||||
def test_schema_does_not_exist_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
argv=["--output", "pretty", "nonexisting_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[FileNotFoundError]===(nonexisting_schema)===
|
||||
|
||||
'nonexisting_schema' does not exist.
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_neither_instance_nor_schema_exist(self):
|
||||
self.assertOutputs(
|
||||
argv=["-i", "nonexisting_instance", "nonexisting_schema"],
|
||||
|
||||
exit_code=1,
|
||||
stderr="'nonexisting_schema' does not exist.\n",
|
||||
)
|
||||
|
||||
def test_neither_instance_nor_schema_exist_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
argv=[
|
||||
"--output", "pretty",
|
||||
"-i", "nonexisting_instance",
|
||||
"nonexisting_schema",
|
||||
],
|
||||
|
||||
exit_code=1,
|
||||
stderr="""\
|
||||
===[FileNotFoundError]===(nonexisting_schema)===
|
||||
|
||||
'nonexisting_schema' does not exist.
|
||||
-----------------------------
|
||||
""",
|
||||
)
|
||||
|
||||
def test_successful_validation(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}", some_instance="{}"),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
stdout="",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}", some_instance="{}"),
|
||||
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
|
||||
stdout="===[SUCCESS]===(some_instance)===\n",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_of_stdin(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}"),
|
||||
stdin=StringIO("{}"),
|
||||
argv=["some_schema"],
|
||||
stdout="",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_of_stdin_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}"),
|
||||
stdin=StringIO("{}"),
|
||||
argv=["--output", "pretty", "some_schema"],
|
||||
stdout="===[SUCCESS]===(<stdin>)===\n",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_of_just_the_schema(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}", some_instance="{}"),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
stdout="",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_of_just_the_schema_pretty_output(self):
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema="{}", some_instance="{}"),
|
||||
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
|
||||
stdout="===[SUCCESS]===(some_instance)===\n",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_successful_validation_via_explicit_base_uri(self):
|
||||
ref_schema_file = tempfile.NamedTemporaryFile(delete=False) # noqa: SIM115
|
||||
ref_schema_file.close()
|
||||
self.addCleanup(os.remove, ref_schema_file.name)
|
||||
|
||||
ref_path = Path(ref_schema_file.name)
|
||||
ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}')
|
||||
|
||||
schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}'
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema, some_instance="1"),
|
||||
argv=[
|
||||
"-i", "some_instance",
|
||||
"--base-uri", ref_path.parent.as_uri() + "/",
|
||||
"some_schema",
|
||||
],
|
||||
stdout="",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
def test_unsuccessful_validation_via_explicit_base_uri(self):
|
||||
ref_schema_file = tempfile.NamedTemporaryFile(delete=False) # noqa: SIM115
|
||||
ref_schema_file.close()
|
||||
self.addCleanup(os.remove, ref_schema_file.name)
|
||||
|
||||
ref_path = Path(ref_schema_file.name)
|
||||
ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}')
|
||||
|
||||
schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}'
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema, some_instance='"1"'),
|
||||
argv=[
|
||||
"-i", "some_instance",
|
||||
"--base-uri", ref_path.parent.as_uri() + "/",
|
||||
"some_schema",
|
||||
],
|
||||
exit_code=1,
|
||||
stdout="",
|
||||
stderr="1: '1' is not of type 'integer'\n",
|
||||
)
|
||||
|
||||
def test_nonexistent_file_with_explicit_base_uri(self):
|
||||
schema = '{"$ref": "someNonexistentFile.json#definitions/num"}'
|
||||
instance = "1"
|
||||
|
||||
with self.assertRaises(_RefResolutionError) as e:
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema=schema,
|
||||
some_instance=instance,
|
||||
),
|
||||
argv=[
|
||||
"-i", "some_instance",
|
||||
"--base-uri", Path.cwd().as_uri(),
|
||||
"some_schema",
|
||||
],
|
||||
)
|
||||
error = str(e.exception)
|
||||
self.assertIn(f"{os.sep}someNonexistentFile.json'", error)
|
||||
|
||||
def test_invalid_explicit_base_uri(self):
|
||||
schema = '{"$ref": "foo.json#definitions/num"}'
|
||||
instance = "1"
|
||||
|
||||
with self.assertRaises(_RefResolutionError) as e:
|
||||
self.assertOutputs(
|
||||
files=dict(
|
||||
some_schema=schema,
|
||||
some_instance=instance,
|
||||
),
|
||||
argv=[
|
||||
"-i", "some_instance",
|
||||
"--base-uri", "not@UR1",
|
||||
"some_schema",
|
||||
],
|
||||
)
|
||||
error = str(e.exception)
|
||||
self.assertEqual(
|
||||
error, "unknown url type: 'foo.json'",
|
||||
)
|
||||
|
||||
def test_it_validates_using_the_latest_validator_when_unspecified(self):
|
||||
# There isn't a better way now I can think of to ensure that the
|
||||
# latest version was used, given that the call to validator_for
|
||||
# is hidden inside the CLI, so guard that that's the case, and
|
||||
# this test will have to be updated when versions change until
|
||||
# we can think of a better way to ensure this behavior.
|
||||
self.assertIs(Draft202012Validator, _LATEST_VERSION)
|
||||
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema='{"const": "check"}', some_instance='"a"'),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
exit_code=1,
|
||||
stdout="",
|
||||
stderr="a: 'check' was expected\n",
|
||||
)
|
||||
|
||||
def test_it_validates_using_draft7_when_specified(self):
|
||||
"""
|
||||
Specifically, `const` validation applies for Draft 7.
|
||||
"""
|
||||
schema = """
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"const": "check"
|
||||
}
|
||||
"""
|
||||
instance = '"foo"'
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema, some_instance=instance),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
exit_code=1,
|
||||
stdout="",
|
||||
stderr="foo: 'check' was expected\n",
|
||||
)
|
||||
|
||||
def test_it_validates_using_draft4_when_specified(self):
|
||||
"""
|
||||
Specifically, `const` validation *does not* apply for Draft 4.
|
||||
"""
|
||||
schema = """
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"const": "check"
|
||||
}
|
||||
"""
|
||||
instance = '"foo"'
|
||||
self.assertOutputs(
|
||||
files=dict(some_schema=schema, some_instance=instance),
|
||||
argv=["-i", "some_instance", "some_schema"],
|
||||
stdout="",
|
||||
stderr="",
|
||||
)
|
||||
|
||||
|
||||
class TestParser(TestCase):
|
||||
|
||||
FakeValidator = fake_validator()
|
||||
|
||||
def test_find_validator_by_fully_qualified_object_name(self):
|
||||
arguments = cli.parse_args(
|
||||
[
|
||||
"--validator",
|
||||
"jsonschema.tests.test_cli.TestParser.FakeValidator",
|
||||
"--instance", "mem://some/instance",
|
||||
"mem://some/schema",
|
||||
],
|
||||
)
|
||||
self.assertIs(arguments["validator"], self.FakeValidator)
|
||||
|
||||
def test_find_validator_in_jsonschema(self):
|
||||
arguments = cli.parse_args(
|
||||
[
|
||||
"--validator", "Draft4Validator",
|
||||
"--instance", "mem://some/instance",
|
||||
"mem://some/schema",
|
||||
],
|
||||
)
|
||||
self.assertIs(arguments["validator"], Draft4Validator)
|
||||
|
||||
def cli_output_for(self, *argv):
|
||||
stdout, stderr = StringIO(), StringIO()
|
||||
with redirect_stdout(stdout), redirect_stderr(stderr): # noqa: SIM117
|
||||
with self.assertRaises(SystemExit):
|
||||
cli.parse_args(argv)
|
||||
return stdout.getvalue(), stderr.getvalue()
|
||||
|
||||
def test_unknown_output(self):
|
||||
stdout, stderr = self.cli_output_for(
|
||||
"--output", "foo",
|
||||
"mem://some/schema",
|
||||
)
|
||||
self.assertIn("invalid choice: 'foo'", stderr)
|
||||
self.assertFalse(stdout)
|
||||
|
||||
def test_useless_error_format(self):
|
||||
stdout, stderr = self.cli_output_for(
|
||||
"--output", "pretty",
|
||||
"--error-format", "foo",
|
||||
"mem://some/schema",
|
||||
)
|
||||
self.assertIn(
|
||||
"--error-format can only be used with --output plain",
|
||||
stderr,
|
||||
)
|
||||
self.assertFalse(stdout)
|
||||
|
||||
|
||||
class TestCLIIntegration(TestCase):
|
||||
def test_license(self):
|
||||
our_metadata = metadata.metadata("jsonschema")
|
||||
self.assertEqual(our_metadata.get("License-Expression"), "MIT")
|
||||
|
||||
def test_version(self):
|
||||
version = subprocess.check_output(
|
||||
[sys.executable, "-W", "ignore", "-m", "jsonschema", "--version"],
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
version = version.decode("utf-8").strip()
|
||||
self.assertEqual(version, metadata.version("jsonschema"))
|
||||
|
||||
def test_no_arguments_shows_usage_notes(self):
|
||||
output = subprocess.check_output(
|
||||
[sys.executable, "-m", "jsonschema"],
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
output_for_help = subprocess.check_output(
|
||||
[sys.executable, "-m", "jsonschema", "--help"],
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
self.assertEqual(output, output_for_help)
|
||||
@@ -0,0 +1,432 @@
|
||||
from contextlib import contextmanager
|
||||
from io import BytesIO
|
||||
from unittest import TestCase, mock
|
||||
import importlib.metadata
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.request
|
||||
|
||||
import referencing.exceptions
|
||||
|
||||
from jsonschema import FormatChecker, exceptions, protocols, validators
|
||||
|
||||
|
||||
class TestDeprecations(TestCase):
|
||||
def test_version(self):
|
||||
"""
|
||||
As of v4.0.0, __version__ is deprecated in favor of importlib.metadata.
|
||||
"""
|
||||
|
||||
message = "Accessing jsonschema.__version__ is deprecated"
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import __version__
|
||||
|
||||
self.assertEqual(__version__, importlib.metadata.version("jsonschema"))
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_validators_ErrorTree(self):
|
||||
"""
|
||||
As of v4.0.0, importing ErrorTree from jsonschema.validators is
|
||||
deprecated in favor of doing so from jsonschema.exceptions.
|
||||
"""
|
||||
|
||||
message = "Importing ErrorTree from jsonschema.validators is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema.validators import ErrorTree
|
||||
|
||||
self.assertEqual(ErrorTree, exceptions.ErrorTree)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_import_ErrorTree(self):
|
||||
"""
|
||||
As of v4.18.0, importing ErrorTree from the package root is
|
||||
deprecated in favor of doing so from jsonschema.exceptions.
|
||||
"""
|
||||
|
||||
message = "Importing ErrorTree directly from the jsonschema package "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import ErrorTree
|
||||
|
||||
self.assertEqual(ErrorTree, exceptions.ErrorTree)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_ErrorTree_setitem(self):
|
||||
"""
|
||||
As of v4.20.0, setting items on an ErrorTree is deprecated.
|
||||
"""
|
||||
|
||||
e = exceptions.ValidationError("some error", path=["foo"])
|
||||
tree = exceptions.ErrorTree()
|
||||
subtree = exceptions.ErrorTree(errors=[e])
|
||||
|
||||
message = "ErrorTree.__setitem__ is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
tree["foo"] = subtree
|
||||
|
||||
self.assertEqual(tree["foo"], subtree)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_import_FormatError(self):
|
||||
"""
|
||||
As of v4.18.0, importing FormatError from the package root is
|
||||
deprecated in favor of doing so from jsonschema.exceptions.
|
||||
"""
|
||||
|
||||
message = "Importing FormatError directly from the jsonschema package "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import FormatError
|
||||
|
||||
self.assertEqual(FormatError, exceptions.FormatError)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_import_Validator(self):
|
||||
"""
|
||||
As of v4.19.0, importing Validator from the package root is
|
||||
deprecated in favor of doing so from jsonschema.protocols.
|
||||
"""
|
||||
|
||||
message = "Importing Validator directly from the jsonschema package "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import Validator
|
||||
|
||||
self.assertEqual(Validator, protocols.Validator)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_validators_validators(self):
|
||||
"""
|
||||
As of v4.0.0, accessing jsonschema.validators.validators is
|
||||
deprecated.
|
||||
"""
|
||||
|
||||
message = "Accessing jsonschema.validators.validators is deprecated"
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
value = validators.validators
|
||||
|
||||
self.assertEqual(value, validators._VALIDATORS)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_validators_meta_schemas(self):
|
||||
"""
|
||||
As of v4.0.0, accessing jsonschema.validators.meta_schemas is
|
||||
deprecated.
|
||||
"""
|
||||
|
||||
message = "Accessing jsonschema.validators.meta_schemas is deprecated"
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
value = validators.meta_schemas
|
||||
|
||||
self.assertEqual(value, validators._META_SCHEMAS)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_RefResolver_in_scope(self):
|
||||
"""
|
||||
As of v4.0.0, RefResolver.in_scope is deprecated.
|
||||
"""
|
||||
|
||||
resolver = validators._RefResolver.from_schema({})
|
||||
message = "jsonschema.RefResolver.in_scope is deprecated "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w: # noqa: SIM117
|
||||
with resolver.in_scope("foo"):
|
||||
pass
|
||||
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_Validator_is_valid_two_arguments(self):
|
||||
"""
|
||||
As of v4.0.0, calling is_valid with two arguments (to provide a
|
||||
different schema) is deprecated.
|
||||
"""
|
||||
|
||||
validator = validators.Draft7Validator({})
|
||||
message = "Passing a schema to Validator.is_valid is deprecated "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
result = validator.is_valid("foo", {"type": "number"})
|
||||
|
||||
self.assertFalse(result)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_Validator_iter_errors_two_arguments(self):
|
||||
"""
|
||||
As of v4.0.0, calling iter_errors with two arguments (to provide a
|
||||
different schema) is deprecated.
|
||||
"""
|
||||
|
||||
validator = validators.Draft7Validator({})
|
||||
message = "Passing a schema to Validator.iter_errors is deprecated "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
error, = validator.iter_errors("foo", {"type": "number"})
|
||||
|
||||
self.assertEqual(error.validator, "type")
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_Validator_resolver(self):
|
||||
"""
|
||||
As of v4.18.0, accessing Validator.resolver is deprecated.
|
||||
"""
|
||||
|
||||
validator = validators.Draft7Validator({})
|
||||
message = "Accessing Draft7Validator.resolver is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
self.assertIsInstance(validator.resolver, validators._RefResolver)
|
||||
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_RefResolver(self):
|
||||
"""
|
||||
As of v4.18.0, RefResolver is fully deprecated.
|
||||
"""
|
||||
|
||||
message = "jsonschema.RefResolver is deprecated"
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import RefResolver
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema.validators import RefResolver # noqa: F401
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_RefResolutionError(self):
|
||||
"""
|
||||
As of v4.18.0, RefResolutionError is deprecated in favor of directly
|
||||
catching errors from the referencing library.
|
||||
"""
|
||||
|
||||
message = "jsonschema.exceptions.RefResolutionError is deprecated"
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import RefResolutionError
|
||||
|
||||
self.assertEqual(RefResolutionError, exceptions._RefResolutionError)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema.exceptions import RefResolutionError
|
||||
|
||||
self.assertEqual(RefResolutionError, exceptions._RefResolutionError)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_catching_Unresolvable_directly(self):
|
||||
"""
|
||||
This behavior is the intended behavior (i.e. it's not deprecated), but
|
||||
given we do "tricksy" things in the iterim to wrap exceptions in a
|
||||
multiple inheritance subclass, we need to be extra sure it works and
|
||||
stays working.
|
||||
"""
|
||||
validator = validators.Draft202012Validator({"$ref": "urn:nothing"})
|
||||
|
||||
with self.assertRaises(referencing.exceptions.Unresolvable) as e:
|
||||
validator.validate(12)
|
||||
|
||||
expected = referencing.exceptions.Unresolvable(ref="urn:nothing")
|
||||
self.assertEqual(
|
||||
(e.exception, str(e.exception)),
|
||||
(expected, "Unresolvable: urn:nothing"),
|
||||
)
|
||||
|
||||
def test_catching_Unresolvable_via_RefResolutionError(self):
|
||||
"""
|
||||
Until RefResolutionError is removed, it is still possible to catch
|
||||
exceptions from reference resolution using it, even though they may
|
||||
have been raised by referencing.
|
||||
"""
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
from jsonschema import RefResolutionError
|
||||
|
||||
validator = validators.Draft202012Validator({"$ref": "urn:nothing"})
|
||||
|
||||
with self.assertRaises(referencing.exceptions.Unresolvable) as u:
|
||||
validator.validate(12)
|
||||
|
||||
with self.assertRaises(RefResolutionError) as e:
|
||||
validator.validate(12)
|
||||
|
||||
self.assertEqual(
|
||||
(e.exception, str(e.exception)),
|
||||
(u.exception, "Unresolvable: urn:nothing"),
|
||||
)
|
||||
|
||||
def test_WrappedReferencingError_hashability(self):
|
||||
"""
|
||||
Ensure the wrapped referencing errors are hashable when possible.
|
||||
"""
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
from jsonschema import RefResolutionError
|
||||
|
||||
validator = validators.Draft202012Validator({"$ref": "urn:nothing"})
|
||||
|
||||
with self.assertRaises(referencing.exceptions.Unresolvable) as u:
|
||||
validator.validate(12)
|
||||
|
||||
with self.assertRaises(RefResolutionError) as e:
|
||||
validator.validate(12)
|
||||
|
||||
self.assertIn(e.exception, {u.exception})
|
||||
self.assertIn(u.exception, {e.exception})
|
||||
|
||||
def test_Validator_subclassing(self):
|
||||
"""
|
||||
As of v4.12.0, subclassing a validator class produces an explicit
|
||||
deprecation warning.
|
||||
|
||||
This was never intended to be public API (and some comments over the
|
||||
years in issues said so, but obviously that's not a great way to make
|
||||
sure it's followed).
|
||||
|
||||
A future version will explicitly raise an error.
|
||||
"""
|
||||
|
||||
message = "Subclassing validator classes is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
class Subclass(validators.Draft202012Validator):
|
||||
pass
|
||||
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
class AnotherSubclass(validators.create(meta_schema={})):
|
||||
pass
|
||||
|
||||
def test_FormatChecker_cls_checks(self):
|
||||
"""
|
||||
As of v4.14.0, FormatChecker.cls_checks is deprecated without
|
||||
replacement.
|
||||
"""
|
||||
|
||||
self.addCleanup(FormatChecker.checkers.pop, "boom", None)
|
||||
|
||||
message = "FormatChecker.cls_checks "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
FormatChecker.cls_checks("boom")
|
||||
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
def test_draftN_format_checker(self):
|
||||
"""
|
||||
As of v4.16.0, accessing jsonschema.draftn_format_checker is deprecated
|
||||
in favor of Validator.FORMAT_CHECKER.
|
||||
"""
|
||||
|
||||
message = "Accessing jsonschema.draft202012_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft202012_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft202012_format_checker,
|
||||
validators.Draft202012Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
message = "Accessing jsonschema.draft201909_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft201909_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft201909_format_checker,
|
||||
validators.Draft201909Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
message = "Accessing jsonschema.draft7_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft7_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft7_format_checker,
|
||||
validators.Draft7Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
message = "Accessing jsonschema.draft6_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft6_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft6_format_checker,
|
||||
validators.Draft6Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
message = "Accessing jsonschema.draft4_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft4_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft4_format_checker,
|
||||
validators.Draft4Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
message = "Accessing jsonschema.draft3_format_checker is "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
from jsonschema import draft3_format_checker
|
||||
|
||||
self.assertIs(
|
||||
draft3_format_checker,
|
||||
validators.Draft3Validator.FORMAT_CHECKER,
|
||||
)
|
||||
self.assertEqual(w.filename, __file__)
|
||||
|
||||
with self.assertRaises(ImportError):
|
||||
from jsonschema import draft1234_format_checker # noqa: F401
|
||||
|
||||
def test_import_cli(self):
|
||||
"""
|
||||
As of v4.17.0, importing jsonschema.cli is deprecated.
|
||||
"""
|
||||
|
||||
message = "The jsonschema CLI is deprecated and will be removed "
|
||||
with self.assertWarnsRegex(DeprecationWarning, message) as w:
|
||||
import jsonschema.cli
|
||||
importlib.reload(jsonschema.cli)
|
||||
|
||||
self.assertEqual(w.filename, importlib.__file__)
|
||||
|
||||
def test_cli(self):
|
||||
"""
|
||||
As of v4.17.0, the jsonschema CLI is deprecated.
|
||||
"""
|
||||
|
||||
process = subprocess.run(
|
||||
[sys.executable, "-m", "jsonschema"],
|
||||
capture_output=True,
|
||||
check=True,
|
||||
)
|
||||
self.assertIn(b"The jsonschema CLI is deprecated ", process.stderr)
|
||||
|
||||
def test_automatic_remote_retrieval(self):
|
||||
"""
|
||||
Automatic retrieval of remote references is deprecated as of v4.18.0.
|
||||
"""
|
||||
ref = "http://bar#/$defs/baz"
|
||||
schema = {"$defs": {"baz": {"type": "integer"}}}
|
||||
|
||||
if "requests" in sys.modules: # pragma: no cover
|
||||
self.addCleanup(
|
||||
sys.modules.__setitem__, "requests", sys.modules["requests"],
|
||||
)
|
||||
sys.modules["requests"] = None
|
||||
|
||||
@contextmanager
|
||||
def fake_urlopen(request):
|
||||
self.assertIsInstance(request, urllib.request.Request)
|
||||
self.assertEqual(request.full_url, "http://bar")
|
||||
|
||||
# Ha ha urllib.request.Request "normalizes" header names and
|
||||
# Request.get_header does not also normalize them...
|
||||
(header, value), = request.header_items()
|
||||
self.assertEqual(header.lower(), "user-agent")
|
||||
self.assertEqual(
|
||||
value, "python-jsonschema (deprecated $ref resolution)",
|
||||
)
|
||||
yield BytesIO(json.dumps(schema).encode("utf8"))
|
||||
|
||||
validator = validators.Draft202012Validator({"$ref": ref})
|
||||
|
||||
message = "Automatically retrieving remote references "
|
||||
patch = mock.patch.object(urllib.request, "urlopen", new=fake_urlopen)
|
||||
|
||||
with patch, self.assertWarnsRegex(DeprecationWarning, message):
|
||||
self.assertEqual(
|
||||
(validator.is_valid({}), validator.is_valid(37)),
|
||||
(False, True),
|
||||
)
|
||||
@@ -0,0 +1,759 @@
|
||||
from unittest import TestCase
|
||||
import textwrap
|
||||
|
||||
import jsonpath_ng
|
||||
|
||||
from jsonschema import exceptions
|
||||
from jsonschema.validators import _LATEST_VERSION
|
||||
|
||||
|
||||
class TestBestMatch(TestCase):
|
||||
def best_match_of(self, instance, schema):
|
||||
errors = list(_LATEST_VERSION(schema).iter_errors(instance))
|
||||
msg = f"No errors found for {instance} under {schema!r}!"
|
||||
self.assertTrue(errors, msg=msg)
|
||||
|
||||
best = exceptions.best_match(iter(errors))
|
||||
reversed_best = exceptions.best_match(reversed(errors))
|
||||
|
||||
self.assertEqual(
|
||||
best._contents(),
|
||||
reversed_best._contents(),
|
||||
f"No consistent best match!\nGot: {best}\n\nThen: {reversed_best}",
|
||||
)
|
||||
return best
|
||||
|
||||
def test_shallower_errors_are_better_matches(self):
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"minProperties": 2,
|
||||
"properties": {"bar": {"type": "object"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": {"bar": []}}, schema=schema)
|
||||
self.assertEqual(best.validator, "minProperties")
|
||||
|
||||
def test_oneOf_and_anyOf_are_weak_matches(self):
|
||||
"""
|
||||
A property you *must* match is probably better than one you have to
|
||||
match a part of.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"minProperties": 2,
|
||||
"anyOf": [{"type": "string"}, {"type": "number"}],
|
||||
"oneOf": [{"type": "string"}, {"type": "number"}],
|
||||
}
|
||||
best = self.best_match_of(instance={}, schema=schema)
|
||||
self.assertEqual(best.validator, "minProperties")
|
||||
|
||||
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
|
||||
"""
|
||||
If the most relevant error is an anyOf, then we traverse its context
|
||||
and select the otherwise *least* relevant error, since in this case
|
||||
that means the most specific, deep, error inside the instance.
|
||||
|
||||
I.e. since only one of the schemas must match, we look for the most
|
||||
relevant one.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema)
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_no_anyOf_traversal_for_equally_relevant_errors(self):
|
||||
"""
|
||||
We don't traverse into an anyOf (as above) if all of its context errors
|
||||
seem to be equally "wrong" against the instance.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
{"type": "object"},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[], schema=schema)
|
||||
self.assertEqual(best.validator, "anyOf")
|
||||
|
||||
def test_anyOf_traversal_for_single_equally_relevant_error(self):
|
||||
"""
|
||||
We *do* traverse anyOf with a single nested error, even though it is
|
||||
vacuously equally relevant to itself.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[], schema=schema)
|
||||
self.assertEqual(best.validator, "type")
|
||||
|
||||
def test_anyOf_traversal_for_single_sibling_errors(self):
|
||||
"""
|
||||
We *do* traverse anyOf with a single subschema that fails multiple
|
||||
times (e.g. on multiple items).
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"anyOf": [
|
||||
{"items": {"const": 37}},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[12, 12], schema=schema)
|
||||
self.assertEqual(best.validator, "const")
|
||||
|
||||
def test_anyOf_traversal_for_non_type_matching_sibling_errors(self):
|
||||
"""
|
||||
We *do* traverse anyOf with multiple subschemas when one does not type
|
||||
match.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"anyOf": [
|
||||
{"type": "object"},
|
||||
{"items": {"const": 37}},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[12, 12], schema=schema)
|
||||
self.assertEqual(best.validator, "const")
|
||||
|
||||
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
|
||||
"""
|
||||
If the most relevant error is an oneOf, then we traverse its context
|
||||
and select the otherwise *least* relevant error, since in this case
|
||||
that means the most specific, deep, error inside the instance.
|
||||
|
||||
I.e. since only one of the schemas must match, we look for the most
|
||||
relevant one.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema)
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_no_oneOf_traversal_for_equally_relevant_errors(self):
|
||||
"""
|
||||
We don't traverse into an oneOf (as above) if all of its context errors
|
||||
seem to be equally "wrong" against the instance.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
{"type": "object"},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[], schema=schema)
|
||||
self.assertEqual(best.validator, "oneOf")
|
||||
|
||||
def test_oneOf_traversal_for_single_equally_relevant_error(self):
|
||||
"""
|
||||
We *do* traverse oneOf with a single nested error, even though it is
|
||||
vacuously equally relevant to itself.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[], schema=schema)
|
||||
self.assertEqual(best.validator, "type")
|
||||
|
||||
def test_oneOf_traversal_for_single_sibling_errors(self):
|
||||
"""
|
||||
We *do* traverse oneOf with a single subschema that fails multiple
|
||||
times (e.g. on multiple items).
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"oneOf": [
|
||||
{"items": {"const": 37}},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[12, 12], schema=schema)
|
||||
self.assertEqual(best.validator, "const")
|
||||
|
||||
def test_oneOf_traversal_for_non_type_matching_sibling_errors(self):
|
||||
"""
|
||||
We *do* traverse oneOf with multiple subschemas when one does not type
|
||||
match.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"oneOf": [
|
||||
{"type": "object"},
|
||||
{"items": {"const": 37}},
|
||||
],
|
||||
}
|
||||
best = self.best_match_of(instance=[12, 12], schema=schema)
|
||||
self.assertEqual(best.validator, "const")
|
||||
|
||||
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
|
||||
"""
|
||||
Now, if the error is allOf, we traverse but select the *most* relevant
|
||||
error from the context, because all schemas here must match anyways.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"allOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema)
|
||||
self.assertEqual(best.validator_value, "string")
|
||||
|
||||
def test_nested_context_for_oneOf(self):
|
||||
"""
|
||||
We traverse into nested contexts (a oneOf containing an error in a
|
||||
nested oneOf here).
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"properties": {
|
||||
"bar": {"type": "array"},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema)
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_it_prioritizes_matching_types(self):
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": "array", "minItems": 2},
|
||||
{"type": "string", "minLength": 10},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": "bar"}, schema=schema)
|
||||
self.assertEqual(best.validator, "minLength")
|
||||
|
||||
reordered = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": "string", "minLength": 10},
|
||||
{"type": "array", "minItems": 2},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": "bar"}, schema=reordered)
|
||||
self.assertEqual(best.validator, "minLength")
|
||||
|
||||
def test_it_prioritizes_matching_union_types(self):
|
||||
schema = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": ["array", "object"], "minItems": 2},
|
||||
{"type": ["integer", "string"], "minLength": 10},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": "bar"}, schema=schema)
|
||||
self.assertEqual(best.validator, "minLength")
|
||||
|
||||
reordered = {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": "string", "minLength": 10},
|
||||
{"type": "array", "minItems": 2},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
best = self.best_match_of(instance={"foo": "bar"}, schema=reordered)
|
||||
self.assertEqual(best.validator, "minLength")
|
||||
|
||||
def test_boolean_schemas(self):
|
||||
schema = {"properties": {"foo": False}}
|
||||
best = self.best_match_of(instance={"foo": "bar"}, schema=schema)
|
||||
self.assertIsNone(best.validator)
|
||||
|
||||
def test_one_error(self):
|
||||
validator = _LATEST_VERSION({"minProperties": 2})
|
||||
error, = validator.iter_errors({})
|
||||
self.assertEqual(
|
||||
exceptions.best_match(validator.iter_errors({})).validator,
|
||||
"minProperties",
|
||||
)
|
||||
|
||||
def test_no_errors(self):
|
||||
validator = _LATEST_VERSION({})
|
||||
self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
|
||||
|
||||
|
||||
class TestByRelevance(TestCase):
|
||||
def test_short_paths_are_better_matches(self):
|
||||
shallow = exceptions.ValidationError("Oh no!", path=["baz"])
|
||||
deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
|
||||
match = max([shallow, deep], key=exceptions.relevance)
|
||||
self.assertIs(match, shallow)
|
||||
|
||||
match = max([deep, shallow], key=exceptions.relevance)
|
||||
self.assertIs(match, shallow)
|
||||
|
||||
def test_global_errors_are_even_better_matches(self):
|
||||
shallow = exceptions.ValidationError("Oh no!", path=[])
|
||||
deep = exceptions.ValidationError("Oh yes!", path=["foo"])
|
||||
|
||||
errors = sorted([shallow, deep], key=exceptions.relevance)
|
||||
self.assertEqual(
|
||||
[list(error.path) for error in errors],
|
||||
[["foo"], []],
|
||||
)
|
||||
|
||||
errors = sorted([deep, shallow], key=exceptions.relevance)
|
||||
self.assertEqual(
|
||||
[list(error.path) for error in errors],
|
||||
[["foo"], []],
|
||||
)
|
||||
|
||||
def test_weak_keywords_are_lower_priority(self):
|
||||
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
|
||||
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
|
||||
|
||||
best_match = exceptions.by_relevance(weak="a")
|
||||
|
||||
match = max([weak, normal], key=best_match)
|
||||
self.assertIs(match, normal)
|
||||
|
||||
match = max([normal, weak], key=best_match)
|
||||
self.assertIs(match, normal)
|
||||
|
||||
def test_strong_keywords_are_higher_priority(self):
|
||||
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
|
||||
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
|
||||
strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
|
||||
|
||||
best_match = exceptions.by_relevance(weak="a", strong="c")
|
||||
|
||||
match = max([weak, normal, strong], key=best_match)
|
||||
self.assertIs(match, strong)
|
||||
|
||||
match = max([strong, normal, weak], key=best_match)
|
||||
self.assertIs(match, strong)
|
||||
|
||||
|
||||
class TestErrorTree(TestCase):
|
||||
def test_it_knows_how_many_total_errors_it_contains(self):
|
||||
# FIXME: #442
|
||||
errors = [
|
||||
exceptions.ValidationError("Something", validator=i)
|
||||
for i in range(8)
|
||||
]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertEqual(tree.total_errors, 8)
|
||||
|
||||
def test_it_contains_an_item_if_the_item_had_an_error(self):
|
||||
errors = [exceptions.ValidationError("a message", path=["bar"])]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertIn("bar", tree)
|
||||
|
||||
def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
|
||||
errors = [exceptions.ValidationError("a message", path=["bar"])]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertNotIn("foo", tree)
|
||||
|
||||
def test_keywords_that_failed_appear_in_errors_dict(self):
|
||||
error = exceptions.ValidationError("a message", validator="foo")
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertEqual(tree.errors, {"foo": error})
|
||||
|
||||
def test_it_creates_a_child_tree_for_each_nested_path(self):
|
||||
errors = [
|
||||
exceptions.ValidationError("a bar message", path=["bar"]),
|
||||
exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
|
||||
]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertIn(0, tree["bar"])
|
||||
self.assertNotIn(1, tree["bar"])
|
||||
|
||||
def test_children_have_their_errors_dicts_built(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
|
||||
exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
|
||||
)
|
||||
tree = exceptions.ErrorTree([e1, e2])
|
||||
self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
|
||||
|
||||
def test_multiple_errors_with_instance(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1"),
|
||||
exceptions.ValidationError(
|
||||
"2",
|
||||
validator="quux",
|
||||
path=["foobar", 2],
|
||||
instance="i2"),
|
||||
)
|
||||
exceptions.ErrorTree([e1, e2])
|
||||
|
||||
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
|
||||
error = exceptions.ValidationError("123", validator="foo", instance=[])
|
||||
tree = exceptions.ErrorTree([error])
|
||||
|
||||
with self.assertRaises(IndexError):
|
||||
tree[0]
|
||||
|
||||
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
|
||||
"""
|
||||
If a keyword refers to a path that isn't in the instance, the
|
||||
tree still properly returns a subtree for that path.
|
||||
"""
|
||||
|
||||
error = exceptions.ValidationError(
|
||||
"a message", validator="foo", instance={}, path=["foo"],
|
||||
)
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
|
||||
|
||||
def test_iter(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1"),
|
||||
exceptions.ValidationError(
|
||||
"2",
|
||||
validator="quux",
|
||||
path=["foobar", 2],
|
||||
instance="i2"),
|
||||
)
|
||||
tree = exceptions.ErrorTree([e1, e2])
|
||||
self.assertEqual(set(tree), {"bar", "foobar"})
|
||||
|
||||
def test_repr_single(self):
|
||||
error = exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1",
|
||||
)
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertEqual(repr(tree), "<ErrorTree (1 total error)>")
|
||||
|
||||
def test_repr_multiple(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1"),
|
||||
exceptions.ValidationError(
|
||||
"2",
|
||||
validator="quux",
|
||||
path=["foobar", 2],
|
||||
instance="i2"),
|
||||
)
|
||||
tree = exceptions.ErrorTree([e1, e2])
|
||||
self.assertEqual(repr(tree), "<ErrorTree (2 total errors)>")
|
||||
|
||||
def test_repr_empty(self):
|
||||
tree = exceptions.ErrorTree([])
|
||||
self.assertEqual(repr(tree), "<ErrorTree (0 total errors)>")
|
||||
|
||||
|
||||
class TestErrorInitReprStr(TestCase):
|
||||
def make_error(self, **kwargs):
|
||||
defaults = dict(
|
||||
message="hello",
|
||||
validator="type",
|
||||
validator_value="string",
|
||||
instance=5,
|
||||
schema={"type": "string"},
|
||||
)
|
||||
defaults.update(kwargs)
|
||||
return exceptions.ValidationError(**defaults)
|
||||
|
||||
def assertShows(self, expected, **kwargs):
|
||||
expected = textwrap.dedent(expected).rstrip("\n")
|
||||
|
||||
error = self.make_error(**kwargs)
|
||||
message_line, _, rest = str(error).partition("\n")
|
||||
self.assertEqual(message_line, error.message)
|
||||
self.assertEqual(rest, expected)
|
||||
|
||||
def test_it_calls_super_and_sets_args(self):
|
||||
error = self.make_error()
|
||||
self.assertGreater(len(error.args), 1)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual(
|
||||
repr(exceptions.ValidationError(message="Hello!")),
|
||||
"<ValidationError: 'Hello!'>",
|
||||
)
|
||||
|
||||
def test_unset_error(self):
|
||||
error = exceptions.ValidationError("message")
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
kwargs = {
|
||||
"validator": "type",
|
||||
"validator_value": "string",
|
||||
"instance": 5,
|
||||
"schema": {"type": "string"},
|
||||
}
|
||||
# Just the message should show if any of the attributes are unset
|
||||
for attr in kwargs:
|
||||
k = dict(kwargs)
|
||||
del k[attr]
|
||||
error = exceptions.ValidationError("message", **k)
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
def test_empty_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating 'type' in schema:
|
||||
{'type': 'string'}
|
||||
|
||||
On instance:
|
||||
5
|
||||
""",
|
||||
path=[],
|
||||
schema_path=[],
|
||||
)
|
||||
|
||||
def test_one_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating 'type' in schema:
|
||||
{'type': 'string'}
|
||||
|
||||
On instance[0]:
|
||||
5
|
||||
""",
|
||||
path=[0],
|
||||
schema_path=["items"],
|
||||
)
|
||||
|
||||
def test_multiple_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating 'type' in schema['items'][0]:
|
||||
{'type': 'string'}
|
||||
|
||||
On instance[0]['a']:
|
||||
5
|
||||
""",
|
||||
path=[0, "a"],
|
||||
schema_path=["items", 0, 1],
|
||||
)
|
||||
|
||||
def test_uses_pprint(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating 'maxLength' in schema:
|
||||
{0: 0,
|
||||
1: 1,
|
||||
2: 2,
|
||||
3: 3,
|
||||
4: 4,
|
||||
5: 5,
|
||||
6: 6,
|
||||
7: 7,
|
||||
8: 8,
|
||||
9: 9,
|
||||
10: 10,
|
||||
11: 11,
|
||||
12: 12,
|
||||
13: 13,
|
||||
14: 14,
|
||||
15: 15,
|
||||
16: 16,
|
||||
17: 17,
|
||||
18: 18,
|
||||
19: 19}
|
||||
|
||||
On instance:
|
||||
[0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24]
|
||||
""",
|
||||
instance=list(range(25)),
|
||||
schema=dict(zip(range(20), range(20))),
|
||||
validator="maxLength",
|
||||
)
|
||||
|
||||
def test_does_not_reorder_dicts(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating 'type' in schema:
|
||||
{'do': 3, 'not': 7, 'sort': 37, 'me': 73}
|
||||
|
||||
On instance:
|
||||
{'here': 73, 'too': 37, 'no': 7, 'sorting': 3}
|
||||
""",
|
||||
schema={
|
||||
"do": 3,
|
||||
"not": 7,
|
||||
"sort": 37,
|
||||
"me": 73,
|
||||
},
|
||||
instance={
|
||||
"here": 73,
|
||||
"too": 37,
|
||||
"no": 7,
|
||||
"sorting": 3,
|
||||
},
|
||||
)
|
||||
|
||||
def test_str_works_with_instances_having_overriden_eq_operator(self):
|
||||
"""
|
||||
Check for #164 which rendered exceptions unusable when a
|
||||
`ValidationError` involved instances with an `__eq__` method
|
||||
that returned truthy values.
|
||||
"""
|
||||
|
||||
class DontEQMeBro:
|
||||
def __eq__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
def __ne__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
instance = DontEQMeBro()
|
||||
error = exceptions.ValidationError(
|
||||
"a message",
|
||||
validator="foo",
|
||||
instance=instance,
|
||||
validator_value="some",
|
||||
schema="schema",
|
||||
)
|
||||
self.assertIn(repr(instance), str(error))
|
||||
|
||||
|
||||
class TestHashable(TestCase):
|
||||
def test_hashable(self):
|
||||
{exceptions.ValidationError("")}
|
||||
{exceptions.SchemaError("")}
|
||||
|
||||
|
||||
class TestJsonPathRendering(TestCase):
|
||||
def validate_json_path_rendering(self, property_name, expected_path):
|
||||
error = exceptions.ValidationError(
|
||||
path=[property_name],
|
||||
message="1",
|
||||
validator="foo",
|
||||
instance="i1",
|
||||
)
|
||||
|
||||
rendered_json_path = error.json_path
|
||||
self.assertEqual(rendered_json_path, expected_path)
|
||||
|
||||
re_parsed_name = jsonpath_ng.parse(rendered_json_path).right.fields[0]
|
||||
self.assertEqual(re_parsed_name, property_name)
|
||||
|
||||
def test_basic(self):
|
||||
self.validate_json_path_rendering("x", "$.x")
|
||||
|
||||
def test_empty(self):
|
||||
self.validate_json_path_rendering("", "$['']")
|
||||
|
||||
def test_number(self):
|
||||
self.validate_json_path_rendering("1", "$['1']")
|
||||
|
||||
def test_period(self):
|
||||
self.validate_json_path_rendering(".", "$['.']")
|
||||
|
||||
def test_single_quote(self):
|
||||
self.validate_json_path_rendering("'", r"$['\'']")
|
||||
|
||||
def test_space(self):
|
||||
self.validate_json_path_rendering(" ", "$[' ']")
|
||||
|
||||
def test_backslash(self):
|
||||
self.validate_json_path_rendering("\\", r"$['\\']")
|
||||
|
||||
def test_backslash_single_quote(self):
|
||||
self.validate_json_path_rendering(r"\'", r"$['\\\'']")
|
||||
|
||||
def test_underscore(self):
|
||||
self.validate_json_path_rendering("_", r"$['_']")
|
||||
|
||||
def test_double_quote(self):
|
||||
self.validate_json_path_rendering('"', """$['"']""")
|
||||
|
||||
def test_hyphen(self):
|
||||
self.validate_json_path_rendering("-", "$['-']")
|
||||
|
||||
def test_json_path_injection(self):
|
||||
self.validate_json_path_rendering("a[0]", "$['a[0]']")
|
||||
|
||||
def test_open_bracket(self):
|
||||
self.validate_json_path_rendering("[", "$['[']")
|
||||
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
Tests for the parts of jsonschema related to the :kw:`format` keyword.
|
||||
"""
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import FormatChecker, ValidationError
|
||||
from jsonschema.exceptions import FormatError
|
||||
from jsonschema.validators import Draft4Validator
|
||||
|
||||
BOOM = ValueError("Boom!")
|
||||
BANG = ZeroDivisionError("Bang!")
|
||||
|
||||
|
||||
def boom(thing):
|
||||
if thing == "bang":
|
||||
raise BANG
|
||||
raise BOOM
|
||||
|
||||
|
||||
class TestFormatChecker(TestCase):
|
||||
def test_it_can_validate_no_formats(self):
|
||||
checker = FormatChecker(formats=())
|
||||
self.assertFalse(checker.checkers)
|
||||
|
||||
def test_it_raises_a_key_error_for_unknown_formats(self):
|
||||
with self.assertRaises(KeyError):
|
||||
FormatChecker(formats=["o noes"])
|
||||
|
||||
def test_it_can_register_cls_checkers(self):
|
||||
original = dict(FormatChecker.checkers)
|
||||
self.addCleanup(FormatChecker.checkers.pop, "boom")
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
FormatChecker.cls_checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
FormatChecker.checkers,
|
||||
dict(original, boom=(boom, ())),
|
||||
)
|
||||
|
||||
def test_it_can_register_checkers(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
checker.checkers,
|
||||
dict(FormatChecker.checkers, boom=(boom, ())),
|
||||
)
|
||||
|
||||
def test_it_catches_registered_errors(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=type(BOOM))(boom)
|
||||
|
||||
with self.assertRaises(FormatError) as cm:
|
||||
checker.check(instance=12, format="boom")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
self.assertEqual(str(cm.exception), "12 is not a 'boom'")
|
||||
|
||||
# Unregistered errors should not be caught
|
||||
with self.assertRaises(type(BANG)):
|
||||
checker.check(instance="bang", format="boom")
|
||||
|
||||
def test_format_error_causes_become_validation_error_causes(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=ValueError)(boom)
|
||||
validator = Draft4Validator({"format": "boom"}, format_checker=checker)
|
||||
|
||||
with self.assertRaises(ValidationError) as cm:
|
||||
validator.validate("BOOM")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
|
||||
def test_format_checkers_come_with_defaults(self):
|
||||
# This is bad :/ but relied upon.
|
||||
# The docs for quite awhile recommended people do things like
|
||||
# validate(..., format_checker=FormatChecker())
|
||||
# We should change that, but we can't without deprecation...
|
||||
checker = FormatChecker()
|
||||
with self.assertRaises(FormatError):
|
||||
checker.check(instance="not-an-ipv4", format="ipv4")
|
||||
|
||||
def test_repr(self):
|
||||
checker = FormatChecker(formats=())
|
||||
checker.checks("foo")(lambda thing: True) # pragma: no cover
|
||||
checker.checks("bar")(lambda thing: True) # pragma: no cover
|
||||
checker.checks("baz")(lambda thing: True) # pragma: no cover
|
||||
self.assertEqual(
|
||||
repr(checker),
|
||||
"<FormatChecker checkers=['bar', 'baz', 'foo']>",
|
||||
)
|
||||
@@ -0,0 +1,262 @@
|
||||
"""
|
||||
Test runner for the JSON Schema official test suite
|
||||
|
||||
Tests comprehensive correctness of each draft's validator.
|
||||
|
||||
See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
|
||||
"""
|
||||
|
||||
|
||||
from jsonschema.tests._suite import Suite
|
||||
import jsonschema
|
||||
|
||||
SUITE = Suite()
|
||||
DRAFT3 = SUITE.version(name="draft3")
|
||||
DRAFT4 = SUITE.version(name="draft4")
|
||||
DRAFT6 = SUITE.version(name="draft6")
|
||||
DRAFT7 = SUITE.version(name="draft7")
|
||||
DRAFT201909 = SUITE.version(name="draft2019-09")
|
||||
DRAFT202012 = SUITE.version(name="draft2020-12")
|
||||
|
||||
|
||||
def skip(message, **kwargs):
|
||||
def skipper(test):
|
||||
if all(value == getattr(test, attr) for attr, value in kwargs.items()):
|
||||
return message
|
||||
return skipper
|
||||
|
||||
|
||||
def ecmascript_regex(test):
|
||||
if test.subject == "ecmascript-regex":
|
||||
return "ECMA regex support will be added in #1142."
|
||||
|
||||
|
||||
def missing_format(Validator):
|
||||
def missing_format(test): # pragma: no cover
|
||||
schema = test.schema
|
||||
if (
|
||||
schema is True
|
||||
or schema is False
|
||||
or "format" not in schema
|
||||
or schema["format"] in Validator.FORMAT_CHECKER.checkers
|
||||
or test.valid
|
||||
):
|
||||
return
|
||||
|
||||
return f"Format checker {schema['format']!r} not found."
|
||||
return missing_format
|
||||
|
||||
|
||||
def complex_email_validation(test):
|
||||
if test.subject != "email":
|
||||
return
|
||||
|
||||
message = "Complex email validation is (intentionally) unsupported."
|
||||
return skip(
|
||||
message=message,
|
||||
description="an invalid domain",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="an invalid IPv4-address-literal",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="dot after local part is not valid",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="dot before local part is not valid",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="two subsequent dots inside local part are not valid",
|
||||
)(test)
|
||||
|
||||
|
||||
def leap_second(test):
|
||||
message = "Leap seconds are unsupported."
|
||||
return skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="a valid time string with leap second",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="a valid time string with leap second, Zulu",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="a valid time string with leap second with offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="valid leap second, positive time-offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="valid leap second, negative time-offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="valid leap second, large positive time-offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="valid leap second, large negative time-offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="time",
|
||||
description="valid leap second, zero time-offset",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="date-time",
|
||||
description="a valid date-time with a leap second, UTC",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
subject="date-time",
|
||||
description="a valid date-time with a leap second, with minus offset",
|
||||
)(test)
|
||||
|
||||
|
||||
TestDraft3 = DRAFT3.to_unittest_testcase(
|
||||
DRAFT3.cases(),
|
||||
DRAFT3.format_cases(),
|
||||
DRAFT3.optional_cases_of(name="bignum"),
|
||||
DRAFT3.optional_cases_of(name="non-bmp-regex"),
|
||||
DRAFT3.optional_cases_of(name="zeroTerminatedFloats"),
|
||||
Validator=jsonschema.Draft3Validator,
|
||||
format_checker=jsonschema.Draft3Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
ecmascript_regex(test)
|
||||
or missing_format(jsonschema.Draft3Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft4 = DRAFT4.to_unittest_testcase(
|
||||
DRAFT4.cases(),
|
||||
DRAFT4.format_cases(),
|
||||
DRAFT4.optional_cases_of(name="bignum"),
|
||||
DRAFT4.optional_cases_of(name="float-overflow"),
|
||||
DRAFT4.optional_cases_of(name="id"),
|
||||
DRAFT4.optional_cases_of(name="non-bmp-regex"),
|
||||
DRAFT4.optional_cases_of(name="zeroTerminatedFloats"),
|
||||
Validator=jsonschema.Draft4Validator,
|
||||
format_checker=jsonschema.Draft4Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
ecmascript_regex(test)
|
||||
or leap_second(test)
|
||||
or missing_format(jsonschema.Draft4Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft6 = DRAFT6.to_unittest_testcase(
|
||||
DRAFT6.cases(),
|
||||
DRAFT6.format_cases(),
|
||||
DRAFT6.optional_cases_of(name="bignum"),
|
||||
DRAFT6.optional_cases_of(name="float-overflow"),
|
||||
DRAFT6.optional_cases_of(name="id"),
|
||||
DRAFT6.optional_cases_of(name="non-bmp-regex"),
|
||||
Validator=jsonschema.Draft6Validator,
|
||||
format_checker=jsonschema.Draft6Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
ecmascript_regex(test)
|
||||
or leap_second(test)
|
||||
or missing_format(jsonschema.Draft6Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft7 = DRAFT7.to_unittest_testcase(
|
||||
DRAFT7.cases(),
|
||||
DRAFT7.format_cases(),
|
||||
DRAFT7.optional_cases_of(name="bignum"),
|
||||
DRAFT7.optional_cases_of(name="cross-draft"),
|
||||
DRAFT7.optional_cases_of(name="float-overflow"),
|
||||
DRAFT6.optional_cases_of(name="id"),
|
||||
DRAFT7.optional_cases_of(name="non-bmp-regex"),
|
||||
DRAFT7.optional_cases_of(name="unknownKeyword"),
|
||||
Validator=jsonschema.Draft7Validator,
|
||||
format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
ecmascript_regex(test)
|
||||
or leap_second(test)
|
||||
or missing_format(jsonschema.Draft7Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft201909 = DRAFT201909.to_unittest_testcase(
|
||||
DRAFT201909.cases(),
|
||||
DRAFT201909.optional_cases_of(name="anchor"),
|
||||
DRAFT201909.optional_cases_of(name="bignum"),
|
||||
DRAFT201909.optional_cases_of(name="cross-draft"),
|
||||
DRAFT201909.optional_cases_of(name="float-overflow"),
|
||||
DRAFT201909.optional_cases_of(name="id"),
|
||||
DRAFT201909.optional_cases_of(name="no-schema"),
|
||||
DRAFT201909.optional_cases_of(name="non-bmp-regex"),
|
||||
DRAFT201909.optional_cases_of(name="refOfUnknownKeyword"),
|
||||
DRAFT201909.optional_cases_of(name="unknownKeyword"),
|
||||
Validator=jsonschema.Draft201909Validator,
|
||||
skip=skip(
|
||||
message="Vocabulary support is still in-progress.",
|
||||
subject="vocabulary",
|
||||
description=(
|
||||
"no validation: invalid number, but it still validates"
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft201909Format = DRAFT201909.to_unittest_testcase(
|
||||
DRAFT201909.format_cases(),
|
||||
name="TestDraft201909Format",
|
||||
Validator=jsonschema.Draft201909Validator,
|
||||
format_checker=jsonschema.Draft201909Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
complex_email_validation(test)
|
||||
or ecmascript_regex(test)
|
||||
or leap_second(test)
|
||||
or missing_format(jsonschema.Draft201909Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft202012 = DRAFT202012.to_unittest_testcase(
|
||||
DRAFT202012.cases(),
|
||||
DRAFT201909.optional_cases_of(name="anchor"),
|
||||
DRAFT202012.optional_cases_of(name="bignum"),
|
||||
DRAFT202012.optional_cases_of(name="cross-draft"),
|
||||
DRAFT202012.optional_cases_of(name="float-overflow"),
|
||||
DRAFT202012.optional_cases_of(name="id"),
|
||||
DRAFT202012.optional_cases_of(name="no-schema"),
|
||||
DRAFT202012.optional_cases_of(name="non-bmp-regex"),
|
||||
DRAFT202012.optional_cases_of(name="refOfUnknownKeyword"),
|
||||
DRAFT202012.optional_cases_of(name="unknownKeyword"),
|
||||
Validator=jsonschema.Draft202012Validator,
|
||||
skip=skip(
|
||||
message="Vocabulary support is still in-progress.",
|
||||
subject="vocabulary",
|
||||
description=(
|
||||
"no validation: invalid number, but it still validates"
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft202012Format = DRAFT202012.to_unittest_testcase(
|
||||
DRAFT202012.format_cases(),
|
||||
name="TestDraft202012Format",
|
||||
Validator=jsonschema.Draft202012Validator,
|
||||
format_checker=jsonschema.Draft202012Validator.FORMAT_CHECKER,
|
||||
skip=lambda test: (
|
||||
complex_email_validation(test)
|
||||
or ecmascript_regex(test)
|
||||
or leap_second(test)
|
||||
or missing_format(jsonschema.Draft202012Validator)(test)
|
||||
or complex_email_validation(test)
|
||||
),
|
||||
)
|
||||
221
.venv/lib/python3.9/site-packages/jsonschema/tests/test_types.py
Normal file
221
.venv/lib/python3.9/site-packages/jsonschema/tests/test_types.py
Normal file
@@ -0,0 +1,221 @@
|
||||
"""
|
||||
Tests for the `TypeChecker`-based type interface.
|
||||
|
||||
The actual correctness of the type checking is handled in
|
||||
`test_jsonschema_test_suite`; these tests check that TypeChecker
|
||||
functions correctly at a more granular level.
|
||||
"""
|
||||
from collections import namedtuple
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import ValidationError, _keywords
|
||||
from jsonschema._types import TypeChecker
|
||||
from jsonschema.exceptions import UndefinedTypeCheck, UnknownType
|
||||
from jsonschema.validators import Draft202012Validator, extend
|
||||
|
||||
|
||||
def equals_2(checker, instance):
|
||||
return instance == 2
|
||||
|
||||
|
||||
def is_namedtuple(instance):
|
||||
return isinstance(instance, tuple) and getattr(instance, "_fields", None)
|
||||
|
||||
|
||||
def is_object_or_named_tuple(checker, instance):
|
||||
if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"):
|
||||
return True
|
||||
return is_namedtuple(instance)
|
||||
|
||||
|
||||
class TestTypeChecker(TestCase):
|
||||
def test_is_type(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(
|
||||
(
|
||||
checker.is_type(instance=2, type="two"),
|
||||
checker.is_type(instance="bar", type="two"),
|
||||
),
|
||||
(True, False),
|
||||
)
|
||||
|
||||
def test_is_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as e:
|
||||
TypeChecker().is_type(4, "foobar")
|
||||
self.assertIn(
|
||||
"'foobar' is unknown to this type checker",
|
||||
str(e.exception),
|
||||
)
|
||||
self.assertTrue(
|
||||
e.exception.__suppress_context__,
|
||||
msg="Expected the internal KeyError to be hidden.",
|
||||
)
|
||||
|
||||
def test_checks_can_be_added_at_init(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
|
||||
|
||||
def test_redefine_existing_type(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine("two", object()).redefine("two", equals_2),
|
||||
TypeChecker().redefine("two", equals_2),
|
||||
)
|
||||
|
||||
def test_remove(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"two": equals_2}).remove("two"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
def test_remove_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as context:
|
||||
TypeChecker().remove("foobar")
|
||||
self.assertIn("foobar", str(context.exception))
|
||||
|
||||
def test_redefine_many(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine_many({"foo": int, "bar": str}),
|
||||
TypeChecker().redefine("foo", int).redefine("bar", str),
|
||||
)
|
||||
|
||||
def test_remove_multiple(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
def test_type_check_can_raise_key_error(self):
|
||||
"""
|
||||
Make sure no one writes:
|
||||
|
||||
try:
|
||||
self._type_checkers[type](...)
|
||||
except KeyError:
|
||||
|
||||
ignoring the fact that the function itself can raise that.
|
||||
"""
|
||||
|
||||
error = KeyError("Stuff")
|
||||
|
||||
def raises_keyerror(checker, instance):
|
||||
raise error
|
||||
|
||||
with self.assertRaises(KeyError) as context:
|
||||
TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")
|
||||
|
||||
self.assertIs(context.exception, error)
|
||||
|
||||
def test_repr(self):
|
||||
checker = TypeChecker({"foo": is_namedtuple, "bar": is_namedtuple})
|
||||
self.assertEqual(repr(checker), "<TypeChecker types={'bar', 'foo'}>")
|
||||
|
||||
|
||||
class TestCustomTypes(TestCase):
|
||||
def test_simple_type_can_be_extended(self):
|
||||
def int_or_str_int(checker, instance):
|
||||
if not isinstance(instance, (int, str)):
|
||||
return False
|
||||
try:
|
||||
int(instance)
|
||||
except ValueError:
|
||||
return False
|
||||
return True
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft202012Validator,
|
||||
type_checker=Draft202012Validator.TYPE_CHECKER.redefine(
|
||||
"integer", int_or_str_int,
|
||||
),
|
||||
)
|
||||
validator = CustomValidator({"type": "integer"})
|
||||
|
||||
validator.validate(4)
|
||||
validator.validate("4")
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(4.4)
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate("foo")
|
||||
|
||||
def test_object_can_be_extended(self):
|
||||
schema = {"type": "object"}
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
|
||||
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
|
||||
"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft202012Validator,
|
||||
type_checker=type_checker,
|
||||
)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_require_custom_validators(self):
|
||||
schema = {"type": "object", "required": ["x"]}
|
||||
|
||||
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
|
||||
"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft202012Validator,
|
||||
type_checker=type_checker,
|
||||
)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Cannot handle required
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_can_handle_custom_validators(self):
|
||||
schema = {
|
||||
"type": "object",
|
||||
"required": ["x"],
|
||||
"properties": {"x": {"type": "integer"}},
|
||||
}
|
||||
|
||||
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
|
||||
"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
def coerce_named_tuple(fn):
|
||||
def coerced(validator, value, instance, schema):
|
||||
if is_namedtuple(instance):
|
||||
instance = instance._asdict()
|
||||
return fn(validator, value, instance, schema)
|
||||
return coerced
|
||||
|
||||
required = coerce_named_tuple(_keywords.required)
|
||||
properties = coerce_named_tuple(_keywords.properties)
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft202012Validator,
|
||||
type_checker=type_checker,
|
||||
validators={"required": required, "properties": properties},
|
||||
)
|
||||
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Can now process required and properties
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x="not an integer", y=5))
|
||||
|
||||
# As well as still handle objects.
|
||||
validator.validate({"x": 4, "y": 5})
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate({"x": "not an integer", "y": 5})
|
||||
|
||||
def test_unknown_type(self):
|
||||
with self.assertRaises(UnknownType) as e:
|
||||
Draft202012Validator({}).is_type(12, "some unknown type")
|
||||
self.assertIn("'some unknown type'", str(e.exception))
|
||||
138
.venv/lib/python3.9/site-packages/jsonschema/tests/test_utils.py
Normal file
138
.venv/lib/python3.9/site-packages/jsonschema/tests/test_utils.py
Normal file
@@ -0,0 +1,138 @@
|
||||
from math import nan
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema._utils import equal
|
||||
|
||||
|
||||
class TestEqual(TestCase):
|
||||
def test_none(self):
|
||||
self.assertTrue(equal(None, None))
|
||||
|
||||
def test_nan(self):
|
||||
self.assertTrue(equal(nan, nan))
|
||||
|
||||
|
||||
class TestDictEqual(TestCase):
|
||||
def test_equal_dictionaries(self):
|
||||
dict_1 = {"a": "b", "c": "d"}
|
||||
dict_2 = {"c": "d", "a": "b"}
|
||||
self.assertTrue(equal(dict_1, dict_2))
|
||||
|
||||
def test_equal_dictionaries_with_nan(self):
|
||||
dict_1 = {"a": nan, "c": "d"}
|
||||
dict_2 = {"c": "d", "a": nan}
|
||||
self.assertTrue(equal(dict_1, dict_2))
|
||||
|
||||
def test_missing_key(self):
|
||||
dict_1 = {"a": "b", "c": "d"}
|
||||
dict_2 = {"c": "d", "x": "b"}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
def test_additional_key(self):
|
||||
dict_1 = {"a": "b", "c": "d"}
|
||||
dict_2 = {"c": "d", "a": "b", "x": "x"}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
def test_missing_value(self):
|
||||
dict_1 = {"a": "b", "c": "d"}
|
||||
dict_2 = {"c": "d", "a": "x"}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
def test_empty_dictionaries(self):
|
||||
dict_1 = {}
|
||||
dict_2 = {}
|
||||
self.assertTrue(equal(dict_1, dict_2))
|
||||
|
||||
def test_one_none(self):
|
||||
dict_1 = None
|
||||
dict_2 = {"a": "b", "c": "d"}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
def test_same_item(self):
|
||||
dict_1 = {"a": "b", "c": "d"}
|
||||
self.assertTrue(equal(dict_1, dict_1))
|
||||
|
||||
def test_nested_equal(self):
|
||||
dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"}
|
||||
dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}}
|
||||
self.assertTrue(equal(dict_1, dict_2))
|
||||
|
||||
def test_nested_dict_unequal(self):
|
||||
dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"}
|
||||
dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
def test_mixed_nested_equal(self):
|
||||
dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"}
|
||||
dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]}
|
||||
self.assertTrue(equal(dict_1, dict_2))
|
||||
|
||||
def test_nested_list_unequal(self):
|
||||
dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"}
|
||||
dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]}
|
||||
self.assertFalse(equal(dict_1, dict_2))
|
||||
|
||||
|
||||
class TestListEqual(TestCase):
|
||||
def test_equal_lists(self):
|
||||
list_1 = ["a", "b", "c"]
|
||||
list_2 = ["a", "b", "c"]
|
||||
self.assertTrue(equal(list_1, list_2))
|
||||
|
||||
def test_equal_lists_with_nan(self):
|
||||
list_1 = ["a", nan, "c"]
|
||||
list_2 = ["a", nan, "c"]
|
||||
self.assertTrue(equal(list_1, list_2))
|
||||
|
||||
def test_unsorted_lists(self):
|
||||
list_1 = ["a", "b", "c"]
|
||||
list_2 = ["b", "b", "a"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
def test_first_list_larger(self):
|
||||
list_1 = ["a", "b", "c"]
|
||||
list_2 = ["a", "b"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
def test_second_list_larger(self):
|
||||
list_1 = ["a", "b"]
|
||||
list_2 = ["a", "b", "c"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
def test_list_with_none_unequal(self):
|
||||
list_1 = ["a", "b", None]
|
||||
list_2 = ["a", "b", "c"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
list_1 = ["a", "b", None]
|
||||
list_2 = [None, "b", "c"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
def test_list_with_none_equal(self):
|
||||
list_1 = ["a", None, "c"]
|
||||
list_2 = ["a", None, "c"]
|
||||
self.assertTrue(equal(list_1, list_2))
|
||||
|
||||
def test_empty_list(self):
|
||||
list_1 = []
|
||||
list_2 = []
|
||||
self.assertTrue(equal(list_1, list_2))
|
||||
|
||||
def test_one_none(self):
|
||||
list_1 = None
|
||||
list_2 = []
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
|
||||
def test_same_list(self):
|
||||
list_1 = ["a", "b", "c"]
|
||||
self.assertTrue(equal(list_1, list_1))
|
||||
|
||||
def test_equal_nested_lists(self):
|
||||
list_1 = ["a", ["b", "c"], "d"]
|
||||
list_2 = ["a", ["b", "c"], "d"]
|
||||
self.assertTrue(equal(list_1, list_2))
|
||||
|
||||
def test_unequal_nested_lists(self):
|
||||
list_1 = ["a", ["b", "c"], "d"]
|
||||
list_2 = ["a", [], "c"]
|
||||
self.assertFalse(equal(list_1, list_2))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
This module acts as a test that type checkers will allow each validator
|
||||
class to be assigned to a variable of type `type[Validator]`
|
||||
|
||||
The assignation is only valid if type checkers recognize each Validator
|
||||
implementation as a valid implementer of the protocol.
|
||||
"""
|
||||
from jsonschema.protocols import Validator
|
||||
from jsonschema.validators import (
|
||||
Draft3Validator,
|
||||
Draft4Validator,
|
||||
Draft6Validator,
|
||||
Draft7Validator,
|
||||
Draft201909Validator,
|
||||
Draft202012Validator,
|
||||
)
|
||||
|
||||
my_validator: type[Validator]
|
||||
|
||||
my_validator = Draft3Validator
|
||||
my_validator = Draft4Validator
|
||||
my_validator = Draft6Validator
|
||||
my_validator = Draft7Validator
|
||||
my_validator = Draft201909Validator
|
||||
my_validator = Draft202012Validator
|
||||
|
||||
|
||||
# in order to confirm that none of the above were incorrectly typed as 'Any'
|
||||
# ensure that each of these assignments to a non-validator variable requires an
|
||||
# ignore
|
||||
none_var: None
|
||||
|
||||
none_var = Draft3Validator # type: ignore[assignment]
|
||||
none_var = Draft4Validator # type: ignore[assignment]
|
||||
none_var = Draft6Validator # type: ignore[assignment]
|
||||
none_var = Draft7Validator # type: ignore[assignment]
|
||||
none_var = Draft201909Validator # type: ignore[assignment]
|
||||
none_var = Draft202012Validator # type: ignore[assignment]
|
||||
1410
.venv/lib/python3.9/site-packages/jsonschema/validators.py
Normal file
1410
.venv/lib/python3.9/site-packages/jsonschema/validators.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,54 @@
|
||||
Metadata-Version: 2.4
|
||||
Name: jsonschema-specifications
|
||||
Version: 2025.9.1
|
||||
Summary: The JSON Schema meta-schemas and vocabularies, exposed as a Registry
|
||||
Project-URL: Documentation, https://jsonschema-specifications.readthedocs.io/
|
||||
Project-URL: Homepage, https://github.com/python-jsonschema/jsonschema-specifications
|
||||
Project-URL: Issues, https://github.com/python-jsonschema/jsonschema-specifications/issues/
|
||||
Project-URL: Funding, https://github.com/sponsors/Julian
|
||||
Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-jsonschema-specifications?utm_source=pypi-jsonschema-specifications&utm_medium=referral&utm_campaign=pypi-link
|
||||
Project-URL: Source, https://github.com/python-jsonschema/jsonschema-specifications
|
||||
Author-email: Julian Berman <Julian+jsonschema-specifications@GrayVines.com>
|
||||
License-Expression: MIT
|
||||
License-File: COPYING
|
||||
Keywords: data validation,json,json schema,jsonschema,validation
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: File Formats :: JSON
|
||||
Classifier: Topic :: File Formats :: JSON :: JSON Schema
|
||||
Requires-Python: >=3.9
|
||||
Requires-Dist: referencing>=0.31.0
|
||||
Description-Content-Type: text/x-rst
|
||||
|
||||
=============================
|
||||
``jsonschema-specifications``
|
||||
=============================
|
||||
|
||||
|PyPI| |Pythons| |CI| |ReadTheDocs|
|
||||
|
||||
JSON support files from the `JSON Schema Specifications <https://json-schema.org/specification.html>`_ (metaschemas, vocabularies, etc.), packaged for runtime access from Python as a `referencing-based Schema Registry <https://referencing.readthedocs.io/en/stable/api/#referencing.Registry>`_.
|
||||
|
||||
.. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema-specifications.svg
|
||||
:alt: PyPI version
|
||||
:target: https://pypi.org/project/jsonschema-specifications/
|
||||
|
||||
.. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema-specifications.svg
|
||||
:alt: Supported Python versions
|
||||
:target: https://pypi.org/project/jsonschema-specifications/
|
||||
|
||||
.. |CI| image:: https://github.com/python-jsonschema/jsonschema-specifications/workflows/CI/badge.svg
|
||||
:alt: Build status
|
||||
:target: https://github.com/python-jsonschema/jsonschema-specifications/actions?query=workflow%3ACI
|
||||
|
||||
.. |ReadTheDocs| image:: https://readthedocs.org/projects/jsonschema-specifications/badge/?version=stable&style=flat
|
||||
:alt: ReadTheDocs status
|
||||
:target: https://jsonschema-specifications.readthedocs.io/en/stable/
|
||||
@@ -0,0 +1,33 @@
|
||||
jsonschema_specifications-2025.9.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
jsonschema_specifications-2025.9.1.dist-info/METADATA,sha256=NavUF1fzK06iR1aSDe1HtwFz13y8BSpabTq1g7Lo2J0,2907
|
||||
jsonschema_specifications-2025.9.1.dist-info/RECORD,,
|
||||
jsonschema_specifications-2025.9.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
||||
jsonschema_specifications-2025.9.1.dist-info/licenses/COPYING,sha256=QtzWNJX4e063x3V6-jebtVpT-Ur9el9lfZrfVyNuUVw,1057
|
||||
jsonschema_specifications/__init__.py,sha256=qoTB2DKY7qvNrGhMPH6gtmAJRLilmVQ-fFZwT6ryqw0,386
|
||||
jsonschema_specifications/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema_specifications/__pycache__/_core.cpython-39.pyc,,
|
||||
jsonschema_specifications/_core.py,sha256=tFhc1CMleJ3AJOK_bjxOpFQTdrsUClFGfFxPBU_CebM,1140
|
||||
jsonschema_specifications/schemas/draft201909/metaschema.json,sha256=e3YbPhIfCgyh6ioLjizIVrz4AWBLgmjXG6yqICvAwTs,1785
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/applicator,sha256=aJUQDplyb7sQcFhRK77D7P1LJOj9L6zuPlBe5ysNTDE,1860
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/content,sha256=m31PVaTi_bAsQwBo_f-rxzKt3OI42j8d8mkCScM1MnQ,517
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/core,sha256=taLElX9kldClCB8ECevooU5BOayyA_x0hHH47eKvWyw,1531
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/format,sha256=UOu_55BhGoSbjMQAoJwdDg-2q1wNQ6DyIgH9NiUFa_Q,403
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/meta-data,sha256=1H4kRd1qgicaKY2DzGxsuNSuHhXg3Fa-zTehY-zwEoY,892
|
||||
jsonschema_specifications/schemas/draft201909/vocabularies/validation,sha256=HlJsHTNac0gF_ILPV5jBK5YK19olF8Zs2lobCTWcPBw,2834
|
||||
jsonschema_specifications/schemas/draft202012/metaschema.json,sha256=Qdp29a-3zgYtJI92JGOpL3ykfk4PkFsiS6av7vkd7Q8,2452
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/applicator,sha256=xKbkFHuR_vf-ptwFjLG_k0AvdBS3ZXiosWqvHa1qrO8,1659
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/content,sha256=CDQ3R3ZOSlgUJieTz01lIFenkThjxZUNQyl-jh_axbY,519
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/core,sha256=wtEqjk3RHTNt_IOj9mOqTGnwtJs76wlP_rJbUxb0gD0,1564
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/format-annotation,sha256=q8d1rf79idIjWBcNm_k_Tr0jSVY7u-3WDwK-98gSvMA,448
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/format-assertion,sha256=xSJCuaG7eGsmw-gset1CjDH5yW5XXc6Z5W6l_qptogw,445
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/meta-data,sha256=j3bW4U9Bubku-TO3CM3FFEyLUmhlGtEZGEhfsXVPHHY,892
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/unevaluated,sha256=Lb-8tzmUtnCwl2SSre4f_7RsIWgnhNL1pMpWH54tDLQ,506
|
||||
jsonschema_specifications/schemas/draft202012/vocabularies/validation,sha256=cBCjHlQfMtK-ch4t40jfdcmzaHaj7TBId_wKvaHTelg,2834
|
||||
jsonschema_specifications/schemas/draft3/metaschema.json,sha256=LPdfZENvtb43Si6qJ6uLfh_WUcm0ba6mxnsC_WTiRYs,2600
|
||||
jsonschema_specifications/schemas/draft4/metaschema.json,sha256=4UidC0dV8CeTMCWR0_y48Htok6gqlPJIlfjk7fEbguI,4357
|
||||
jsonschema_specifications/schemas/draft6/metaschema.json,sha256=wp386fVINcOgbAOzxdXsDtp3cGVo-cTffPvHVmpRAG0,4437
|
||||
jsonschema_specifications/schemas/draft7/metaschema.json,sha256=PVOSCIJhYGxVm2A_OFMpyfGrRbXWZ-uZBodFOwVdQF4,4819
|
||||
jsonschema_specifications/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jsonschema_specifications/tests/__pycache__/__init__.cpython-39.pyc,,
|
||||
jsonschema_specifications/tests/__pycache__/test_jsonschema_specifications.cpython-39.pyc,,
|
||||
jsonschema_specifications/tests/test_jsonschema_specifications.py,sha256=WkbYRW6A6FoZ0rivShfqVLSCsAiHJ2x8TxqECJTXPTY,1106
|
||||
@@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.27.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@@ -0,0 +1,19 @@
|
||||
Copyright (c) 2022 Julian Berman
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
@@ -0,0 +1,12 @@
|
||||
"""
|
||||
The JSON Schema meta-schemas and vocabularies, exposed as a Registry.
|
||||
"""
|
||||
|
||||
from referencing.jsonschema import EMPTY_REGISTRY as _EMPTY_REGISTRY
|
||||
|
||||
from jsonschema_specifications._core import _schemas
|
||||
|
||||
#: A `referencing.jsonschema.SchemaRegistry` containing all of the official
|
||||
#: meta-schemas and vocabularies.
|
||||
REGISTRY = (_schemas() @ _EMPTY_REGISTRY).crawl()
|
||||
__all__ = ["REGISTRY"]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user