Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- .venv/bin/f2py +8 -0
- .venv/bin/get_gprof +75 -0
- .venv/bin/json-playground +8 -0
- .venv/bin/proton-viewer +8 -0
- .venv/bin/pyrsa-sign +8 -0
- .venv/bin/python +0 -0
- .venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so +3 -0
- .venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD +35 -0
- .venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt +2 -0
- .venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt +1 -0
- .venv/lib/python3.11/site-packages/distutils-precedence.pth +3 -0
- .venv/lib/python3.11/site-packages/example.py +169 -0
- .venv/lib/python3.11/site-packages/google_auth_httplib2.py +307 -0
- .venv/lib/python3.11/site-packages/google_generativeai-0.8.4-py3.12-nspkg.pth +3 -0
- .venv/lib/python3.11/site-packages/isympy.py +342 -0
- .venv/lib/python3.11/site-packages/jsonschema/__init__.py +120 -0
- .venv/lib/python3.11/site-packages/jsonschema/__main__.py +6 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/__main__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_format.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_keywords.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_legacy_keywords.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_types.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_typing.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/_utils.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/cli.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/exceptions.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/protocols.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/__pycache__/validators.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/_format.py +519 -0
- .venv/lib/python3.11/site-packages/jsonschema/_legacy_keywords.py +449 -0
- .venv/lib/python3.11/site-packages/jsonschema/_types.py +200 -0
- .venv/lib/python3.11/site-packages/jsonschema/_typing.py +28 -0
- .venv/lib/python3.11/site-packages/jsonschema/_utils.py +351 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/__init__.py +5 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/issue232.py +25 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/issue232/issue.json +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/nested_schemas.py +56 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/subcomponents.py +42 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/unused_registry.py +35 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/useless_applicator_schemas.py +106 -0
- .venv/lib/python3.11/site-packages/jsonschema/benchmarks/useless_keywords.py +32 -0
- .venv/lib/python3.11/site-packages/jsonschema/cli.py +296 -0
- .venv/lib/python3.11/site-packages/jsonschema/exceptions.py +487 -0
- .venv/lib/python3.11/site-packages/jsonschema/protocols.py +236 -0
- .venv/lib/python3.11/site-packages/jsonschema/tests/__init__.py +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/jsonschema/tests/__pycache__/_suite.cpython-311.pyc +0 -0
.gitattributes
CHANGED
@@ -293,3 +293,5 @@ llm_tutorial/llm_recipes/datasets/bin/common_crawl_and_extended_common_crawl.doc
|
|
293 |
tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
|
294 |
tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/libcudnn_cnn_infer.so.8 filter=lfs diff=lfs merge=lfs -text
|
295 |
.venv/bin/py-spy filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
293 |
tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
|
294 |
tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/libcudnn_cnn_infer.so.8 filter=lfs diff=lfs merge=lfs -text
|
295 |
.venv/bin/py-spy filter=lfs diff=lfs merge=lfs -text
|
296 |
+
.venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
297 |
+
.venv/lib/python3.11/site-packages/jsonschema/tests/__pycache__/test_validators.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
.venv/bin/f2py
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/home/koiwa/work/.venv/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from numpy.f2py.f2py2e import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
.venv/bin/get_gprof
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/home/koiwa/work/.venv/bin/python3
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
'''
|
9 |
+
build profile graph for the given instance
|
10 |
+
|
11 |
+
running:
|
12 |
+
$ get_gprof <args> <instance>
|
13 |
+
|
14 |
+
executes:
|
15 |
+
gprof2dot -f pstats <args> <type>.prof | dot -Tpng -o <type>.call.png
|
16 |
+
|
17 |
+
where:
|
18 |
+
<args> are arguments for gprof2dot, such as "-n 5 -e 5"
|
19 |
+
<instance> is code to create the instance to profile
|
20 |
+
<type> is the class of the instance (i.e. type(instance))
|
21 |
+
|
22 |
+
For example:
|
23 |
+
$ get_gprof -n 5 -e 1 "import numpy; numpy.array([1,2])"
|
24 |
+
|
25 |
+
will create 'ndarray.call.png' with the profile graph for numpy.array([1,2]),
|
26 |
+
where '-n 5' eliminates nodes below 5% threshold, similarly '-e 1' eliminates
|
27 |
+
edges below 1% threshold
|
28 |
+
'''
|
29 |
+
|
30 |
+
if __name__ == "__main__":
|
31 |
+
import sys
|
32 |
+
if len(sys.argv) < 2:
|
33 |
+
print ("Please provide an object instance (e.g. 'import math; math.pi')")
|
34 |
+
sys.exit()
|
35 |
+
# grab args for gprof2dot
|
36 |
+
args = sys.argv[1:-1]
|
37 |
+
args = ' '.join(args)
|
38 |
+
# last arg builds the object
|
39 |
+
obj = sys.argv[-1]
|
40 |
+
obj = obj.split(';')
|
41 |
+
# multi-line prep for generating an instance
|
42 |
+
for line in obj[:-1]:
|
43 |
+
exec(line)
|
44 |
+
# one-line generation of an instance
|
45 |
+
try:
|
46 |
+
obj = eval(obj[-1])
|
47 |
+
except Exception:
|
48 |
+
print ("Error processing object instance")
|
49 |
+
sys.exit()
|
50 |
+
|
51 |
+
# get object 'name'
|
52 |
+
objtype = type(obj)
|
53 |
+
name = getattr(objtype, '__name__', getattr(objtype, '__class__', objtype))
|
54 |
+
|
55 |
+
# profile dumping an object
|
56 |
+
import dill
|
57 |
+
import os
|
58 |
+
import cProfile
|
59 |
+
#name = os.path.splitext(os.path.basename(__file__))[0]
|
60 |
+
cProfile.run("dill.dumps(obj)", filename="%s.prof" % name)
|
61 |
+
msg = "gprof2dot -f pstats %s %s.prof | dot -Tpng -o %s.call.png" % (args, name, name)
|
62 |
+
try:
|
63 |
+
res = os.system(msg)
|
64 |
+
except Exception:
|
65 |
+
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
66 |
+
if res:
|
67 |
+
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
68 |
+
|
69 |
+
# get stats
|
70 |
+
f_prof = "%s.prof" % name
|
71 |
+
import pstats
|
72 |
+
stats = pstats.Stats(f_prof, stream=sys.stdout)
|
73 |
+
stats.strip_dirs().sort_stats('cumtime')
|
74 |
+
stats.print_stats(20) #XXX: save to file instead of print top 20?
|
75 |
+
os.remove(f_prof)
|
.venv/bin/json-playground
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/home/koiwa/work/.venv/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from partial_json_parser.playground import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
.venv/bin/proton-viewer
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/home/koiwa/work/.venv/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from triton.profiler.viewer import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
.venv/bin/pyrsa-sign
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/home/koiwa/work/.venv/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from rsa.cli import sign
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(sign())
|
.venv/bin/python
ADDED
Binary file (17.7 kB). View file
|
|
.venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2b7220efa1b67cd192ede7fdc9a7623fe80d8e90875ddf89d59373befea37d0b
|
3 |
+
size 1068624
|
.venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
pip
|
.venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
../../../bin/normalizer,sha256=-E3AlS62VPo0zlTQlnk4VlAcsExlyna6kXwCg1_mDWo,244
|
2 |
+
charset_normalizer-3.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
3 |
+
charset_normalizer-3.4.1.dist-info/LICENSE,sha256=bQ1Bv-FwrGx9wkjJpj4lTQ-0WmDVCoJX0K-SxuJJuIc,1071
|
4 |
+
charset_normalizer-3.4.1.dist-info/METADATA,sha256=JbyHzhmqZh_ugEn1Y7TY7CDYZA9FoU6BP25hrCNDf50,35313
|
5 |
+
charset_normalizer-3.4.1.dist-info/RECORD,,
|
6 |
+
charset_normalizer-3.4.1.dist-info/WHEEL,sha256=9BFfIe-Zq441iQ0ehutX65O5faGDpmB1Uw3WaQGk4f0,151
|
7 |
+
charset_normalizer-3.4.1.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65
|
8 |
+
charset_normalizer-3.4.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
|
9 |
+
charset_normalizer/__init__.py,sha256=OKRxRv2Zhnqk00tqkN0c1BtJjm165fWXLydE52IKuHc,1590
|
10 |
+
charset_normalizer/__main__.py,sha256=yzYxMR-IhKRHYwcSlavEv8oGdwxsR89mr2X09qXGdps,109
|
11 |
+
charset_normalizer/__pycache__/__init__.cpython-311.pyc,,
|
12 |
+
charset_normalizer/__pycache__/__main__.cpython-311.pyc,,
|
13 |
+
charset_normalizer/__pycache__/api.cpython-311.pyc,,
|
14 |
+
charset_normalizer/__pycache__/cd.cpython-311.pyc,,
|
15 |
+
charset_normalizer/__pycache__/constant.cpython-311.pyc,,
|
16 |
+
charset_normalizer/__pycache__/legacy.cpython-311.pyc,,
|
17 |
+
charset_normalizer/__pycache__/md.cpython-311.pyc,,
|
18 |
+
charset_normalizer/__pycache__/models.cpython-311.pyc,,
|
19 |
+
charset_normalizer/__pycache__/utils.cpython-311.pyc,,
|
20 |
+
charset_normalizer/__pycache__/version.cpython-311.pyc,,
|
21 |
+
charset_normalizer/api.py,sha256=qBRz8mJ_R5E713R6TOyqHEdnmyxbEDnCSHvx32ubDGg,22617
|
22 |
+
charset_normalizer/cd.py,sha256=WKTo1HDb-H9HfCDc3Bfwq5jzS25Ziy9SE2a74SgTq88,12522
|
23 |
+
charset_normalizer/cli/__init__.py,sha256=D8I86lFk2-py45JvqxniTirSj_sFyE6sjaY_0-G1shc,136
|
24 |
+
charset_normalizer/cli/__main__.py,sha256=VGC9klOoi6_R2z8rmyrc936kv7u2A1udjjHtlmNPDTM,10410
|
25 |
+
charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,,
|
26 |
+
charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,,
|
27 |
+
charset_normalizer/constant.py,sha256=4VuTcZNLew1j_8ixA-Rt_VVqNWD4pwgHOHMCMlr0964,40477
|
28 |
+
charset_normalizer/legacy.py,sha256=yhNXsPHkBfqPXKRb-sPXNj3Bscp9-mFGcYOkJ62tg9c,2328
|
29 |
+
charset_normalizer/md.cpython-311-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064
|
30 |
+
charset_normalizer/md.py,sha256=iyXXQGWl54nnLQLueMWTmUtlivO0-rTBgVkmJxIIAGU,20036
|
31 |
+
charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so,sha256=XhkMX4lYOzNR-a68sp24uNetCg-njAJCHj14aJM6dY8,272624
|
32 |
+
charset_normalizer/models.py,sha256=lKXhOnIPtiakbK3i__J9wpOfzx3JDTKj7Dn3Rg0VaRI,12394
|
33 |
+
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
34 |
+
charset_normalizer/utils.py,sha256=T5UHo8AS7NVMmgruWoZyqEf0WrZVcQpgUNetRoborSk,12002
|
35 |
+
charset_normalizer/version.py,sha256=Ambcj3O8FfvdLfDLc8dkaxZx97O1IM_R4_aKGD_TDdE,115
|
.venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
[console_scripts]
|
2 |
+
normalizer = charset_normalizer:cli.cli_detect
|
.venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
charset_normalizer
|
.venv/lib/python3.11/site-packages/distutils-precedence.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2638ce9e2500e572a5e0de7faed6661eb569d1b696fcba07b0dd223da5f5d224
|
3 |
+
size 151
|
.venv/lib/python3.11/site-packages/example.py
ADDED
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#################################################################################
|
2 |
+
# Copyright (c) 2020, NVIDIA Corporation. All rights reserved. #
|
3 |
+
# #
|
4 |
+
# Redistribution and use in source and binary forms, with or without #
|
5 |
+
# modification, are permitted provided that the following conditions are met: #
|
6 |
+
# #
|
7 |
+
# * Redistributions of source code must retain the above copyright notice, #
|
8 |
+
# this list of conditions and the following disclaimer. #
|
9 |
+
# * Redistributions in binary form must reproduce the above copyright #
|
10 |
+
# notice, this list of conditions and the following disclaimer in the #
|
11 |
+
# documentation and/or other materials provided with the distribution. #
|
12 |
+
# * Neither the name of the NVIDIA Corporation nor the names of its #
|
13 |
+
# contributors may be used to endorse or promote products derived from #
|
14 |
+
# this software without specific prior written permission. #
|
15 |
+
# #
|
16 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" #
|
17 |
+
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
|
18 |
+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
|
19 |
+
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
|
20 |
+
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
|
21 |
+
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
|
22 |
+
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
|
23 |
+
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
|
24 |
+
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
|
25 |
+
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
|
26 |
+
# THE POSSIBILITY OF SUCH DAMAGE. #
|
27 |
+
#################################################################################
|
28 |
+
|
29 |
+
#
|
30 |
+
# Sample script to demonstrate the usage of NVML API python bindings
|
31 |
+
#
|
32 |
+
|
33 |
+
# To Run:
|
34 |
+
# $ python ./example.py
|
35 |
+
|
36 |
+
from pynvml import *
|
37 |
+
|
38 |
+
#
|
39 |
+
# Helper function
|
40 |
+
#
|
41 |
+
def StrVirt(mode):
|
42 |
+
if mode == NVML_GPU_VIRTUALIZATION_MODE_NONE:
|
43 |
+
return "None";
|
44 |
+
elif mode == NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH:
|
45 |
+
return "Pass-Through";
|
46 |
+
elif mode == NVML_GPU_VIRTUALIZATION_MODE_VGPU:
|
47 |
+
return "VGPU";
|
48 |
+
elif mode == NVML_GPU_VIRTUALIZATION_MODE_HOST_VGPU:
|
49 |
+
return "Host VGPU";
|
50 |
+
elif mode == NVML_GPU_VIRTUALIZATION_MODE_HOST_VSGA:
|
51 |
+
return "Host VSGA";
|
52 |
+
else:
|
53 |
+
return "Unknown";
|
54 |
+
|
55 |
+
#
|
56 |
+
# Converts errors into string messages
|
57 |
+
#
|
58 |
+
def handleError(err):
|
59 |
+
if (err.value == NVML_ERROR_NOT_SUPPORTED):
|
60 |
+
return "N/A"
|
61 |
+
else:
|
62 |
+
return err.__str__()
|
63 |
+
|
64 |
+
#######
|
65 |
+
def deviceQuery():
|
66 |
+
|
67 |
+
strResult = ''
|
68 |
+
try:
|
69 |
+
#
|
70 |
+
# Initialize NVML
|
71 |
+
#
|
72 |
+
nvmlInit()
|
73 |
+
|
74 |
+
strResult += ' <driver_version>' + str(nvmlSystemGetDriverVersion()) + '</driver_version>\n'
|
75 |
+
|
76 |
+
deviceCount = nvmlDeviceGetCount()
|
77 |
+
strResult += ' <attached_gpus>' + str(deviceCount) + '</attached_gpus>\n'
|
78 |
+
|
79 |
+
for i in range(0, deviceCount):
|
80 |
+
handle = nvmlDeviceGetHandleByIndex(i)
|
81 |
+
|
82 |
+
pciInfo = nvmlDeviceGetPciInfo(handle)
|
83 |
+
|
84 |
+
strResult += ' <gpu id="%s">\n' % pciInfo.busId
|
85 |
+
|
86 |
+
strResult += ' <product_name>' + nvmlDeviceGetName(handle) + '</product_name>\n'
|
87 |
+
|
88 |
+
brandNames = {NVML_BRAND_UNKNOWN : "Unknown",
|
89 |
+
NVML_BRAND_QUADRO : "Quadro",
|
90 |
+
NVML_BRAND_TESLA : "Tesla",
|
91 |
+
NVML_BRAND_NVS : "NVS",
|
92 |
+
NVML_BRAND_GRID : "Grid",
|
93 |
+
NVML_BRAND_TITAN : "Titan",
|
94 |
+
NVML_BRAND_GEFORCE : "GeForce",
|
95 |
+
NVML_BRAND_NVIDIA_VAPPS : "NVIDIA Virtual Applications",
|
96 |
+
NVML_BRAND_NVIDIA_VPC : "NVIDIA Virtual PC",
|
97 |
+
NVML_BRAND_NVIDIA_VCS : "NVIDIA Virtual Compute Server",
|
98 |
+
NVML_BRAND_NVIDIA_VWS : "NVIDIA RTX Virtual Workstation",
|
99 |
+
NVML_BRAND_NVIDIA_CLOUD_GAMING : "NVIDIA Cloud Gaming",
|
100 |
+
NVML_BRAND_QUADRO_RTX : "Quadro RTX",
|
101 |
+
NVML_BRAND_NVIDIA_RTX : "NVIDIA RTX",
|
102 |
+
NVML_BRAND_NVIDIA : "NVIDIA",
|
103 |
+
NVML_BRAND_GEFORCE_RTX : "GeForce RTX",
|
104 |
+
NVML_BRAND_TITAN_RTX : "TITAN RTX",
|
105 |
+
|
106 |
+
}
|
107 |
+
|
108 |
+
try:
|
109 |
+
# If nvmlDeviceGetBrand() succeeds it is guaranteed to be in the dictionary
|
110 |
+
brandName = brandNames[nvmlDeviceGetBrand(handle)]
|
111 |
+
except NVMLError as err:
|
112 |
+
brandName = handleError(err)
|
113 |
+
|
114 |
+
strResult += ' <product_brand>' + brandName + '</product_brand>\n'
|
115 |
+
|
116 |
+
try:
|
117 |
+
serial = nvmlDeviceGetSerial(handle)
|
118 |
+
except NVMLError as err:
|
119 |
+
serial = handleError(err)
|
120 |
+
|
121 |
+
strResult += ' <serial>' + serial + '</serial>\n'
|
122 |
+
|
123 |
+
try:
|
124 |
+
uuid = nvmlDeviceGetUUID(handle)
|
125 |
+
except NVMLError as err:
|
126 |
+
uuid = handleError(err)
|
127 |
+
|
128 |
+
strResult += ' <uuid>' + uuid + '</uuid>\n'
|
129 |
+
|
130 |
+
strResult += ' <gpu_virtualization_mode>\n'
|
131 |
+
try:
|
132 |
+
mode = StrVirt(nvmlDeviceGetVirtualizationMode(handle))
|
133 |
+
except NVMLError as err:
|
134 |
+
mode = handleError(err)
|
135 |
+
strResult += ' <virtualization_mode>' + mode + '</virtualization_mode>\n'
|
136 |
+
strResult += ' </gpu_virtualization_mode>\n'
|
137 |
+
|
138 |
+
try:
|
139 |
+
gridLicensableFeatures = nvmlDeviceGetGridLicensableFeatures(handle)
|
140 |
+
if gridLicensableFeatures.isGridLicenseSupported == 1:
|
141 |
+
strResult += ' <vgpu_software_licensed_product>\n'
|
142 |
+
for i in range(gridLicensableFeatures.licensableFeaturesCount):
|
143 |
+
if gridLicensableFeatures.gridLicensableFeatures[i].featureState == 0:
|
144 |
+
if nvmlDeviceGetVirtualizationMode(handle) == NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH:
|
145 |
+
strResult += ' <licensed_product_name>' + 'NVIDIA Virtual Applications' + '</licensed_product_name>\n'
|
146 |
+
strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
|
147 |
+
else:
|
148 |
+
strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[i].productName + '</licensed_product_name>\n'
|
149 |
+
strResult += ' <license_status>' + 'Unlicensed' + '</license_status>\n'
|
150 |
+
else:
|
151 |
+
strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[i].productName + '</licensed_product_name>\n'
|
152 |
+
strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
|
153 |
+
strResult += ' </vgpu_software_licensed_product>\n'
|
154 |
+
except NVMLError as err:
|
155 |
+
gridLicensableFeatures = handleError(err)
|
156 |
+
|
157 |
+
strResult += ' </gpu>\n'
|
158 |
+
|
159 |
+
except NVMLError as err:
|
160 |
+
strResult += 'example.py: ' + err.__str__() + '\n'
|
161 |
+
|
162 |
+
nvmlShutdown()
|
163 |
+
|
164 |
+
return strResult
|
165 |
+
|
166 |
+
# If this is not exectued when module is imported
|
167 |
+
if __name__ == "__main__":
|
168 |
+
print(deviceQuery())
|
169 |
+
|
.venv/lib/python3.11/site-packages/google_auth_httplib2.py
ADDED
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2016 Google Inc.
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
"""Transport adapter for httplib2."""
|
16 |
+
|
17 |
+
from __future__ import absolute_import
|
18 |
+
|
19 |
+
import http.client
|
20 |
+
import logging
|
21 |
+
|
22 |
+
from google.auth import exceptions
|
23 |
+
from google.auth import transport
|
24 |
+
import httplib2
|
25 |
+
|
26 |
+
|
27 |
+
_LOGGER = logging.getLogger(__name__)
|
28 |
+
# Properties present in file-like streams / buffers.
|
29 |
+
_STREAM_PROPERTIES = ("read", "seek", "tell")
|
30 |
+
|
31 |
+
|
32 |
+
class _Response(transport.Response):
|
33 |
+
"""httplib2 transport response adapter.
|
34 |
+
|
35 |
+
Args:
|
36 |
+
response (httplib2.Response): The raw httplib2 response.
|
37 |
+
data (bytes): The response body.
|
38 |
+
"""
|
39 |
+
|
40 |
+
def __init__(self, response, data):
|
41 |
+
self._response = response
|
42 |
+
self._data = data
|
43 |
+
|
44 |
+
@property
|
45 |
+
def status(self):
|
46 |
+
"""int: The HTTP status code."""
|
47 |
+
return self._response.status
|
48 |
+
|
49 |
+
@property
|
50 |
+
def headers(self):
|
51 |
+
"""Mapping[str, str]: The HTTP response headers."""
|
52 |
+
return dict(self._response)
|
53 |
+
|
54 |
+
@property
|
55 |
+
def data(self):
|
56 |
+
"""bytes: The response body."""
|
57 |
+
return self._data
|
58 |
+
|
59 |
+
|
60 |
+
class Request(transport.Request):
|
61 |
+
"""httplib2 request adapter.
|
62 |
+
|
63 |
+
This class is used internally for making requests using various transports
|
64 |
+
in a consistent way. If you use :class:`AuthorizedHttp` you do not need
|
65 |
+
to construct or use this class directly.
|
66 |
+
|
67 |
+
This class can be useful if you want to manually refresh a
|
68 |
+
:class:`~google.auth.credentials.Credentials` instance::
|
69 |
+
|
70 |
+
import google_auth_httplib2
|
71 |
+
import httplib2
|
72 |
+
|
73 |
+
http = httplib2.Http()
|
74 |
+
request = google_auth_httplib2.Request(http)
|
75 |
+
|
76 |
+
credentials.refresh(request)
|
77 |
+
|
78 |
+
Args:
|
79 |
+
http (httplib2.Http): The underlying http object to use to make
|
80 |
+
requests.
|
81 |
+
|
82 |
+
.. automethod:: __call__
|
83 |
+
"""
|
84 |
+
|
85 |
+
def __init__(self, http):
|
86 |
+
self.http = http
|
87 |
+
|
88 |
+
def __call__(
|
89 |
+
self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
|
90 |
+
):
|
91 |
+
"""Make an HTTP request using httplib2.
|
92 |
+
|
93 |
+
Args:
|
94 |
+
url (str): The URI to be requested.
|
95 |
+
method (str): The HTTP method to use for the request. Defaults
|
96 |
+
to 'GET'.
|
97 |
+
body (bytes): The payload / body in HTTP request.
|
98 |
+
headers (Mapping[str, str]): Request headers.
|
99 |
+
timeout (Optional[int]): The number of seconds to wait for a
|
100 |
+
response from the server. This is ignored by httplib2 and will
|
101 |
+
issue a warning.
|
102 |
+
kwargs: Additional arguments passed throught to the underlying
|
103 |
+
:meth:`httplib2.Http.request` method.
|
104 |
+
|
105 |
+
Returns:
|
106 |
+
google.auth.transport.Response: The HTTP response.
|
107 |
+
|
108 |
+
Raises:
|
109 |
+
google.auth.exceptions.TransportError: If any exception occurred.
|
110 |
+
"""
|
111 |
+
if timeout is not None:
|
112 |
+
_LOGGER.warning(
|
113 |
+
"httplib2 transport does not support per-request timeout. "
|
114 |
+
"Set the timeout when constructing the httplib2.Http instance."
|
115 |
+
)
|
116 |
+
|
117 |
+
try:
|
118 |
+
_LOGGER.debug("Making request: %s %s", method, url)
|
119 |
+
response, data = self.http.request(
|
120 |
+
url, method=method, body=body, headers=headers, **kwargs
|
121 |
+
)
|
122 |
+
return _Response(response, data)
|
123 |
+
# httplib2 should catch the lower http error, this is a bug and
|
124 |
+
# needs to be fixed there. Catch the error for the meanwhile.
|
125 |
+
except (httplib2.HttpLib2Error, http.client.HTTPException) as exc:
|
126 |
+
raise exceptions.TransportError(exc)
|
127 |
+
|
128 |
+
|
129 |
+
def _make_default_http():
|
130 |
+
"""Returns a default httplib2.Http instance."""
|
131 |
+
return httplib2.Http()
|
132 |
+
|
133 |
+
|
134 |
+
class AuthorizedHttp(object):
|
135 |
+
"""A httplib2 HTTP class with credentials.
|
136 |
+
|
137 |
+
This class is used to perform requests to API endpoints that require
|
138 |
+
authorization::
|
139 |
+
|
140 |
+
from google.auth.transport._httplib2 import AuthorizedHttp
|
141 |
+
|
142 |
+
authed_http = AuthorizedHttp(credentials)
|
143 |
+
|
144 |
+
response = authed_http.request(
|
145 |
+
'https://www.googleapis.com/storage/v1/b')
|
146 |
+
|
147 |
+
This class implements :meth:`request` in the same way as
|
148 |
+
:class:`httplib2.Http` and can usually be used just like any other
|
149 |
+
instance of :class:``httplib2.Http`.
|
150 |
+
|
151 |
+
The underlying :meth:`request` implementation handles adding the
|
152 |
+
credentials' headers to the request and refreshing credentials as needed.
|
153 |
+
"""
|
154 |
+
|
155 |
+
def __init__(
|
156 |
+
self,
|
157 |
+
credentials,
|
158 |
+
http=None,
|
159 |
+
refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
|
160 |
+
max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
|
161 |
+
):
|
162 |
+
"""
|
163 |
+
Args:
|
164 |
+
credentials (google.auth.credentials.Credentials): The credentials
|
165 |
+
to add to the request.
|
166 |
+
http (httplib2.Http): The underlying HTTP object to
|
167 |
+
use to make requests. If not specified, a
|
168 |
+
:class:`httplib2.Http` instance will be constructed.
|
169 |
+
refresh_status_codes (Sequence[int]): Which HTTP status codes
|
170 |
+
indicate that credentials should be refreshed and the request
|
171 |
+
should be retried.
|
172 |
+
max_refresh_attempts (int): The maximum number of times to attempt
|
173 |
+
to refresh the credentials and retry the request.
|
174 |
+
"""
|
175 |
+
|
176 |
+
if http is None:
|
177 |
+
http = _make_default_http()
|
178 |
+
|
179 |
+
self.http = http
|
180 |
+
self.credentials = credentials
|
181 |
+
self._refresh_status_codes = refresh_status_codes
|
182 |
+
self._max_refresh_attempts = max_refresh_attempts
|
183 |
+
# Request instance used by internal methods (for example,
|
184 |
+
# credentials.refresh).
|
185 |
+
self._request = Request(self.http)
|
186 |
+
|
187 |
+
def close(self):
|
188 |
+
"""Calls httplib2's Http.close"""
|
189 |
+
self.http.close()
|
190 |
+
|
191 |
+
def request(
|
192 |
+
self,
|
193 |
+
uri,
|
194 |
+
method="GET",
|
195 |
+
body=None,
|
196 |
+
headers=None,
|
197 |
+
redirections=httplib2.DEFAULT_MAX_REDIRECTS,
|
198 |
+
connection_type=None,
|
199 |
+
**kwargs
|
200 |
+
):
|
201 |
+
"""Implementation of httplib2's Http.request."""
|
202 |
+
|
203 |
+
_credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
|
204 |
+
|
205 |
+
# Make a copy of the headers. They will be modified by the credentials
|
206 |
+
# and we want to pass the original headers if we recurse.
|
207 |
+
request_headers = headers.copy() if headers is not None else {}
|
208 |
+
|
209 |
+
self.credentials.before_request(self._request, method, uri, request_headers)
|
210 |
+
|
211 |
+
# Check if the body is a file-like stream, and if so, save the body
|
212 |
+
# stream position so that it can be restored in case of refresh.
|
213 |
+
body_stream_position = None
|
214 |
+
if all(getattr(body, stream_prop, None) for stream_prop in _STREAM_PROPERTIES):
|
215 |
+
body_stream_position = body.tell()
|
216 |
+
|
217 |
+
# Make the request.
|
218 |
+
response, content = self.http.request(
|
219 |
+
uri,
|
220 |
+
method,
|
221 |
+
body=body,
|
222 |
+
headers=request_headers,
|
223 |
+
redirections=redirections,
|
224 |
+
connection_type=connection_type,
|
225 |
+
**kwargs
|
226 |
+
)
|
227 |
+
|
228 |
+
# If the response indicated that the credentials needed to be
|
229 |
+
# refreshed, then refresh the credentials and re-attempt the
|
230 |
+
# request.
|
231 |
+
# A stored token may expire between the time it is retrieved and
|
232 |
+
# the time the request is made, so we may need to try twice.
|
233 |
+
if (
|
234 |
+
response.status in self._refresh_status_codes
|
235 |
+
and _credential_refresh_attempt < self._max_refresh_attempts
|
236 |
+
):
|
237 |
+
|
238 |
+
_LOGGER.info(
|
239 |
+
"Refreshing credentials due to a %s response. Attempt %s/%s.",
|
240 |
+
response.status,
|
241 |
+
_credential_refresh_attempt + 1,
|
242 |
+
self._max_refresh_attempts,
|
243 |
+
)
|
244 |
+
|
245 |
+
self.credentials.refresh(self._request)
|
246 |
+
|
247 |
+
# Restore the body's stream position if needed.
|
248 |
+
if body_stream_position is not None:
|
249 |
+
body.seek(body_stream_position)
|
250 |
+
|
251 |
+
# Recurse. Pass in the original headers, not our modified set.
|
252 |
+
return self.request(
|
253 |
+
uri,
|
254 |
+
method,
|
255 |
+
body=body,
|
256 |
+
headers=headers,
|
257 |
+
redirections=redirections,
|
258 |
+
connection_type=connection_type,
|
259 |
+
_credential_refresh_attempt=_credential_refresh_attempt + 1,
|
260 |
+
**kwargs
|
261 |
+
)
|
262 |
+
|
263 |
+
return response, content
|
264 |
+
|
265 |
+
def add_certificate(self, key, cert, domain, password=None):
|
266 |
+
"""Proxy to httplib2.Http.add_certificate."""
|
267 |
+
self.http.add_certificate(key, cert, domain, password=password)
|
268 |
+
|
269 |
+
@property
|
270 |
+
def connections(self):
|
271 |
+
"""Proxy to httplib2.Http.connections."""
|
272 |
+
return self.http.connections
|
273 |
+
|
274 |
+
@connections.setter
|
275 |
+
def connections(self, value):
|
276 |
+
"""Proxy to httplib2.Http.connections."""
|
277 |
+
self.http.connections = value
|
278 |
+
|
279 |
+
@property
|
280 |
+
def follow_redirects(self):
|
281 |
+
"""Proxy to httplib2.Http.follow_redirects."""
|
282 |
+
return self.http.follow_redirects
|
283 |
+
|
284 |
+
@follow_redirects.setter
|
285 |
+
def follow_redirects(self, value):
|
286 |
+
"""Proxy to httplib2.Http.follow_redirects."""
|
287 |
+
self.http.follow_redirects = value
|
288 |
+
|
289 |
+
@property
|
290 |
+
def timeout(self):
|
291 |
+
"""Proxy to httplib2.Http.timeout."""
|
292 |
+
return self.http.timeout
|
293 |
+
|
294 |
+
@timeout.setter
|
295 |
+
def timeout(self, value):
|
296 |
+
"""Proxy to httplib2.Http.timeout."""
|
297 |
+
self.http.timeout = value
|
298 |
+
|
299 |
+
@property
|
300 |
+
def redirect_codes(self):
|
301 |
+
"""Proxy to httplib2.Http.redirect_codes."""
|
302 |
+
return self.http.redirect_codes
|
303 |
+
|
304 |
+
@redirect_codes.setter
|
305 |
+
def redirect_codes(self, value):
|
306 |
+
"""Proxy to httplib2.Http.redirect_codes."""
|
307 |
+
self.http.redirect_codes = value
|
.venv/lib/python3.11/site-packages/google_generativeai-0.8.4-py3.12-nspkg.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:492187369ed89466a43eca61c645a6d1f2a0de9ceb7d0611438d993ed88f17ee
|
3 |
+
size 467
|
.venv/lib/python3.11/site-packages/isympy.py
ADDED
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Python shell for SymPy.
|
3 |
+
|
4 |
+
This is just a normal Python shell (IPython shell if you have the
|
5 |
+
IPython package installed), that executes the following commands for
|
6 |
+
the user:
|
7 |
+
|
8 |
+
>>> from __future__ import division
|
9 |
+
>>> from sympy import *
|
10 |
+
>>> x, y, z, t = symbols('x y z t')
|
11 |
+
>>> k, m, n = symbols('k m n', integer=True)
|
12 |
+
>>> f, g, h = symbols('f g h', cls=Function)
|
13 |
+
>>> init_printing()
|
14 |
+
|
15 |
+
So starting 'isympy' is equivalent to starting Python (or IPython) and
|
16 |
+
executing the above commands by hand. It is intended for easy and quick
|
17 |
+
experimentation with SymPy. isympy is a good way to use SymPy as an
|
18 |
+
interactive calculator. If you have IPython and Matplotlib installed, then
|
19 |
+
interactive plotting is enabled by default.
|
20 |
+
|
21 |
+
COMMAND LINE OPTIONS
|
22 |
+
--------------------
|
23 |
+
|
24 |
+
-c CONSOLE, --console=CONSOLE
|
25 |
+
|
26 |
+
Use the specified shell (Python or IPython) shell as the console
|
27 |
+
backend instead of the default one (IPython if present, Python
|
28 |
+
otherwise), e.g.:
|
29 |
+
|
30 |
+
$isympy -c python
|
31 |
+
|
32 |
+
CONSOLE must be one of 'ipython' or 'python'
|
33 |
+
|
34 |
+
-p PRETTY, --pretty PRETTY
|
35 |
+
|
36 |
+
Setup pretty-printing in SymPy. When pretty-printing is enabled,
|
37 |
+
expressions can be printed with Unicode or ASCII. The default is
|
38 |
+
to use pretty-printing (with Unicode if the terminal supports it).
|
39 |
+
When this option is 'no', expressions will not be pretty-printed
|
40 |
+
and ASCII will be used:
|
41 |
+
|
42 |
+
$isympy -p no
|
43 |
+
|
44 |
+
PRETTY must be one of 'unicode', 'ascii', or 'no'
|
45 |
+
|
46 |
+
-t TYPES, --types=TYPES
|
47 |
+
|
48 |
+
Setup the ground types for the polys. By default, gmpy ground types
|
49 |
+
are used if gmpy2 or gmpy is installed, otherwise it falls back to python
|
50 |
+
ground types, which are a little bit slower. You can manually
|
51 |
+
choose python ground types even if gmpy is installed (e.g., for
|
52 |
+
testing purposes):
|
53 |
+
|
54 |
+
$isympy -t python
|
55 |
+
|
56 |
+
TYPES must be one of 'gmpy', 'gmpy1' or 'python'
|
57 |
+
|
58 |
+
Note that the ground type gmpy1 is primarily intended for testing; it
|
59 |
+
forces the use of gmpy version 1 even if gmpy2 is available.
|
60 |
+
|
61 |
+
This is the same as setting the environment variable
|
62 |
+
SYMPY_GROUND_TYPES to the given ground type (e.g.,
|
63 |
+
SYMPY_GROUND_TYPES='gmpy')
|
64 |
+
|
65 |
+
The ground types can be determined interactively from the variable
|
66 |
+
sympy.polys.domains.GROUND_TYPES.
|
67 |
+
|
68 |
+
-o ORDER, --order ORDER
|
69 |
+
|
70 |
+
Setup the ordering of terms for printing. The default is lex, which
|
71 |
+
orders terms lexicographically (e.g., x**2 + x + 1). You can choose
|
72 |
+
other orderings, such as rev-lex, which will use reverse
|
73 |
+
lexicographic ordering (e.g., 1 + x + x**2):
|
74 |
+
|
75 |
+
$isympy -o rev-lex
|
76 |
+
|
77 |
+
ORDER must be one of 'lex', 'rev-lex', 'grlex', 'rev-grlex',
|
78 |
+
'grevlex', 'rev-grevlex', 'old', or 'none'.
|
79 |
+
|
80 |
+
Note that for very large expressions, ORDER='none' may speed up
|
81 |
+
printing considerably but the terms will have no canonical order.
|
82 |
+
|
83 |
+
-q, --quiet
|
84 |
+
|
85 |
+
Print only Python's and SymPy's versions to stdout at startup.
|
86 |
+
|
87 |
+
-d, --doctest
|
88 |
+
|
89 |
+
Use the same format that should be used for doctests. This is
|
90 |
+
equivalent to -c python -p no.
|
91 |
+
|
92 |
+
-C, --no-cache
|
93 |
+
|
94 |
+
Disable the caching mechanism. Disabling the cache may slow certain
|
95 |
+
operations down considerably. This is useful for testing the cache,
|
96 |
+
or for benchmarking, as the cache can result in deceptive timings.
|
97 |
+
|
98 |
+
This is equivalent to setting the environment variable
|
99 |
+
SYMPY_USE_CACHE to 'no'.
|
100 |
+
|
101 |
+
-a, --auto-symbols (requires at least IPython 0.11)
|
102 |
+
|
103 |
+
Automatically create missing symbols. Normally, typing a name of a
|
104 |
+
Symbol that has not been instantiated first would raise NameError,
|
105 |
+
but with this option enabled, any undefined name will be
|
106 |
+
automatically created as a Symbol.
|
107 |
+
|
108 |
+
Note that this is intended only for interactive, calculator style
|
109 |
+
usage. In a script that uses SymPy, Symbols should be instantiated
|
110 |
+
at the top, so that it's clear what they are.
|
111 |
+
|
112 |
+
This will not override any names that are already defined, which
|
113 |
+
includes the single character letters represented by the mnemonic
|
114 |
+
QCOSINE (see the "Gotchas and Pitfalls" document in the
|
115 |
+
documentation). You can delete existing names by executing "del
|
116 |
+
name". If a name is defined, typing "'name' in dir()" will return True.
|
117 |
+
|
118 |
+
The Symbols that are created using this have default assumptions.
|
119 |
+
If you want to place assumptions on symbols, you should create them
|
120 |
+
using symbols() or var().
|
121 |
+
|
122 |
+
Finally, this only works in the top level namespace. So, for
|
123 |
+
example, if you define a function in isympy with an undefined
|
124 |
+
Symbol, it will not work.
|
125 |
+
|
126 |
+
See also the -i and -I options.
|
127 |
+
|
128 |
+
-i, --int-to-Integer (requires at least IPython 0.11)
|
129 |
+
|
130 |
+
Automatically wrap int literals with Integer. This makes it so that
|
131 |
+
things like 1/2 will come out as Rational(1, 2), rather than 0.5. This
|
132 |
+
works by preprocessing the source and wrapping all int literals with
|
133 |
+
Integer. Note that this will not change the behavior of int literals
|
134 |
+
assigned to variables, and it also won't change the behavior of functions
|
135 |
+
that return int literals.
|
136 |
+
|
137 |
+
If you want an int, you can wrap the literal in int(), e.g. int(3)/int(2)
|
138 |
+
gives 1.5 (with division imported from __future__).
|
139 |
+
|
140 |
+
-I, --interactive (requires at least IPython 0.11)
|
141 |
+
|
142 |
+
This is equivalent to --auto-symbols --int-to-Integer. Future options
|
143 |
+
designed for ease of interactive use may be added to this.
|
144 |
+
|
145 |
+
-D, --debug
|
146 |
+
|
147 |
+
Enable debugging output. This is the same as setting the
|
148 |
+
environment variable SYMPY_DEBUG to 'True'. The debug status is set
|
149 |
+
in the variable SYMPY_DEBUG within isympy.
|
150 |
+
|
151 |
+
-- IPython options
|
152 |
+
|
153 |
+
Additionally you can pass command line options directly to the IPython
|
154 |
+
interpreter (the standard Python shell is not supported). However you
|
155 |
+
need to add the '--' separator between two types of options, e.g the
|
156 |
+
startup banner option and the colors option. You need to enter the
|
157 |
+
options as required by the version of IPython that you are using, too:
|
158 |
+
|
159 |
+
in IPython 0.11,
|
160 |
+
|
161 |
+
$isympy -q -- --colors=NoColor
|
162 |
+
|
163 |
+
or older versions of IPython,
|
164 |
+
|
165 |
+
$isympy -q -- -colors NoColor
|
166 |
+
|
167 |
+
See also isympy --help.
|
168 |
+
"""
|
169 |
+
|
170 |
+
import os
|
171 |
+
import sys
|
172 |
+
|
173 |
+
# DO NOT IMPORT SYMPY HERE! Or the setting of the sympy environment variables
|
174 |
+
# by the command line will break.
|
175 |
+
|
176 |
+
def main() -> None:
|
177 |
+
from argparse import ArgumentParser, RawDescriptionHelpFormatter
|
178 |
+
|
179 |
+
VERSION = None
|
180 |
+
if '--version' in sys.argv:
|
181 |
+
# We cannot import sympy before this is run, because flags like -C and
|
182 |
+
# -t set environment variables that must be set before SymPy is
|
183 |
+
# imported. The only thing we need to import it for is to get the
|
184 |
+
# version, which only matters with the --version flag.
|
185 |
+
import sympy
|
186 |
+
VERSION = sympy.__version__
|
187 |
+
|
188 |
+
usage = 'isympy [options] -- [ipython options]'
|
189 |
+
parser = ArgumentParser(
|
190 |
+
usage=usage,
|
191 |
+
description=__doc__,
|
192 |
+
formatter_class=RawDescriptionHelpFormatter,
|
193 |
+
)
|
194 |
+
|
195 |
+
parser.add_argument('--version', action='version', version=VERSION)
|
196 |
+
|
197 |
+
parser.add_argument(
|
198 |
+
'-c', '--console',
|
199 |
+
dest='console',
|
200 |
+
action='store',
|
201 |
+
default=None,
|
202 |
+
choices=['ipython', 'python'],
|
203 |
+
metavar='CONSOLE',
|
204 |
+
help='select type of interactive session: ipython | python; defaults '
|
205 |
+
'to ipython if IPython is installed, otherwise python')
|
206 |
+
|
207 |
+
parser.add_argument(
|
208 |
+
'-p', '--pretty',
|
209 |
+
dest='pretty',
|
210 |
+
action='store',
|
211 |
+
default=None,
|
212 |
+
metavar='PRETTY',
|
213 |
+
choices=['unicode', 'ascii', 'no'],
|
214 |
+
help='setup pretty printing: unicode | ascii | no; defaults to '
|
215 |
+
'unicode printing if the terminal supports it, otherwise ascii')
|
216 |
+
|
217 |
+
parser.add_argument(
|
218 |
+
'-t', '--types',
|
219 |
+
dest='types',
|
220 |
+
action='store',
|
221 |
+
default=None,
|
222 |
+
metavar='TYPES',
|
223 |
+
choices=['gmpy', 'gmpy1', 'python'],
|
224 |
+
help='setup ground types: gmpy | gmpy1 | python; defaults to gmpy if gmpy2 '
|
225 |
+
'or gmpy is installed, otherwise python')
|
226 |
+
|
227 |
+
parser.add_argument(
|
228 |
+
'-o', '--order',
|
229 |
+
dest='order',
|
230 |
+
action='store',
|
231 |
+
default=None,
|
232 |
+
metavar='ORDER',
|
233 |
+
choices=['lex', 'grlex', 'grevlex', 'rev-lex', 'rev-grlex', 'rev-grevlex', 'old', 'none'],
|
234 |
+
help='setup ordering of terms: [rev-]lex | [rev-]grlex | [rev-]grevlex | old | none; defaults to lex')
|
235 |
+
|
236 |
+
parser.add_argument(
|
237 |
+
'-q', '--quiet',
|
238 |
+
dest='quiet',
|
239 |
+
action='store_true',
|
240 |
+
default=False,
|
241 |
+
help='print only version information at startup')
|
242 |
+
|
243 |
+
parser.add_argument(
|
244 |
+
'-d', '--doctest',
|
245 |
+
dest='doctest',
|
246 |
+
action='store_true',
|
247 |
+
default=False,
|
248 |
+
help='use the doctest format for output (you can just copy and paste it)')
|
249 |
+
|
250 |
+
parser.add_argument(
|
251 |
+
'-C', '--no-cache',
|
252 |
+
dest='cache',
|
253 |
+
action='store_false',
|
254 |
+
default=True,
|
255 |
+
help='disable caching mechanism')
|
256 |
+
|
257 |
+
parser.add_argument(
|
258 |
+
'-a', '--auto-symbols',
|
259 |
+
dest='auto_symbols',
|
260 |
+
action='store_true',
|
261 |
+
default=False,
|
262 |
+
help='automatically construct missing symbols')
|
263 |
+
|
264 |
+
parser.add_argument(
|
265 |
+
'-i', '--int-to-Integer',
|
266 |
+
dest='auto_int_to_Integer',
|
267 |
+
action='store_true',
|
268 |
+
default=False,
|
269 |
+
help="automatically wrap int literals with Integer")
|
270 |
+
|
271 |
+
parser.add_argument(
|
272 |
+
'-I', '--interactive',
|
273 |
+
dest='interactive',
|
274 |
+
action='store_true',
|
275 |
+
default=False,
|
276 |
+
help="equivalent to -a -i")
|
277 |
+
|
278 |
+
parser.add_argument(
|
279 |
+
'-D', '--debug',
|
280 |
+
dest='debug',
|
281 |
+
action='store_true',
|
282 |
+
default=False,
|
283 |
+
help='enable debugging output')
|
284 |
+
|
285 |
+
(options, ipy_args) = parser.parse_known_args()
|
286 |
+
if '--' in ipy_args:
|
287 |
+
ipy_args.remove('--')
|
288 |
+
|
289 |
+
if not options.cache:
|
290 |
+
os.environ['SYMPY_USE_CACHE'] = 'no'
|
291 |
+
|
292 |
+
if options.types:
|
293 |
+
os.environ['SYMPY_GROUND_TYPES'] = options.types
|
294 |
+
|
295 |
+
if options.debug:
|
296 |
+
os.environ['SYMPY_DEBUG'] = str(options.debug)
|
297 |
+
|
298 |
+
if options.doctest:
|
299 |
+
options.pretty = 'no'
|
300 |
+
options.console = 'python'
|
301 |
+
|
302 |
+
session = options.console
|
303 |
+
|
304 |
+
if session is not None:
|
305 |
+
ipython = session == 'ipython'
|
306 |
+
else:
|
307 |
+
try:
|
308 |
+
import IPython
|
309 |
+
ipython = True
|
310 |
+
except ImportError:
|
311 |
+
if not options.quiet:
|
312 |
+
from sympy.interactive.session import no_ipython
|
313 |
+
print(no_ipython)
|
314 |
+
ipython = False
|
315 |
+
|
316 |
+
args = {
|
317 |
+
'pretty_print': True,
|
318 |
+
'use_unicode': None,
|
319 |
+
'use_latex': None,
|
320 |
+
'order': None,
|
321 |
+
'argv': ipy_args,
|
322 |
+
}
|
323 |
+
|
324 |
+
if options.pretty == 'unicode':
|
325 |
+
args['use_unicode'] = True
|
326 |
+
elif options.pretty == 'ascii':
|
327 |
+
args['use_unicode'] = False
|
328 |
+
elif options.pretty == 'no':
|
329 |
+
args['pretty_print'] = False
|
330 |
+
|
331 |
+
if options.order is not None:
|
332 |
+
args['order'] = options.order
|
333 |
+
|
334 |
+
args['quiet'] = options.quiet
|
335 |
+
args['auto_symbols'] = options.auto_symbols or options.interactive
|
336 |
+
args['auto_int_to_Integer'] = options.auto_int_to_Integer or options.interactive
|
337 |
+
|
338 |
+
from sympy.interactive import init_session
|
339 |
+
init_session(ipython, **args)
|
340 |
+
|
341 |
+
if __name__ == "__main__":
|
342 |
+
main()
|
.venv/lib/python3.11/site-packages/jsonschema/__init__.py
ADDED
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
An implementation of JSON Schema for Python.
|
3 |
+
|
4 |
+
The main functionality is provided by the validator classes for each of the
|
5 |
+
supported JSON Schema versions.
|
6 |
+
|
7 |
+
Most commonly, `jsonschema.validators.validate` is the quickest way to simply
|
8 |
+
validate a given instance under a schema, and will create a validator
|
9 |
+
for you.
|
10 |
+
"""
|
11 |
+
import warnings
|
12 |
+
|
13 |
+
from jsonschema._format import FormatChecker
|
14 |
+
from jsonschema._types import TypeChecker
|
15 |
+
from jsonschema.exceptions import SchemaError, ValidationError
|
16 |
+
from jsonschema.validators import (
|
17 |
+
Draft3Validator,
|
18 |
+
Draft4Validator,
|
19 |
+
Draft6Validator,
|
20 |
+
Draft7Validator,
|
21 |
+
Draft201909Validator,
|
22 |
+
Draft202012Validator,
|
23 |
+
validate,
|
24 |
+
)
|
25 |
+
|
26 |
+
|
27 |
+
def __getattr__(name):
|
28 |
+
if name == "__version__":
|
29 |
+
warnings.warn(
|
30 |
+
"Accessing jsonschema.__version__ is deprecated and will be "
|
31 |
+
"removed in a future release. Use importlib.metadata directly "
|
32 |
+
"to query for jsonschema's version.",
|
33 |
+
DeprecationWarning,
|
34 |
+
stacklevel=2,
|
35 |
+
)
|
36 |
+
|
37 |
+
from importlib import metadata
|
38 |
+
return metadata.version("jsonschema")
|
39 |
+
elif name == "RefResolver":
|
40 |
+
from jsonschema.validators import _RefResolver
|
41 |
+
warnings.warn(
|
42 |
+
_RefResolver._DEPRECATION_MESSAGE,
|
43 |
+
DeprecationWarning,
|
44 |
+
stacklevel=2,
|
45 |
+
)
|
46 |
+
return _RefResolver
|
47 |
+
elif name == "ErrorTree":
|
48 |
+
warnings.warn(
|
49 |
+
"Importing ErrorTree directly from the jsonschema package "
|
50 |
+
"is deprecated and will become an ImportError. Import it from "
|
51 |
+
"jsonschema.exceptions instead.",
|
52 |
+
DeprecationWarning,
|
53 |
+
stacklevel=2,
|
54 |
+
)
|
55 |
+
from jsonschema.exceptions import ErrorTree
|
56 |
+
return ErrorTree
|
57 |
+
elif name == "FormatError":
|
58 |
+
warnings.warn(
|
59 |
+
"Importing FormatError directly from the jsonschema package "
|
60 |
+
"is deprecated and will become an ImportError. Import it from "
|
61 |
+
"jsonschema.exceptions instead.",
|
62 |
+
DeprecationWarning,
|
63 |
+
stacklevel=2,
|
64 |
+
)
|
65 |
+
from jsonschema.exceptions import FormatError
|
66 |
+
return FormatError
|
67 |
+
elif name == "Validator":
|
68 |
+
warnings.warn(
|
69 |
+
"Importing Validator directly from the jsonschema package "
|
70 |
+
"is deprecated and will become an ImportError. Import it from "
|
71 |
+
"jsonschema.protocols instead.",
|
72 |
+
DeprecationWarning,
|
73 |
+
stacklevel=2,
|
74 |
+
)
|
75 |
+
from jsonschema.protocols import Validator
|
76 |
+
return Validator
|
77 |
+
elif name == "RefResolutionError":
|
78 |
+
from jsonschema.exceptions import _RefResolutionError
|
79 |
+
warnings.warn(
|
80 |
+
_RefResolutionError._DEPRECATION_MESSAGE,
|
81 |
+
DeprecationWarning,
|
82 |
+
stacklevel=2,
|
83 |
+
)
|
84 |
+
return _RefResolutionError
|
85 |
+
|
86 |
+
format_checkers = {
|
87 |
+
"draft3_format_checker": Draft3Validator,
|
88 |
+
"draft4_format_checker": Draft4Validator,
|
89 |
+
"draft6_format_checker": Draft6Validator,
|
90 |
+
"draft7_format_checker": Draft7Validator,
|
91 |
+
"draft201909_format_checker": Draft201909Validator,
|
92 |
+
"draft202012_format_checker": Draft202012Validator,
|
93 |
+
}
|
94 |
+
ValidatorForFormat = format_checkers.get(name)
|
95 |
+
if ValidatorForFormat is not None:
|
96 |
+
warnings.warn(
|
97 |
+
f"Accessing jsonschema.{name} is deprecated and will be "
|
98 |
+
"removed in a future release. Instead, use the FORMAT_CHECKER "
|
99 |
+
"attribute on the corresponding Validator.",
|
100 |
+
DeprecationWarning,
|
101 |
+
stacklevel=2,
|
102 |
+
)
|
103 |
+
return ValidatorForFormat.FORMAT_CHECKER
|
104 |
+
|
105 |
+
raise AttributeError(f"module {__name__} has no attribute {name}")
|
106 |
+
|
107 |
+
|
108 |
+
__all__ = [
|
109 |
+
"Draft201909Validator",
|
110 |
+
"Draft202012Validator",
|
111 |
+
"Draft3Validator",
|
112 |
+
"Draft4Validator",
|
113 |
+
"Draft6Validator",
|
114 |
+
"Draft7Validator",
|
115 |
+
"FormatChecker",
|
116 |
+
"SchemaError",
|
117 |
+
"TypeChecker",
|
118 |
+
"ValidationError",
|
119 |
+
"validate",
|
120 |
+
]
|
.venv/lib/python3.11/site-packages/jsonschema/__main__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
The jsonschema CLI is now deprecated in favor of check-jsonschema.
|
3 |
+
"""
|
4 |
+
from jsonschema.cli import main
|
5 |
+
|
6 |
+
main()
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (4.17 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/__main__.cpython-311.pyc
ADDED
Binary file (364 Bytes). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_format.cpython-311.pyc
ADDED
Binary file (21.1 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_keywords.cpython-311.pyc
ADDED
Binary file (23.4 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_legacy_keywords.cpython-311.pyc
ADDED
Binary file (19.7 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_types.cpython-311.pyc
ADDED
Binary file (7.74 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_typing.cpython-311.pyc
ADDED
Binary file (1.42 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/_utils.cpython-311.pyc
ADDED
Binary file (15.4 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/cli.cpython-311.pyc
ADDED
Binary file (13.8 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/exceptions.cpython-311.pyc
ADDED
Binary file (23.2 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/protocols.cpython-311.pyc
ADDED
Binary file (7.2 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/__pycache__/validators.cpython-311.pyc
ADDED
Binary file (53.8 kB). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/_format.py
ADDED
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from contextlib import suppress
|
4 |
+
from datetime import date, datetime
|
5 |
+
from uuid import UUID
|
6 |
+
import ipaddress
|
7 |
+
import re
|
8 |
+
import typing
|
9 |
+
import warnings
|
10 |
+
|
11 |
+
from jsonschema.exceptions import FormatError
|
12 |
+
|
13 |
+
_FormatCheckCallable = typing.Callable[[object], bool]
|
14 |
+
#: A format checker callable.
|
15 |
+
_F = typing.TypeVar("_F", bound=_FormatCheckCallable)
|
16 |
+
_RaisesType = typing.Union[
|
17 |
+
typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...],
|
18 |
+
]
|
19 |
+
|
20 |
+
_RE_DATE = re.compile(r"^\d{4}-\d{2}-\d{2}$", re.ASCII)
|
21 |
+
|
22 |
+
|
23 |
+
class FormatChecker:
|
24 |
+
"""
|
25 |
+
A ``format`` property checker.
|
26 |
+
|
27 |
+
JSON Schema does not mandate that the ``format`` property actually do any
|
28 |
+
validation. If validation is desired however, instances of this class can
|
29 |
+
be hooked into validators to enable format validation.
|
30 |
+
|
31 |
+
`FormatChecker` objects always return ``True`` when asked about
|
32 |
+
formats that they do not know how to validate.
|
33 |
+
|
34 |
+
To add a check for a custom format use the `FormatChecker.checks`
|
35 |
+
decorator.
|
36 |
+
|
37 |
+
Arguments:
|
38 |
+
|
39 |
+
formats:
|
40 |
+
|
41 |
+
The known formats to validate. This argument can be used to
|
42 |
+
limit which formats will be used during validation.
|
43 |
+
|
44 |
+
"""
|
45 |
+
|
46 |
+
checkers: dict[
|
47 |
+
str,
|
48 |
+
tuple[_FormatCheckCallable, _RaisesType],
|
49 |
+
] = {} # noqa: RUF012
|
50 |
+
|
51 |
+
def __init__(self, formats: typing.Iterable[str] | None = None):
|
52 |
+
if formats is None:
|
53 |
+
formats = self.checkers.keys()
|
54 |
+
self.checkers = {k: self.checkers[k] for k in formats}
|
55 |
+
|
56 |
+
def __repr__(self):
|
57 |
+
return f"<FormatChecker checkers={sorted(self.checkers)}>"
|
58 |
+
|
59 |
+
def checks(
|
60 |
+
self, format: str, raises: _RaisesType = (),
|
61 |
+
) -> typing.Callable[[_F], _F]:
|
62 |
+
"""
|
63 |
+
Register a decorated function as validating a new format.
|
64 |
+
|
65 |
+
Arguments:
|
66 |
+
|
67 |
+
format:
|
68 |
+
|
69 |
+
The format that the decorated function will check.
|
70 |
+
|
71 |
+
raises:
|
72 |
+
|
73 |
+
The exception(s) raised by the decorated function when an
|
74 |
+
invalid instance is found.
|
75 |
+
|
76 |
+
The exception object will be accessible as the
|
77 |
+
`jsonschema.exceptions.ValidationError.cause` attribute of the
|
78 |
+
resulting validation error.
|
79 |
+
|
80 |
+
"""
|
81 |
+
|
82 |
+
def _checks(func: _F) -> _F:
|
83 |
+
self.checkers[format] = (func, raises)
|
84 |
+
return func
|
85 |
+
|
86 |
+
return _checks
|
87 |
+
|
88 |
+
@classmethod
|
89 |
+
def cls_checks(
|
90 |
+
cls, format: str, raises: _RaisesType = (),
|
91 |
+
) -> typing.Callable[[_F], _F]:
|
92 |
+
warnings.warn(
|
93 |
+
(
|
94 |
+
"FormatChecker.cls_checks is deprecated. Call "
|
95 |
+
"FormatChecker.checks on a specific FormatChecker instance "
|
96 |
+
"instead."
|
97 |
+
),
|
98 |
+
DeprecationWarning,
|
99 |
+
stacklevel=2,
|
100 |
+
)
|
101 |
+
return cls._cls_checks(format=format, raises=raises)
|
102 |
+
|
103 |
+
@classmethod
|
104 |
+
def _cls_checks(
|
105 |
+
cls, format: str, raises: _RaisesType = (),
|
106 |
+
) -> typing.Callable[[_F], _F]:
|
107 |
+
def _checks(func: _F) -> _F:
|
108 |
+
cls.checkers[format] = (func, raises)
|
109 |
+
return func
|
110 |
+
|
111 |
+
return _checks
|
112 |
+
|
113 |
+
def check(self, instance: object, format: str) -> None:
|
114 |
+
"""
|
115 |
+
Check whether the instance conforms to the given format.
|
116 |
+
|
117 |
+
Arguments:
|
118 |
+
|
119 |
+
instance (*any primitive type*, i.e. str, number, bool):
|
120 |
+
|
121 |
+
The instance to check
|
122 |
+
|
123 |
+
format:
|
124 |
+
|
125 |
+
The format that instance should conform to
|
126 |
+
|
127 |
+
Raises:
|
128 |
+
|
129 |
+
FormatError:
|
130 |
+
|
131 |
+
if the instance does not conform to ``format``
|
132 |
+
|
133 |
+
"""
|
134 |
+
if format not in self.checkers:
|
135 |
+
return
|
136 |
+
|
137 |
+
func, raises = self.checkers[format]
|
138 |
+
result, cause = None, None
|
139 |
+
try:
|
140 |
+
result = func(instance)
|
141 |
+
except raises as e:
|
142 |
+
cause = e
|
143 |
+
if not result:
|
144 |
+
raise FormatError(f"{instance!r} is not a {format!r}", cause=cause)
|
145 |
+
|
146 |
+
def conforms(self, instance: object, format: str) -> bool:
|
147 |
+
"""
|
148 |
+
Check whether the instance conforms to the given format.
|
149 |
+
|
150 |
+
Arguments:
|
151 |
+
|
152 |
+
instance (*any primitive type*, i.e. str, number, bool):
|
153 |
+
|
154 |
+
The instance to check
|
155 |
+
|
156 |
+
format:
|
157 |
+
|
158 |
+
The format that instance should conform to
|
159 |
+
|
160 |
+
Returns:
|
161 |
+
|
162 |
+
bool: whether it conformed
|
163 |
+
|
164 |
+
"""
|
165 |
+
try:
|
166 |
+
self.check(instance, format)
|
167 |
+
except FormatError:
|
168 |
+
return False
|
169 |
+
else:
|
170 |
+
return True
|
171 |
+
|
172 |
+
|
173 |
+
draft3_format_checker = FormatChecker()
|
174 |
+
draft4_format_checker = FormatChecker()
|
175 |
+
draft6_format_checker = FormatChecker()
|
176 |
+
draft7_format_checker = FormatChecker()
|
177 |
+
draft201909_format_checker = FormatChecker()
|
178 |
+
draft202012_format_checker = FormatChecker()
|
179 |
+
|
180 |
+
_draft_checkers: dict[str, FormatChecker] = dict(
|
181 |
+
draft3=draft3_format_checker,
|
182 |
+
draft4=draft4_format_checker,
|
183 |
+
draft6=draft6_format_checker,
|
184 |
+
draft7=draft7_format_checker,
|
185 |
+
draft201909=draft201909_format_checker,
|
186 |
+
draft202012=draft202012_format_checker,
|
187 |
+
)
|
188 |
+
|
189 |
+
|
190 |
+
def _checks_drafts(
    name=None,
    draft3=None,
    draft4=None,
    draft6=None,
    draft7=None,
    draft201909=None,
    draft202012=None,
    raises=(),
) -> typing.Callable[[_F], _F]:
    """
    Return a decorator registering a format predicate with the draft checkers.

    ``name`` is the format name used for every draft that has no explicit
    per-draft name; a per-draft keyword (e.g. ``draft3="ip-address"``)
    overrides it for that draft, and a falsy value skips that draft entirely.
    ``raises`` is the exception type (or tuple) the predicate may raise to
    signal an invalid value.
    """
    draft3 = draft3 or name
    draft4 = draft4 or name
    draft6 = draft6 or name
    draft7 = draft7 or name
    draft201909 = draft201909 or name
    draft202012 = draft202012 or name

    def wrap(func: _F) -> _F:
        # Register on each draft's module-level checker that supports
        # this format.
        if draft3:
            func = _draft_checkers["draft3"].checks(draft3, raises)(func)
        if draft4:
            func = _draft_checkers["draft4"].checks(draft4, raises)(func)
        if draft6:
            func = _draft_checkers["draft6"].checks(draft6, raises)(func)
        if draft7:
            func = _draft_checkers["draft7"].checks(draft7, raises)(func)
        if draft201909:
            func = _draft_checkers["draft201909"].checks(draft201909, raises)(
                func,
            )
        if draft202012:
            func = _draft_checkers["draft202012"].checks(draft202012, raises)(
                func,
            )

        # Oy. This is bad global state, but relied upon for now, until
        # deprecation. See #519 and test_format_checkers_come_with_defaults
        # (registers under the newest draft's name for the class-level
        # default checks).
        FormatChecker._cls_checks(
            draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3,
            raises,
        )(func)
        return func

    return wrap
|
234 |
+
|
235 |
+
|
236 |
+
@_checks_drafts(name="idn-email")
@_checks_drafts(name="email")
def is_email(instance: object) -> bool:
    """Loosely check the ``email`` formats: any string containing an ``@``."""
    # Non-strings are out of scope for string formats and always conform.
    return "@" in instance if isinstance(instance, str) else True
|
242 |
+
|
243 |
+
|
244 |
+
@_checks_drafts(
    draft3="ip-address",
    draft4="ipv4",
    draft6="ipv4",
    draft7="ipv4",
    draft201909="ipv4",
    draft202012="ipv4",
    raises=ipaddress.AddressValueError,
)
def is_ipv4(instance: object) -> bool:
    """Check the ``ipv4`` format by delegating to `ipaddress.IPv4Address`."""
    if isinstance(instance, str):
        # raises AddressValueError (declared above) on malformed input
        return bool(ipaddress.IPv4Address(instance))
    # Non-strings always conform to string formats.
    return True
|
257 |
+
|
258 |
+
|
259 |
+
@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError)
def is_ipv6(instance: object) -> bool:
    """Check the ``ipv6`` format, rejecting zone-scoped addresses."""
    if not isinstance(instance, str):
        return True
    parsed = ipaddress.IPv6Address(instance)
    # Addresses carrying a %zone suffix parse, but are not plain IPv6.
    scope = getattr(parsed, "scope_id", "")
    return not scope
|
265 |
+
|
266 |
+
|
267 |
+
# Hostname checking is only available when the optional `fqdn` package is
# installed; otherwise the format goes unchecked.
with suppress(ImportError):
    from fqdn import FQDN

    @_checks_drafts(
        draft3="host-name",
        draft4="hostname",
        draft6="hostname",
        draft7="hostname",
        draft201909="hostname",
        draft202012="hostname",
    )
    def is_host_name(instance: object) -> bool:
        """Check the ``hostname`` format via the `fqdn` package."""
        if not isinstance(instance, str):
            return True
        # min_labels=1 accepts single-label hostnames (e.g. "localhost")
        return FQDN(instance, min_labels=1).is_valid
|
282 |
+
|
283 |
+
|
284 |
+
with suppress(ImportError):
    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
    # NOTE(review): RFC 3890 looks like a typo for an IDNA RFC (3490/5891) —
    # confirm against upstream.
    import idna

    @_checks_drafts(
        draft7="idn-hostname",
        draft201909="idn-hostname",
        draft202012="idn-hostname",
        raises=(idna.IDNAError, UnicodeError),
    )
    def is_idn_host_name(instance: object) -> bool:
        """Check ``idn-hostname`` by attempting an IDNA encode."""
        if not isinstance(instance, str):
            return True
        # Invalid names raise IDNAError/UnicodeError, caught by the checker.
        idna.encode(instance)
        return True
|
299 |
+
|
300 |
+
|
301 |
+
# URI/IRI checking: prefer `rfc3987` (which also supports IRIs); fall back
# to `rfc3986_validator` (URIs only) when `rfc3987` is unavailable.
try:
    import rfc3987
except ImportError:
    with suppress(ImportError):
        from rfc3986_validator import validate_rfc3986

        @_checks_drafts(name="uri")
        def is_uri(instance: object) -> bool:
            """Check the ``uri`` format via `rfc3986_validator`."""
            if not isinstance(instance, str):
                return True
            return validate_rfc3986(instance, rule="URI")

        @_checks_drafts(
            draft6="uri-reference",
            draft7="uri-reference",
            draft201909="uri-reference",
            draft202012="uri-reference",
            raises=ValueError,
        )
        def is_uri_reference(instance: object) -> bool:
            """Check the ``uri-reference`` format via `rfc3986_validator`."""
            if not isinstance(instance, str):
                return True
            return validate_rfc3986(instance, rule="URI_reference")

else:
    # rfc3987 is available: it can also check the IRI formats.

    @_checks_drafts(
        draft7="iri",
        draft201909="iri",
        draft202012="iri",
        raises=ValueError,
    )
    def is_iri(instance: object) -> bool:
        """Check the ``iri`` format via `rfc3987`."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="IRI")

    @_checks_drafts(
        draft7="iri-reference",
        draft201909="iri-reference",
        draft202012="iri-reference",
        raises=ValueError,
    )
    def is_iri_reference(instance: object) -> bool:
        """Check the ``iri-reference`` format via `rfc3987`."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="IRI_reference")

    @_checks_drafts(name="uri", raises=ValueError)
    def is_uri(instance: object) -> bool:
        """Check the ``uri`` format via `rfc3987`."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="URI")

    @_checks_drafts(
        draft6="uri-reference",
        draft7="uri-reference",
        draft201909="uri-reference",
        draft202012="uri-reference",
        raises=ValueError,
    )
    def is_uri_reference(instance: object) -> bool:
        """Check the ``uri-reference`` format via `rfc3987`."""
        if not isinstance(instance, str):
            return True
        return rfc3987.parse(instance, rule="URI_reference")
|
366 |
+
|
367 |
+
|
368 |
+
with suppress(ImportError):
    from rfc3339_validator import validate_rfc3339

    @_checks_drafts(name="date-time")
    def is_datetime(instance: object) -> bool:
        """Check the ``date-time`` format per RFC 3339."""
        if not isinstance(instance, str):
            return True
        # upper(): RFC 3339 separators/designators are case-insensitive,
        # but the validator expects the upper-case forms.
        return validate_rfc3339(instance.upper())

    @_checks_drafts(
        draft7="time",
        draft201909="time",
        draft202012="time",
    )
    def is_time(instance: object) -> bool:
        """Check the ``time`` format by prefixing a date and reusing is_datetime."""
        if not isinstance(instance, str):
            return True
        return is_datetime("1970-01-01T" + instance)
|
386 |
+
|
387 |
+
|
388 |
+
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance: object) -> bool:
    """Check the ``regex`` format by compiling with Python's `re`."""
    if not isinstance(instance, str):
        return True
    # Invalid patterns raise re.error, which the checker translates.
    return re.compile(instance) is not None
|
393 |
+
|
394 |
+
|
395 |
+
@_checks_drafts(
    draft3="date",
    draft7="date",
    draft201909="date",
    draft202012="date",
    raises=ValueError,
)
def is_date(instance: object) -> bool:
    """Check the ``date`` format: a full RFC 3339 ``YYYY-MM-DD`` date."""
    if not isinstance(instance, str):
        return True
    # The regex pins the exact layout (fromisoformat alone is laxer);
    # fromisoformat then validates the calendar values, raising ValueError.
    if _RE_DATE.fullmatch(instance) is None:
        return False
    return bool(date.fromisoformat(instance))
|
406 |
+
|
407 |
+
|
408 |
+
@_checks_drafts(draft3="time", raises=ValueError)
def is_draft3_time(instance: object) -> bool:
    """Check draft 3's ``time`` format: an ``HH:MM:SS`` wall-clock time."""
    if isinstance(instance, str):
        # strptime raises ValueError on malformed times (declared above)
        return bool(datetime.strptime(instance, "%H:%M:%S"))  # noqa: DTZ007
    return True
|
413 |
+
|
414 |
+
|
415 |
+
with suppress(ImportError):
    import webcolors

    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
    def is_css21_color(instance: object) -> bool:
        """Check draft 3's ``color`` format: a CSS color name or hex value."""
        if isinstance(instance, str):
            try:
                # first try a named color ("red", "rebeccapurple", ...)
                webcolors.name_to_hex(instance)
            except ValueError:
                # not a name: must then be a valid hex color, which raises
                # ValueError itself when malformed (caught by the checker)
                webcolors.normalize_hex(instance.lower())
        return True
|
426 |
+
|
427 |
+
|
428 |
+
with suppress(ImportError):
    import jsonpointer

    @_checks_drafts(
        draft6="json-pointer",
        draft7="json-pointer",
        draft201909="json-pointer",
        draft202012="json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_json_pointer(instance: object) -> bool:
        """Check the ``json-pointer`` format (RFC 6901) via `jsonpointer`."""
        if not isinstance(instance, str):
            return True
        return bool(jsonpointer.JsonPointer(instance))

    # TODO: I don't want to maintain this, so it
    # needs to go either into jsonpointer (pending
    # https://github.com/stefankoegl/python-json-pointer/issues/34) or
    # into a new external library.
    @_checks_drafts(
        draft7="relative-json-pointer",
        draft201909="relative-json-pointer",
        draft202012="relative-json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_relative_json_pointer(instance: object) -> bool:
        """
        Check the ``relative-json-pointer`` format.

        The grammar is a non-negative integer (no leading zeros) followed
        by either ``#`` or an ordinary JSON Pointer.
        """
        # Definition taken from:
        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
        if not isinstance(instance, str):
            return True
        if not instance:
            return False

        non_negative_integer, rest = [], ""
        for i, character in enumerate(instance):
            if character.isdigit():
                # digits with a leading "0" are not allowed
                if i > 0 and int(instance[i - 1]) == 0:
                    return False

                non_negative_integer.append(character)
                continue

            # the prefix must contain at least one digit before the pointer
            if not non_negative_integer:
                return False

            rest = instance[i:]
            break
        # Either the remainder is the "#" index form, or it must itself be
        # a syntactically valid JSON Pointer (raises on invalid).
        return (rest == "#") or bool(jsonpointer.JsonPointer(rest))
|
477 |
+
|
478 |
+
|
479 |
+
with suppress(ImportError):
    import uri_template

    @_checks_drafts(
        draft6="uri-template",
        draft7="uri-template",
        draft201909="uri-template",
        draft202012="uri-template",
    )
    def is_uri_template(instance: object) -> bool:
        """Check the ``uri-template`` format (RFC 6570) via `uri_template`."""
        if not isinstance(instance, str):
            return True
        return uri_template.validate(instance)
|
492 |
+
|
493 |
+
|
494 |
+
with suppress(ImportError):
    import isoduration

    @_checks_drafts(
        draft201909="duration",
        draft202012="duration",
        raises=isoduration.DurationParsingException,
    )
    def is_duration(instance: object) -> bool:
        """Check the ``duration`` format (ISO 8601) via `isoduration`."""
        if not isinstance(instance, str):
            return True
        isoduration.parse_duration(instance)
        # FIXME: See bolsote/isoduration#25 and bolsote/isoduration#21
        # (isoduration accepts some strings a duration must not end with,
        # so additionally require a designator-letter suffix)
        return instance.endswith(tuple("DMYWHMS"))
|
508 |
+
|
509 |
+
|
510 |
+
@_checks_drafts(
    draft201909="uuid",
    draft202012="uuid",
    raises=ValueError,
)
def is_uuid(instance: object) -> bool:
    """Check the ``uuid`` format: a canonical hyphenated RFC 4122 UUID."""
    if not isinstance(instance, str):
        return True
    # UUID() validates the hex digits (raising ValueError on garbage)...
    UUID(instance)
    # ...but also accepts un-hyphenated and braced layouts, so additionally
    # require hyphens at exactly the canonical positions.
    hyphens_canonical = all(instance[i] == "-" for i in (8, 13, 18, 23))
    return hyphens_canonical
|
.venv/lib/python3.11/site-packages/jsonschema/_legacy_keywords.py
ADDED
@@ -0,0 +1,449 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import re
|
2 |
+
|
3 |
+
from referencing.jsonschema import lookup_recursive_ref
|
4 |
+
|
5 |
+
from jsonschema import _utils
|
6 |
+
from jsonschema.exceptions import ValidationError
|
7 |
+
|
8 |
+
|
9 |
+
def ignore_ref_siblings(schema):
    """
    Ignore siblings of ``$ref`` if it is present.

    Otherwise, return all keywords.

    Suitable for use with `create`'s ``applicable_validators`` argument.
    """
    ref = schema.get("$ref")
    # Pre-2019-09 drafts mandate that $ref overrides every sibling keyword.
    return schema.items() if ref is None else [("$ref", ref)]
|
22 |
+
|
23 |
+
|
24 |
+
def dependencies_draft3(validator, dependencies, instance, schema):
    """
    Validate draft 3's ``dependencies`` keyword.

    Each entry may be a schema (applied to the instance), a single property
    name that must also be present, or a list of such names. Yields a
    `ValidationError` for each violation.
    """
    if not validator.is_type(instance, "object"):
        return

    for property, dependency in dependencies.items():
        # dependencies only fire when the triggering property is present
        if property not in instance:
            continue

        if validator.is_type(dependency, "object"):
            # schema dependency: validate the whole instance against it
            yield from validator.descend(
                instance, dependency, schema_path=property,
            )
        elif validator.is_type(dependency, "string"):
            # single-property dependency
            if dependency not in instance:
                message = f"{dependency!r} is a dependency of {property!r}"
                yield ValidationError(message)
        else:
            # assumed to be a list of property names
            for each in dependency:
                if each not in instance:
                    message = f"{each!r} is a dependency of {property!r}"
                    yield ValidationError(message)
|
45 |
+
|
46 |
+
|
47 |
+
def dependencies_draft4_draft6_draft7(
    validator,
    dependencies,
    instance,
    schema,
):
    """
    Support for the ``dependencies`` keyword from pre-draft 2019-09.

    In later drafts, the keyword was split into separate
    ``dependentRequired`` and ``dependentSchemas`` validators.
    """
    if not validator.is_type(instance, "object"):
        return

    for property, dependency in dependencies.items():
        # a dependency only applies when its triggering property is present
        if property not in instance:
            continue

        if validator.is_type(dependency, "array"):
            # property-name dependency: every listed name must be present
            for each in dependency:
                if each not in instance:
                    message = f"{each!r} is a dependency of {property!r}"
                    yield ValidationError(message)
        else:
            # schema dependency: the whole instance must validate against it
            yield from validator.descend(
                instance, dependency, schema_path=property,
            )
|
75 |
+
|
76 |
+
|
77 |
+
def disallow_draft3(validator, disallow, instance, schema):
    """
    Validate draft 3's ``disallow`` keyword.

    Yields an error for each disallowed type (or type-schema) the
    instance matches.
    """
    for forbidden in _utils.ensure_list(disallow):
        matches = validator.evolve(schema={"type": [forbidden]}).is_valid(instance)
        if matches:
            yield ValidationError(f"{forbidden!r} is disallowed for {instance!r}")
|
82 |
+
|
83 |
+
|
84 |
+
def extends_draft3(validator, extends, instance, schema):
    """
    Validate draft 3's ``extends`` keyword.

    ``extends`` is either a single schema or a list of schemas, all of
    which the instance must satisfy.
    """
    if not validator.is_type(extends, "object"):
        # a list of schemas: descend into each by position
        for position, subschema in enumerate(extends):
            yield from validator.descend(instance, subschema, schema_path=position)
        return
    # a single schema
    yield from validator.descend(instance, extends)
|
90 |
+
|
91 |
+
|
92 |
+
def items_draft3_draft4(validator, items, instance, schema):
    """
    Validate draft 3/4's ``items`` keyword.

    A single schema applies to every element; a list of schemas applies
    positionally.
    """
    if not validator.is_type(instance, "array"):
        return

    if not validator.is_type(items, "object"):
        # positional (tuple-style) validation: pair elements with subschemas
        for position, (element, subschema) in enumerate(zip(instance, items)):
            yield from validator.descend(
                element, subschema, path=position, schema_path=position,
            )
    else:
        # one schema for every element
        for position, element in enumerate(instance):
            yield from validator.descend(element, items, path=position)
|
104 |
+
|
105 |
+
|
106 |
+
def additionalItems(validator, aI, instance, schema):
    """
    Validate the pre-2020-12 ``additionalItems`` keyword.

    Only meaningful when ``items`` is an array of positional schemas;
    then ``additionalItems`` constrains the elements past that prefix.
    """
    if not validator.is_type(instance, "array"):
        return
    if validator.is_type(schema.get("items", {}), "object"):
        # a single "items" schema already governs every element
        return

    len_items = len(schema.get("items", []))
    if validator.is_type(aI, "object"):
        # schema form: each trailing element must satisfy it
        for position in range(len_items, len(instance)):
            yield from validator.descend(instance[position], aI, path=position)
    elif not aI and len(instance) > len_items:
        # false form: no trailing elements are permitted at all
        error = "Additional items are not allowed (%s %s unexpected)"
        yield ValidationError(error % _utils.extras_msg(instance[len_items:]))
|
122 |
+
|
123 |
+
|
124 |
+
def items_draft6_draft7_draft201909(validator, items, instance, schema):
    """
    Validate the ``items`` keyword for drafts 6, 7 and 2019-09.

    An array value applies positionally; any other schema applies to
    every element.
    """
    if not validator.is_type(instance, "array"):
        return

    if not validator.is_type(items, "array"):
        # one schema for every element
        for position, element in enumerate(instance):
            yield from validator.descend(element, items, path=position)
    else:
        # positional (tuple-style) validation
        for position, (element, subschema) in enumerate(zip(instance, items)):
            yield from validator.descend(
                element, subschema, path=position, schema_path=position,
            )
|
136 |
+
|
137 |
+
|
138 |
+
def minimum_draft3_draft4(validator, minimum, instance, schema):
    """
    Validate draft 3/4's ``minimum``, honoring boolean ``exclusiveMinimum``.
    """
    if not validator.is_type(instance, "number"):
        return

    # In these drafts exclusiveMinimum is a sibling boolean modifier.
    if schema.get("exclusiveMinimum", False):
        failed, cmp = instance <= minimum, "less than or equal to"
    else:
        failed, cmp = instance < minimum, "less than"

    if failed:
        yield ValidationError(
            f"{instance!r} is {cmp} the minimum of {minimum!r}",
        )
|
152 |
+
|
153 |
+
|
154 |
+
def maximum_draft3_draft4(validator, maximum, instance, schema):
    """
    Validate draft 3/4's ``maximum``, honoring boolean ``exclusiveMaximum``.
    """
    if not validator.is_type(instance, "number"):
        return

    # In these drafts exclusiveMaximum is a sibling boolean modifier.
    if schema.get("exclusiveMaximum", False):
        failed, cmp = instance >= maximum, "greater than or equal to"
    else:
        failed, cmp = instance > maximum, "greater than"

    if failed:
        yield ValidationError(
            f"{instance!r} is {cmp} the maximum of {maximum!r}",
        )
|
168 |
+
|
169 |
+
|
170 |
+
def properties_draft3(validator, properties, instance, schema):
    """
    Validate draft 3's ``properties`` keyword.

    In draft 3, ``required`` lives inside each property subschema rather
    than being a top-level keyword, so missing required properties are
    reported here.
    """
    if not validator.is_type(instance, "object"):
        return

    for property, subschema in properties.items():
        if property in instance:
            yield from validator.descend(
                instance[property],
                subschema,
                path=property,
                schema_path=property,
            )
        elif subschema.get("required", False):
            # Hand-construct the error so its validator/paths point at the
            # nested "required" flag instead of at "properties".
            error = ValidationError(f"{property!r} is a required property")
            error._set(
                validator="required",
                validator_value=subschema["required"],
                instance=instance,
                schema=schema,
            )
            error.path.appendleft(property)
            error.schema_path.extend([property, "required"])
            yield error
|
193 |
+
|
194 |
+
|
195 |
+
def type_draft3(validator, types, instance, schema):
    """
    Validate draft 3's ``type`` keyword.

    ``types`` may be a single entry or a list; each entry is either a type
    name or an embedded schema. The instance passes if any entry matches.
    """
    types = _utils.ensure_list(types)

    all_errors = []
    for index, type in enumerate(types):
        if validator.is_type(type, "object"):
            # embedded schema: passing it satisfies the whole keyword
            errors = list(validator.descend(instance, type, schema_path=index))
            if not errors:
                return
            all_errors.extend(errors)
        elif validator.is_type(instance, type):
            return

    # nothing matched — build a readable list of the candidate types
    reprs = []
    for type in types:
        try:
            # schema entries may carry a "name" for display purposes
            reprs.append(repr(type["name"]))
        except Exception:  # noqa: BLE001
            reprs.append(repr(type))
    yield ValidationError(
        f"{instance!r} is not of type {', '.join(reprs)}",
        context=all_errors,
    )
|
218 |
+
|
219 |
+
|
220 |
+
def contains_draft6_draft7(validator, contains, instance, schema):
    """
    Validate the ``contains`` keyword for drafts 6 and 7.

    At least one array element must satisfy the ``contains`` subschema.
    """
    if not validator.is_type(instance, "array"):
        return

    matched = any(
        validator.evolve(schema=contains).is_valid(element)
        for element in instance
    )
    if not matched:
        yield ValidationError(
            f"None of {instance!r} are valid under the given schema",
        )
|
231 |
+
|
232 |
+
|
233 |
+
def recursiveRef(validator, recursiveRef, instance, schema):
    """
    Validate draft 2019-09's ``$recursiveRef`` keyword.

    Resolves the dynamic recursive anchor via the referencing machinery
    and validates the instance against the resolved schema.
    """
    resolved = lookup_recursive_ref(validator._resolver)
    yield from validator.descend(
        instance,
        resolved.contents,
        resolver=resolved.resolver,
    )
|
240 |
+
|
241 |
+
|
242 |
+
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
    """
    Get all indexes of items that get evaluated under the current schema.

    Covers all keywords related to unevaluatedItems: items, prefixItems, if,
    then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_indexes = []

    # $ref: whatever the referenced schema evaluates counts as evaluated here
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    # $recursiveRef behaves like $ref, resolved through the recursive anchor
    if "$recursiveRef" in schema:
        resolved = lookup_recursive_ref(validator._resolver)
        evaluated_indexes.extend(
            find_evaluated_item_indexes_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    if "items" in schema:
        # additionalItems alongside items covers every remaining index
        if "additionalItems" in schema:
            return list(range(len(instance)))

        # a single items schema applies to (and evaluates) every index
        if validator.is_type(schema["items"], "object"):
            return list(range(len(instance)))
        # array form: only the positional prefix is evaluated
        evaluated_indexes += list(range(len(schema["items"])))

    # conditional keywords evaluate through whichever branch applies
    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_indexes += find_evaluated_item_indexes_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_indexes += find_evaluated_item_indexes_by_schema(
                validator, instance, schema["else"],
            )

    # contains/unevaluatedItems: every element matching the subschema counts
    for keyword in ["contains", "unevaluatedItems"]:
        if keyword in schema:
            for k, v in enumerate(instance):
                if validator.evolve(schema=schema[keyword]).is_valid(v):
                    evaluated_indexes.append(k)

    # in-place applicators: only subschemas the instance satisfies contribute
    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = next(validator.descend(instance, subschema), None)
                if errs is None:
                    evaluated_indexes += find_evaluated_item_indexes_by_schema(
                        validator, instance, subschema,
                    )

    return evaluated_indexes
|
318 |
+
|
319 |
+
|
320 |
+
def unevaluatedItems_draft2019(validator, unevaluatedItems, instance, schema):
    """
    Validate draft 2019-09's ``unevaluatedItems`` keyword.

    Reports array elements that no other applicable keyword evaluated.
    """
    if not validator.is_type(instance, "array"):
        return
    evaluated_item_indexes = find_evaluated_item_indexes_by_schema(
        validator, instance, schema,
    )
    unevaluated_items = [
        item for index, item in enumerate(instance)
        if index not in evaluated_item_indexes
    ]
    if unevaluated_items:
        error = "Unevaluated items are not allowed (%s %s unexpected)"
        yield ValidationError(error % _utils.extras_msg(unevaluated_items))
|
333 |
+
|
334 |
+
|
335 |
+
def find_evaluated_property_keys_by_schema(validator, instance, schema):
    """
    Get all property names that get evaluated under the current schema.

    Mirrors `find_evaluated_item_indexes_by_schema`, but for the keywords
    feeding ``unevaluatedProperties``.
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_keys = []

    # $ref: properties evaluated by the referenced schema count here too
    ref = schema.get("$ref")
    if ref is not None:
        resolved = validator._resolver.lookup(ref)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    # $recursiveRef behaves like $ref through the recursive anchor
    if "$recursiveRef" in schema:
        resolved = lookup_recursive_ref(validator._resolver)
        evaluated_keys.extend(
            find_evaluated_property_keys_by_schema(
                validator.evolve(
                    schema=resolved.contents,
                    _resolver=resolved.resolver,
                ),
                instance,
                resolved.contents,
            ),
        )

    for keyword in [
        "properties", "additionalProperties", "unevaluatedProperties",
    ]:
        if keyword in schema:
            schema_value = schema[keyword]
            # `true` evaluates every property of the instance
            if validator.is_type(schema_value, "boolean") and schema_value:
                evaluated_keys += instance.keys()

            elif validator.is_type(schema_value, "object"):
                for property in schema_value:
                    if property in instance:
                        evaluated_keys.append(property)

    if "patternProperties" in schema:
        for property in instance:
            for pattern in schema["patternProperties"]:
                if re.search(pattern, property):
                    evaluated_keys.append(property)

    if "dependentSchemas" in schema:
        for property, subschema in schema["dependentSchemas"].items():
            if property not in instance:
                continue
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, subschema,
            )

    # in-place applicators: only subschemas the instance satisfies contribute
    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = next(validator.descend(instance, subschema), None)
                if errs is None:
                    evaluated_keys += find_evaluated_property_keys_by_schema(
                        validator, instance, subschema,
                    )

    # conditional keywords evaluate through whichever branch applies
    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_keys += find_evaluated_property_keys_by_schema(
                    validator, instance, schema["then"],
                )
        elif "else" in schema:
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["else"],
            )

    return evaluated_keys
|
418 |
+
|
419 |
+
|
420 |
+
def unevaluatedProperties_draft2019(validator, uP, instance, schema):
    """
    Validate draft 2019-09's ``unevaluatedProperties`` keyword.

    Properties no other applicable keyword evaluated must satisfy ``uP``
    (or be absent entirely when ``uP`` is ``false``).
    """
    if not validator.is_type(instance, "object"):
        return
    evaluated_keys = find_evaluated_property_keys_by_schema(
        validator, instance, schema,
    )
    unevaluated_keys = []
    for property in instance:
        if property not in evaluated_keys:
            # any error from descending means this property is both
            # unevaluated and invalid under uP
            for _ in validator.descend(
                instance[property],
                uP,
                path=property,
                schema_path=property,
            ):
                # FIXME: Include context for each unevaluated property
                #        indicating why it's invalid under the subschema.
                unevaluated_keys.append(property)  # noqa: PERF401

    if unevaluated_keys:
        if uP is False:
            error = "Unevaluated properties are not allowed (%s %s unexpected)"
            extras = sorted(unevaluated_keys, key=str)
            yield ValidationError(error % _utils.extras_msg(extras))
        else:
            error = (
                "Unevaluated properties are not valid under "
                "the given schema (%s %s unevaluated and invalid)"
            )
            yield ValidationError(error % _utils.extras_msg(unevaluated_keys))
|
.venv/lib/python3.11/site-packages/jsonschema/_types.py
ADDED
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from typing import Any, Callable, Mapping
|
4 |
+
import numbers
|
5 |
+
|
6 |
+
from attrs import evolve, field, frozen
|
7 |
+
from rpds import HashTrieMap
|
8 |
+
|
9 |
+
from jsonschema.exceptions import UndefinedTypeCheck
|
10 |
+
|
11 |
+
|
12 |
+
# unfortunately, the type of HashTrieMap is generic, and if used as an attrs
|
13 |
+
# converter, the generic type is presented to mypy, which then fails to match
|
14 |
+
# the concrete type of a type checker mapping
|
15 |
+
# this "do nothing" wrapper presents the correct information to mypy
|
16 |
+
def _typed_map_converter(
    init_val: Mapping[str, Callable[[TypeChecker, Any], bool]],
) -> HashTrieMap[str, Callable[[TypeChecker, Any], bool]]:
    """Convert a plain mapping of type checks into an immutable HashTrieMap."""
    return HashTrieMap.convert(init_val)
|
20 |
+
|
21 |
+
|
22 |
+
def is_array(checker, instance):
    """Check whether *instance* is a JSON array (a Python ``list``)."""
    return isinstance(instance, list)
|
24 |
+
|
25 |
+
|
26 |
+
def is_bool(checker, instance):
    """Check whether *instance* is a JSON boolean (a Python ``bool``)."""
    return isinstance(instance, bool)
|
28 |
+
|
29 |
+
|
30 |
+
def is_integer(checker, instance):
    """Check whether *instance* is a JSON integer (a non-bool ``int``)."""
    # bool inherits from int, so ensure bools aren't reported as ints
    return isinstance(instance, int) and not isinstance(instance, bool)
|
35 |
+
|
36 |
+
|
37 |
+
def is_null(checker, instance):
    """Check whether *instance* is JSON ``null`` (Python ``None``)."""
    return instance is None
|
39 |
+
|
40 |
+
|
41 |
+
def is_number(checker, instance):
    """Check whether *instance* is a JSON number (non-bool numeric)."""
    # bool inherits from int, so ensure bools aren't reported as numbers
    return (
        not isinstance(instance, bool)
        and isinstance(instance, numbers.Number)
    )
|
46 |
+
|
47 |
+
|
48 |
+
def is_object(checker, instance):
    """Check whether *instance* is a JSON object (a Python ``dict``)."""
    return isinstance(instance, dict)
|
50 |
+
|
51 |
+
|
52 |
+
def is_string(checker, instance):
    """Check whether *instance* is a JSON string (a Python ``str``)."""
    return isinstance(instance, str)
|
54 |
+
|
55 |
+
|
56 |
+
def is_any(checker, instance):
    """Accept every instance: draft 3's ``any`` type matches everything."""
    return True
|
58 |
+
|
59 |
+
|
60 |
+
@frozen(repr=False)
class TypeChecker:
    """
    A :kw:`type` property checker.

    A `TypeChecker` performs type checking for a `Validator`, converting
    between the defined JSON Schema types and some associated Python types or
    objects.

    Modifying the behavior just mentioned by redefining which Python objects
    are considered to be of which JSON Schema types can be done using
    `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be
    removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`.

    Arguments:

        type_checkers:

            The initial mapping of types to their checking functions.

    """

    # immutable type-name -> predicate(checker, instance) mapping; the
    # HashTrieMap lets redefine/remove produce cheap modified copies
    _type_checkers: HashTrieMap[
        str, Callable[[TypeChecker, Any], bool],
    ] = field(default=HashTrieMap(), converter=_typed_map_converter)

    def __repr__(self):
        types = ", ".join(repr(k) for k in sorted(self._type_checkers))
        return f"<{self.__class__.__name__} types={{{types}}}>"

    def is_type(self, instance, type: str) -> bool:
        """
        Check if the instance is of the appropriate type.

        Arguments:

            instance:

                The instance to check

            type:

                The name of the type that is expected.

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if ``type`` is unknown to this object.

        """
        try:
            fn = self._type_checkers[type]
        except KeyError:
            raise UndefinedTypeCheck(type) from None

        return fn(self, instance)

    def redefine(self, type: str, fn) -> TypeChecker:
        """
        Produce a new checker with the given type redefined.

        Arguments:

            type:

                The name of the type to check.

            fn (collections.abc.Callable):

                A callable taking exactly two parameters - the type
                checker calling the function and the instance to check.
                The function should return true if instance is of this
                type and false otherwise.

        """
        return self.redefine_many({type: fn})

    def redefine_many(self, definitions=()) -> TypeChecker:
        """
        Produce a new checker with the given types redefined.

        Arguments:

            definitions (dict):

                A dictionary mapping types to their checking functions.

        """
        # HashTrieMap.update returns a new map; self is left untouched
        type_checkers = self._type_checkers.update(definitions)
        return evolve(self, type_checkers=type_checkers)

    def remove(self, *types) -> TypeChecker:
        """
        Produce a new checker with the given types forgotten.

        Arguments:

            types:

                the names of the types to remove.

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if any given type is unknown to this object

        """
        type_checkers = self._type_checkers
        for each in types:
            try:
                type_checkers = type_checkers.remove(each)
            except KeyError:
                raise UndefinedTypeCheck(each) from None
        return evolve(self, type_checkers=type_checkers)
|
176 |
+
|
177 |
+
|
178 |
+
draft3_type_checker = TypeChecker(
|
179 |
+
{
|
180 |
+
"any": is_any,
|
181 |
+
"array": is_array,
|
182 |
+
"boolean": is_bool,
|
183 |
+
"integer": is_integer,
|
184 |
+
"object": is_object,
|
185 |
+
"null": is_null,
|
186 |
+
"number": is_number,
|
187 |
+
"string": is_string,
|
188 |
+
},
|
189 |
+
)
|
190 |
+
draft4_type_checker = draft3_type_checker.remove("any")
|
191 |
+
draft6_type_checker = draft4_type_checker.redefine(
|
192 |
+
"integer",
|
193 |
+
lambda checker, instance: (
|
194 |
+
is_integer(checker, instance)
|
195 |
+
or isinstance(instance, float) and instance.is_integer()
|
196 |
+
),
|
197 |
+
)
|
198 |
+
draft7_type_checker = draft6_type_checker
|
199 |
+
draft201909_type_checker = draft7_type_checker
|
200 |
+
draft202012_type_checker = draft201909_type_checker
|
.venv/lib/python3.11/site-packages/jsonschema/_typing.py
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Some (initially private) typing helpers for jsonschema's types.
|
3 |
+
"""
|
4 |
+
from typing import Any, Callable, Iterable, Protocol, Tuple, Union
|
5 |
+
|
6 |
+
import referencing.jsonschema
|
7 |
+
|
8 |
+
from jsonschema.protocols import Validator
|
9 |
+
|
10 |
+
|
11 |
+
class SchemaKeywordValidator(Protocol):
|
12 |
+
def __call__(
|
13 |
+
self,
|
14 |
+
validator: Validator,
|
15 |
+
value: Any,
|
16 |
+
instance: Any,
|
17 |
+
schema: referencing.jsonschema.Schema,
|
18 |
+
) -> None:
|
19 |
+
...
|
20 |
+
|
21 |
+
|
22 |
+
id_of = Callable[[referencing.jsonschema.Schema], Union[str, None]]
|
23 |
+
|
24 |
+
|
25 |
+
ApplicableValidators = Callable[
|
26 |
+
[referencing.jsonschema.Schema],
|
27 |
+
Iterable[Tuple[str, Any]],
|
28 |
+
]
|
.venv/lib/python3.11/site-packages/jsonschema/_utils.py
ADDED
@@ -0,0 +1,351 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from collections.abc import Mapping, MutableMapping, Sequence
|
2 |
+
from urllib.parse import urlsplit
|
3 |
+
import itertools
|
4 |
+
import re
|
5 |
+
|
6 |
+
|
7 |
+
class URIDict(MutableMapping):
|
8 |
+
"""
|
9 |
+
Dictionary which uses normalized URIs as keys.
|
10 |
+
"""
|
11 |
+
|
12 |
+
def normalize(self, uri):
|
13 |
+
return urlsplit(uri).geturl()
|
14 |
+
|
15 |
+
def __init__(self, *args, **kwargs):
|
16 |
+
self.store = dict()
|
17 |
+
self.store.update(*args, **kwargs)
|
18 |
+
|
19 |
+
def __getitem__(self, uri):
|
20 |
+
return self.store[self.normalize(uri)]
|
21 |
+
|
22 |
+
def __setitem__(self, uri, value):
|
23 |
+
self.store[self.normalize(uri)] = value
|
24 |
+
|
25 |
+
def __delitem__(self, uri):
|
26 |
+
del self.store[self.normalize(uri)]
|
27 |
+
|
28 |
+
def __iter__(self):
|
29 |
+
return iter(self.store)
|
30 |
+
|
31 |
+
def __len__(self): # pragma: no cover -- untested, but to be removed
|
32 |
+
return len(self.store)
|
33 |
+
|
34 |
+
def __repr__(self): # pragma: no cover -- untested, but to be removed
|
35 |
+
return repr(self.store)
|
36 |
+
|
37 |
+
|
38 |
+
class Unset:
|
39 |
+
"""
|
40 |
+
An as-of-yet unset attribute or unprovided default parameter.
|
41 |
+
"""
|
42 |
+
|
43 |
+
def __repr__(self): # pragma: no cover
|
44 |
+
return "<unset>"
|
45 |
+
|
46 |
+
|
47 |
+
def format_as_index(container, indices):
|
48 |
+
"""
|
49 |
+
Construct a single string containing indexing operations for the indices.
|
50 |
+
|
51 |
+
For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"]
|
52 |
+
|
53 |
+
Arguments:
|
54 |
+
|
55 |
+
container (str):
|
56 |
+
|
57 |
+
A word to use for the thing being indexed
|
58 |
+
|
59 |
+
indices (sequence):
|
60 |
+
|
61 |
+
The indices to format.
|
62 |
+
|
63 |
+
"""
|
64 |
+
if not indices:
|
65 |
+
return container
|
66 |
+
return f"{container}[{']['.join(repr(index) for index in indices)}]"
|
67 |
+
|
68 |
+
|
69 |
+
def find_additional_properties(instance, schema):
|
70 |
+
"""
|
71 |
+
Return the set of additional properties for the given ``instance``.
|
72 |
+
|
73 |
+
Weeds out properties that should have been validated by ``properties`` and
|
74 |
+
/ or ``patternProperties``.
|
75 |
+
|
76 |
+
Assumes ``instance`` is dict-like already.
|
77 |
+
"""
|
78 |
+
properties = schema.get("properties", {})
|
79 |
+
patterns = "|".join(schema.get("patternProperties", {}))
|
80 |
+
for property in instance:
|
81 |
+
if property not in properties:
|
82 |
+
if patterns and re.search(patterns, property):
|
83 |
+
continue
|
84 |
+
yield property
|
85 |
+
|
86 |
+
|
87 |
+
def extras_msg(extras):
|
88 |
+
"""
|
89 |
+
Create an error message for extra items or properties.
|
90 |
+
"""
|
91 |
+
verb = "was" if len(extras) == 1 else "were"
|
92 |
+
return ", ".join(repr(extra) for extra in extras), verb
|
93 |
+
|
94 |
+
|
95 |
+
def ensure_list(thing):
|
96 |
+
"""
|
97 |
+
Wrap ``thing`` in a list if it's a single str.
|
98 |
+
|
99 |
+
Otherwise, return it unchanged.
|
100 |
+
"""
|
101 |
+
if isinstance(thing, str):
|
102 |
+
return [thing]
|
103 |
+
return thing
|
104 |
+
|
105 |
+
|
106 |
+
def _mapping_equal(one, two):
|
107 |
+
"""
|
108 |
+
Check if two mappings are equal using the semantics of `equal`.
|
109 |
+
"""
|
110 |
+
if len(one) != len(two):
|
111 |
+
return False
|
112 |
+
return all(
|
113 |
+
key in two and equal(value, two[key])
|
114 |
+
for key, value in one.items()
|
115 |
+
)
|
116 |
+
|
117 |
+
|
118 |
+
def _sequence_equal(one, two):
|
119 |
+
"""
|
120 |
+
Check if two sequences are equal using the semantics of `equal`.
|
121 |
+
"""
|
122 |
+
if len(one) != len(two):
|
123 |
+
return False
|
124 |
+
return all(equal(i, j) for i, j in zip(one, two))
|
125 |
+
|
126 |
+
|
127 |
+
def equal(one, two):
|
128 |
+
"""
|
129 |
+
Check if two things are equal evading some Python type hierarchy semantics.
|
130 |
+
|
131 |
+
Specifically in JSON Schema, evade `bool` inheriting from `int`,
|
132 |
+
recursing into sequences to do the same.
|
133 |
+
"""
|
134 |
+
if one is two:
|
135 |
+
return True
|
136 |
+
if isinstance(one, str) or isinstance(two, str):
|
137 |
+
return one == two
|
138 |
+
if isinstance(one, Sequence) and isinstance(two, Sequence):
|
139 |
+
return _sequence_equal(one, two)
|
140 |
+
if isinstance(one, Mapping) and isinstance(two, Mapping):
|
141 |
+
return _mapping_equal(one, two)
|
142 |
+
return unbool(one) == unbool(two)
|
143 |
+
|
144 |
+
|
145 |
+
def unbool(element, true=object(), false=object()):
|
146 |
+
"""
|
147 |
+
A hack to make True and 1 and False and 0 unique for ``uniq``.
|
148 |
+
"""
|
149 |
+
if element is True:
|
150 |
+
return true
|
151 |
+
elif element is False:
|
152 |
+
return false
|
153 |
+
return element
|
154 |
+
|
155 |
+
|
156 |
+
def uniq(container):
|
157 |
+
"""
|
158 |
+
Check if all of a container's elements are unique.
|
159 |
+
|
160 |
+
Tries to rely on the container being recursively sortable, or otherwise
|
161 |
+
falls back on (slow) brute force.
|
162 |
+
"""
|
163 |
+
try:
|
164 |
+
sort = sorted(unbool(i) for i in container)
|
165 |
+
sliced = itertools.islice(sort, 1, None)
|
166 |
+
|
167 |
+
for i, j in zip(sort, sliced):
|
168 |
+
if equal(i, j):
|
169 |
+
return False
|
170 |
+
|
171 |
+
except (NotImplementedError, TypeError):
|
172 |
+
seen = []
|
173 |
+
for e in container:
|
174 |
+
e = unbool(e)
|
175 |
+
|
176 |
+
for i in seen:
|
177 |
+
if equal(i, e):
|
178 |
+
return False
|
179 |
+
|
180 |
+
seen.append(e)
|
181 |
+
return True
|
182 |
+
|
183 |
+
|
184 |
+
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
|
185 |
+
"""
|
186 |
+
Get all indexes of items that get evaluated under the current schema.
|
187 |
+
|
188 |
+
Covers all keywords related to unevaluatedItems: items, prefixItems, if,
|
189 |
+
then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
|
190 |
+
"""
|
191 |
+
if validator.is_type(schema, "boolean"):
|
192 |
+
return []
|
193 |
+
evaluated_indexes = []
|
194 |
+
|
195 |
+
if "items" in schema:
|
196 |
+
return list(range(len(instance)))
|
197 |
+
|
198 |
+
ref = schema.get("$ref")
|
199 |
+
if ref is not None:
|
200 |
+
resolved = validator._resolver.lookup(ref)
|
201 |
+
evaluated_indexes.extend(
|
202 |
+
find_evaluated_item_indexes_by_schema(
|
203 |
+
validator.evolve(
|
204 |
+
schema=resolved.contents,
|
205 |
+
_resolver=resolved.resolver,
|
206 |
+
),
|
207 |
+
instance,
|
208 |
+
resolved.contents,
|
209 |
+
),
|
210 |
+
)
|
211 |
+
|
212 |
+
dynamicRef = schema.get("$dynamicRef")
|
213 |
+
if dynamicRef is not None:
|
214 |
+
resolved = validator._resolver.lookup(dynamicRef)
|
215 |
+
evaluated_indexes.extend(
|
216 |
+
find_evaluated_item_indexes_by_schema(
|
217 |
+
validator.evolve(
|
218 |
+
schema=resolved.contents,
|
219 |
+
_resolver=resolved.resolver,
|
220 |
+
),
|
221 |
+
instance,
|
222 |
+
resolved.contents,
|
223 |
+
),
|
224 |
+
)
|
225 |
+
|
226 |
+
if "prefixItems" in schema:
|
227 |
+
evaluated_indexes += list(range(len(schema["prefixItems"])))
|
228 |
+
|
229 |
+
if "if" in schema:
|
230 |
+
if validator.evolve(schema=schema["if"]).is_valid(instance):
|
231 |
+
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
232 |
+
validator, instance, schema["if"],
|
233 |
+
)
|
234 |
+
if "then" in schema:
|
235 |
+
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
236 |
+
validator, instance, schema["then"],
|
237 |
+
)
|
238 |
+
elif "else" in schema:
|
239 |
+
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
240 |
+
validator, instance, schema["else"],
|
241 |
+
)
|
242 |
+
|
243 |
+
for keyword in ["contains", "unevaluatedItems"]:
|
244 |
+
if keyword in schema:
|
245 |
+
for k, v in enumerate(instance):
|
246 |
+
if validator.evolve(schema=schema[keyword]).is_valid(v):
|
247 |
+
evaluated_indexes.append(k)
|
248 |
+
|
249 |
+
for keyword in ["allOf", "oneOf", "anyOf"]:
|
250 |
+
if keyword in schema:
|
251 |
+
for subschema in schema[keyword]:
|
252 |
+
errs = next(validator.descend(instance, subschema), None)
|
253 |
+
if errs is None:
|
254 |
+
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
255 |
+
validator, instance, subschema,
|
256 |
+
)
|
257 |
+
|
258 |
+
return evaluated_indexes
|
259 |
+
|
260 |
+
|
261 |
+
def find_evaluated_property_keys_by_schema(validator, instance, schema):
|
262 |
+
"""
|
263 |
+
Get all keys of items that get evaluated under the current schema.
|
264 |
+
|
265 |
+
Covers all keywords related to unevaluatedProperties: properties,
|
266 |
+
additionalProperties, unevaluatedProperties, patternProperties,
|
267 |
+
dependentSchemas, allOf, oneOf, anyOf, if, then, else
|
268 |
+
"""
|
269 |
+
if validator.is_type(schema, "boolean"):
|
270 |
+
return []
|
271 |
+
evaluated_keys = []
|
272 |
+
|
273 |
+
ref = schema.get("$ref")
|
274 |
+
if ref is not None:
|
275 |
+
resolved = validator._resolver.lookup(ref)
|
276 |
+
evaluated_keys.extend(
|
277 |
+
find_evaluated_property_keys_by_schema(
|
278 |
+
validator.evolve(
|
279 |
+
schema=resolved.contents,
|
280 |
+
_resolver=resolved.resolver,
|
281 |
+
),
|
282 |
+
instance,
|
283 |
+
resolved.contents,
|
284 |
+
),
|
285 |
+
)
|
286 |
+
|
287 |
+
dynamicRef = schema.get("$dynamicRef")
|
288 |
+
if dynamicRef is not None:
|
289 |
+
resolved = validator._resolver.lookup(dynamicRef)
|
290 |
+
evaluated_keys.extend(
|
291 |
+
find_evaluated_property_keys_by_schema(
|
292 |
+
validator.evolve(
|
293 |
+
schema=resolved.contents,
|
294 |
+
_resolver=resolved.resolver,
|
295 |
+
),
|
296 |
+
instance,
|
297 |
+
resolved.contents,
|
298 |
+
),
|
299 |
+
)
|
300 |
+
|
301 |
+
for keyword in [
|
302 |
+
"properties", "additionalProperties", "unevaluatedProperties",
|
303 |
+
]:
|
304 |
+
if keyword in schema:
|
305 |
+
schema_value = schema[keyword]
|
306 |
+
if validator.is_type(schema_value, "boolean") and schema_value:
|
307 |
+
evaluated_keys += instance.keys()
|
308 |
+
|
309 |
+
elif validator.is_type(schema_value, "object"):
|
310 |
+
for property in schema_value:
|
311 |
+
if property in instance:
|
312 |
+
evaluated_keys.append(property)
|
313 |
+
|
314 |
+
if "patternProperties" in schema:
|
315 |
+
for property in instance:
|
316 |
+
for pattern in schema["patternProperties"]:
|
317 |
+
if re.search(pattern, property):
|
318 |
+
evaluated_keys.append(property)
|
319 |
+
|
320 |
+
if "dependentSchemas" in schema:
|
321 |
+
for property, subschema in schema["dependentSchemas"].items():
|
322 |
+
if property not in instance:
|
323 |
+
continue
|
324 |
+
evaluated_keys += find_evaluated_property_keys_by_schema(
|
325 |
+
validator, instance, subschema,
|
326 |
+
)
|
327 |
+
|
328 |
+
for keyword in ["allOf", "oneOf", "anyOf"]:
|
329 |
+
if keyword in schema:
|
330 |
+
for subschema in schema[keyword]:
|
331 |
+
errs = next(validator.descend(instance, subschema), None)
|
332 |
+
if errs is None:
|
333 |
+
evaluated_keys += find_evaluated_property_keys_by_schema(
|
334 |
+
validator, instance, subschema,
|
335 |
+
)
|
336 |
+
|
337 |
+
if "if" in schema:
|
338 |
+
if validator.evolve(schema=schema["if"]).is_valid(instance):
|
339 |
+
evaluated_keys += find_evaluated_property_keys_by_schema(
|
340 |
+
validator, instance, schema["if"],
|
341 |
+
)
|
342 |
+
if "then" in schema:
|
343 |
+
evaluated_keys += find_evaluated_property_keys_by_schema(
|
344 |
+
validator, instance, schema["then"],
|
345 |
+
)
|
346 |
+
elif "else" in schema:
|
347 |
+
evaluated_keys += find_evaluated_property_keys_by_schema(
|
348 |
+
validator, instance, schema["else"],
|
349 |
+
)
|
350 |
+
|
351 |
+
return evaluated_keys
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/__init__.py
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Benchmarks for validation.
|
3 |
+
|
4 |
+
This package is *not* public API.
|
5 |
+
"""
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/issue232.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
A performance benchmark using the example from issue #232.
|
3 |
+
|
4 |
+
See https://github.com/python-jsonschema/jsonschema/pull/232.
|
5 |
+
"""
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
from pyperf import Runner
|
9 |
+
from referencing import Registry
|
10 |
+
|
11 |
+
from jsonschema.tests._suite import Version
|
12 |
+
import jsonschema
|
13 |
+
|
14 |
+
issue232 = Version(
|
15 |
+
path=Path(__file__).parent / "issue232",
|
16 |
+
remotes=Registry(),
|
17 |
+
name="issue232",
|
18 |
+
)
|
19 |
+
|
20 |
+
|
21 |
+
if __name__ == "__main__":
|
22 |
+
issue232.benchmark(
|
23 |
+
runner=Runner(),
|
24 |
+
Validator=jsonschema.Draft4Validator,
|
25 |
+
)
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/issue232/issue.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/nested_schemas.py
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Validating highly nested schemas shouldn't cause exponential time blowups.
|
3 |
+
|
4 |
+
See https://github.com/python-jsonschema/jsonschema/issues/1097.
|
5 |
+
"""
|
6 |
+
from itertools import cycle
|
7 |
+
|
8 |
+
from jsonschema.validators import validator_for
|
9 |
+
|
10 |
+
metaschemaish = {
|
11 |
+
"$id": "https://example.com/draft/2020-12/schema/strict",
|
12 |
+
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
13 |
+
|
14 |
+
"$vocabulary": {
|
15 |
+
"https://json-schema.org/draft/2020-12/vocab/core": True,
|
16 |
+
"https://json-schema.org/draft/2020-12/vocab/applicator": True,
|
17 |
+
"https://json-schema.org/draft/2020-12/vocab/unevaluated": True,
|
18 |
+
"https://json-schema.org/draft/2020-12/vocab/validation": True,
|
19 |
+
"https://json-schema.org/draft/2020-12/vocab/meta-data": True,
|
20 |
+
"https://json-schema.org/draft/2020-12/vocab/format-annotation": True,
|
21 |
+
"https://json-schema.org/draft/2020-12/vocab/content": True,
|
22 |
+
},
|
23 |
+
"$dynamicAnchor": "meta",
|
24 |
+
|
25 |
+
"$ref": "https://json-schema.org/draft/2020-12/schema",
|
26 |
+
"unevaluatedProperties": False,
|
27 |
+
}
|
28 |
+
|
29 |
+
|
30 |
+
def nested_schema(levels):
|
31 |
+
"""
|
32 |
+
Produce a schema which validates deeply nested objects and arrays.
|
33 |
+
"""
|
34 |
+
|
35 |
+
names = cycle(["foo", "bar", "baz", "quux", "spam", "eggs"])
|
36 |
+
schema = {"type": "object", "properties": {"ham": {"type": "string"}}}
|
37 |
+
for _, name in zip(range(levels - 1), names):
|
38 |
+
schema = {"type": "object", "properties": {name: schema}}
|
39 |
+
return schema
|
40 |
+
|
41 |
+
|
42 |
+
validator = validator_for(metaschemaish)(metaschemaish)
|
43 |
+
|
44 |
+
if __name__ == "__main__":
|
45 |
+
from pyperf import Runner
|
46 |
+
runner = Runner()
|
47 |
+
|
48 |
+
not_nested = nested_schema(levels=1)
|
49 |
+
runner.bench_func("not nested", lambda: validator.is_valid(not_nested))
|
50 |
+
|
51 |
+
for levels in range(1, 11, 3):
|
52 |
+
schema = nested_schema(levels=levels)
|
53 |
+
runner.bench_func(
|
54 |
+
f"nested * {levels}",
|
55 |
+
lambda schema=schema: validator.is_valid(schema),
|
56 |
+
)
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/subcomponents.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
A benchmark which tries to compare the possible slow subparts of validation.
|
3 |
+
"""
|
4 |
+
from referencing import Registry
|
5 |
+
from referencing.jsonschema import DRAFT202012
|
6 |
+
from rpds import HashTrieMap, HashTrieSet
|
7 |
+
|
8 |
+
from jsonschema import Draft202012Validator
|
9 |
+
|
10 |
+
schema = {
|
11 |
+
"type": "array",
|
12 |
+
"minLength": 1,
|
13 |
+
"maxLength": 1,
|
14 |
+
"items": {"type": "integer"},
|
15 |
+
}
|
16 |
+
|
17 |
+
hmap = HashTrieMap()
|
18 |
+
hset = HashTrieSet()
|
19 |
+
|
20 |
+
registry = Registry()
|
21 |
+
|
22 |
+
v = Draft202012Validator(schema)
|
23 |
+
|
24 |
+
|
25 |
+
def registry_data_structures():
|
26 |
+
return hmap.insert("foo", "bar"), hset.insert("foo")
|
27 |
+
|
28 |
+
|
29 |
+
def registry_add():
|
30 |
+
resource = DRAFT202012.create_resource(schema)
|
31 |
+
return registry.with_resource(uri="urn:example", resource=resource)
|
32 |
+
|
33 |
+
|
34 |
+
if __name__ == "__main__":
|
35 |
+
from pyperf import Runner
|
36 |
+
runner = Runner()
|
37 |
+
|
38 |
+
runner.bench_func("HashMap/HashSet insertion", registry_data_structures)
|
39 |
+
runner.bench_func("Registry insertion", registry_add)
|
40 |
+
runner.bench_func("Success", lambda: v.is_valid([1]))
|
41 |
+
runner.bench_func("Failure", lambda: v.is_valid(["foo"]))
|
42 |
+
runner.bench_func("Metaschema validation", lambda: v.check_schema(schema))
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/unused_registry.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
An unused schema registry should not cause slower validation.
|
3 |
+
|
4 |
+
"Unused" here means one where no reference resolution is occurring anyhow.
|
5 |
+
|
6 |
+
See https://github.com/python-jsonschema/jsonschema/issues/1088.
|
7 |
+
"""
|
8 |
+
from pyperf import Runner
|
9 |
+
from referencing import Registry
|
10 |
+
from referencing.jsonschema import DRAFT201909
|
11 |
+
|
12 |
+
from jsonschema import Draft201909Validator
|
13 |
+
|
14 |
+
registry = Registry().with_resource(
|
15 |
+
"urn:example:foo",
|
16 |
+
DRAFT201909.create_resource({}),
|
17 |
+
)
|
18 |
+
|
19 |
+
schema = {"$ref": "https://json-schema.org/draft/2019-09/schema"}
|
20 |
+
instance = {"maxLength": 4}
|
21 |
+
|
22 |
+
no_registry = Draft201909Validator(schema)
|
23 |
+
with_useless_registry = Draft201909Validator(schema, registry=registry)
|
24 |
+
|
25 |
+
if __name__ == "__main__":
|
26 |
+
runner = Runner()
|
27 |
+
|
28 |
+
runner.bench_func(
|
29 |
+
"no registry",
|
30 |
+
lambda: no_registry.is_valid(instance),
|
31 |
+
)
|
32 |
+
runner.bench_func(
|
33 |
+
"useless registry",
|
34 |
+
lambda: with_useless_registry.is_valid(instance),
|
35 |
+
)
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/useless_applicator_schemas.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
"""
|
3 |
+
A benchmark for validation of applicators containing lots of useless schemas.
|
4 |
+
|
5 |
+
Signals a small possible optimization to remove all such schemas ahead of time.
|
6 |
+
"""
|
7 |
+
|
8 |
+
from pyperf import Runner
|
9 |
+
|
10 |
+
from jsonschema import Draft202012Validator as Validator
|
11 |
+
|
12 |
+
NUM_USELESS = 100000
|
13 |
+
|
14 |
+
subschema = {"const": 37}
|
15 |
+
|
16 |
+
valid = 37
|
17 |
+
invalid = 12
|
18 |
+
|
19 |
+
baseline = Validator(subschema)
|
20 |
+
|
21 |
+
|
22 |
+
# These should be indistinguishable from just `subschema`
|
23 |
+
by_name = {
|
24 |
+
"single subschema": {
|
25 |
+
"anyOf": Validator({"anyOf": [subschema]}),
|
26 |
+
"allOf": Validator({"allOf": [subschema]}),
|
27 |
+
"oneOf": Validator({"oneOf": [subschema]}),
|
28 |
+
},
|
29 |
+
"redundant subschemas": {
|
30 |
+
"anyOf": Validator({"anyOf": [subschema] * NUM_USELESS}),
|
31 |
+
"allOf": Validator({"allOf": [subschema] * NUM_USELESS}),
|
32 |
+
},
|
33 |
+
"useless successful subschemas (beginning)": {
|
34 |
+
"anyOf": Validator({"anyOf": [subschema, *[True] * NUM_USELESS]}),
|
35 |
+
"allOf": Validator({"allOf": [subschema, *[True] * NUM_USELESS]}),
|
36 |
+
},
|
37 |
+
"useless successful subschemas (middle)": {
|
38 |
+
"anyOf": Validator(
|
39 |
+
{
|
40 |
+
"anyOf": [
|
41 |
+
*[True] * (NUM_USELESS // 2),
|
42 |
+
subschema,
|
43 |
+
*[True] * (NUM_USELESS // 2),
|
44 |
+
],
|
45 |
+
},
|
46 |
+
),
|
47 |
+
"allOf": Validator(
|
48 |
+
{
|
49 |
+
"allOf": [
|
50 |
+
*[True] * (NUM_USELESS // 2),
|
51 |
+
subschema,
|
52 |
+
*[True] * (NUM_USELESS // 2),
|
53 |
+
],
|
54 |
+
},
|
55 |
+
),
|
56 |
+
},
|
57 |
+
"useless successful subschemas (end)": {
|
58 |
+
"anyOf": Validator({"anyOf": [*[True] * NUM_USELESS, subschema]}),
|
59 |
+
"allOf": Validator({"allOf": [*[True] * NUM_USELESS, subschema]}),
|
60 |
+
},
|
61 |
+
"useless failing subschemas (beginning)": {
|
62 |
+
"anyOf": Validator({"anyOf": [subschema, *[False] * NUM_USELESS]}),
|
63 |
+
"oneOf": Validator({"oneOf": [subschema, *[False] * NUM_USELESS]}),
|
64 |
+
},
|
65 |
+
"useless failing subschemas (middle)": {
|
66 |
+
"anyOf": Validator(
|
67 |
+
{
|
68 |
+
"anyOf": [
|
69 |
+
*[False] * (NUM_USELESS // 2),
|
70 |
+
subschema,
|
71 |
+
*[False] * (NUM_USELESS // 2),
|
72 |
+
],
|
73 |
+
},
|
74 |
+
),
|
75 |
+
"oneOf": Validator(
|
76 |
+
{
|
77 |
+
"oneOf": [
|
78 |
+
*[False] * (NUM_USELESS // 2),
|
79 |
+
subschema,
|
80 |
+
*[False] * (NUM_USELESS // 2),
|
81 |
+
],
|
82 |
+
},
|
83 |
+
),
|
84 |
+
},
|
85 |
+
"useless failing subschemas (end)": {
|
86 |
+
"anyOf": Validator({"anyOf": [*[False] * NUM_USELESS, subschema]}),
|
87 |
+
"oneOf": Validator({"oneOf": [*[False] * NUM_USELESS, subschema]}),
|
88 |
+
},
|
89 |
+
}
|
90 |
+
|
91 |
+
if __name__ == "__main__":
|
92 |
+
runner = Runner()
|
93 |
+
|
94 |
+
runner.bench_func("baseline valid", lambda: baseline.is_valid(valid))
|
95 |
+
runner.bench_func("baseline invalid", lambda: baseline.is_valid(invalid))
|
96 |
+
|
97 |
+
for group, applicators in by_name.items():
|
98 |
+
for applicator, validator in applicators.items():
|
99 |
+
runner.bench_func(
|
100 |
+
f"{group}: {applicator} valid",
|
101 |
+
lambda validator=validator: validator.is_valid(valid),
|
102 |
+
)
|
103 |
+
runner.bench_func(
|
104 |
+
f"{group}: {applicator} invalid",
|
105 |
+
lambda validator=validator: validator.is_valid(invalid),
|
106 |
+
)
|
.venv/lib/python3.11/site-packages/jsonschema/benchmarks/useless_keywords.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
A benchmark for validation of schemas containing lots of useless keywords.
|
3 |
+
|
4 |
+
Checks we filter them out once, ahead of time.
|
5 |
+
"""
|
6 |
+
|
7 |
+
from pyperf import Runner
|
8 |
+
|
9 |
+
from jsonschema import Draft202012Validator
|
10 |
+
|
11 |
+
NUM_USELESS = 100000
|
12 |
+
schema = dict(
|
13 |
+
[
|
14 |
+
("not", {"const": 42}),
|
15 |
+
*((str(i), i) for i in range(NUM_USELESS)),
|
16 |
+
("type", "integer"),
|
17 |
+
*((str(i), i) for i in range(NUM_USELESS, NUM_USELESS)),
|
18 |
+
("minimum", 37),
|
19 |
+
],
|
20 |
+
)
|
21 |
+
validator = Draft202012Validator(schema)
|
22 |
+
|
23 |
+
valid = 3737
|
24 |
+
invalid = 12
|
25 |
+
|
26 |
+
|
27 |
+
if __name__ == "__main__":
|
28 |
+
runner = Runner()
|
29 |
+
runner.bench_func("beginning of schema", lambda: validator.is_valid(42))
|
30 |
+
runner.bench_func("middle of schema", lambda: validator.is_valid("foo"))
|
31 |
+
runner.bench_func("end of schema", lambda: validator.is_valid(12))
|
32 |
+
runner.bench_func("valid", lambda: validator.is_valid(3737))
|
.venv/lib/python3.11/site-packages/jsonschema/cli.py
ADDED
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
The ``jsonschema`` command line.
|
3 |
+
"""
|
4 |
+
|
5 |
+
from importlib import metadata
|
6 |
+
from json import JSONDecodeError
|
7 |
+
from textwrap import dedent
|
8 |
+
import argparse
|
9 |
+
import json
|
10 |
+
import sys
|
11 |
+
import traceback
|
12 |
+
import warnings
|
13 |
+
|
14 |
+
try:
|
15 |
+
from pkgutil import resolve_name
|
16 |
+
except ImportError:
|
17 |
+
from pkgutil_resolve_name import resolve_name # type: ignore[no-redef]
|
18 |
+
|
19 |
+
from attrs import define, field
|
20 |
+
|
21 |
+
from jsonschema.exceptions import SchemaError
|
22 |
+
from jsonschema.validators import _RefResolver, validator_for
|
23 |
+
|
24 |
+
warnings.warn(
|
25 |
+
(
|
26 |
+
"The jsonschema CLI is deprecated and will be removed in a future "
|
27 |
+
"version. Please use check-jsonschema instead, which can be installed "
|
28 |
+
"from https://pypi.org/project/check-jsonschema/"
|
29 |
+
),
|
30 |
+
DeprecationWarning,
|
31 |
+
stacklevel=2,
|
32 |
+
)
|
33 |
+
|
34 |
+
|
35 |
+
class _CannotLoadFile(Exception):
|
36 |
+
pass
|
37 |
+
|
38 |
+
|
39 |
+
@define
class _Outputter:
    # Routes CLI output through a formatter: successes go to stdout,
    # every kind of error goes to stderr.

    _formatter = field()
    _stdout = field()
    _stderr = field()

    @classmethod
    def from_arguments(cls, arguments, stdout, stderr):
        # arguments["output"] is restricted to {"plain", "pretty"} by the
        # argparse ``choices``, so one of these branches always binds
        # ``formatter``.
        if arguments["output"] == "plain":
            formatter = _PlainFormatter(arguments["error_format"])
        elif arguments["output"] == "pretty":
            formatter = _PrettyFormatter()
        return cls(formatter=formatter, stdout=stdout, stderr=stderr)

    def load(self, path):
        """
        Load JSON from ``path``, reporting failures via the formatter.

        Raises ``_CannotLoadFile`` (after writing a message to stderr)
        when the file is missing or is not valid JSON.
        """
        try:
            file = open(path)  # noqa: SIM115, PTH123
        except FileNotFoundError as error:
            self.filenotfound_error(path=path, exc_info=sys.exc_info())
            raise _CannotLoadFile() from error

        with file:
            try:
                return json.load(file)
            except JSONDecodeError as error:
                self.parsing_error(path=path, exc_info=sys.exc_info())
                raise _CannotLoadFile() from error

    def filenotfound_error(self, **kwargs):
        self._stderr.write(self._formatter.filenotfound_error(**kwargs))

    def parsing_error(self, **kwargs):
        self._stderr.write(self._formatter.parsing_error(**kwargs))

    def validation_error(self, **kwargs):
        self._stderr.write(self._formatter.validation_error(**kwargs))

    def validation_success(self, **kwargs):
        # Success is the only message written to stdout.
        self._stdout.write(self._formatter.validation_success(**kwargs))
|
79 |
+
|
80 |
+
|
81 |
+
@define
class _PrettyFormatter:
    # Multi-line, human-readable output (--output pretty).

    # Banner template shared by all error kinds; {type} is the error class
    # name and {path} identifies the offending file (or "<stdin>").
    _ERROR_MSG = dedent(
        """\
        ===[{type}]===({path})===

        {body}
        -----------------------------
        """,
    )
    _SUCCESS_MSG = "===[SUCCESS]===({path})===\n"

    def filenotfound_error(self, path, exc_info):
        # exc_info is accepted for interface symmetry but unused here.
        return self._ERROR_MSG.format(
            path=path,
            type="FileNotFoundError",
            body=f"{path!r} does not exist.",
        )

    def parsing_error(self, path, exc_info):
        # Show the full traceback of the JSON decoding failure.
        exc_type, exc_value, exc_traceback = exc_info
        exc_lines = "".join(
            traceback.format_exception(exc_type, exc_value, exc_traceback),
        )
        return self._ERROR_MSG.format(
            path=path,
            type=exc_type.__name__,
            body=exc_lines,
        )

    def validation_error(self, instance_path, error):
        return self._ERROR_MSG.format(
            path=instance_path,
            type=error.__class__.__name__,
            body=error,
        )

    def validation_success(self, instance_path):
        return self._SUCCESS_MSG.format(path=instance_path)
|
121 |
+
|
122 |
+
|
123 |
+
@define
class _PlainFormatter:
    # Minimal one-line-per-error output (--output plain, the default).

    # A str.format template receiving ``file_name`` and ``error``.
    _error_format = field()

    def filenotfound_error(self, path, exc_info):
        return f"{path!r} does not exist.\n"

    def parsing_error(self, path, exc_info):
        # "<stdin>" is a label, not a real path, so it is not repr()'d.
        return "Failed to parse {}: {}\n".format(
            "<stdin>" if path == "<stdin>" else repr(path),
            exc_info[1],
        )

    def validation_error(self, instance_path, error):
        return self._error_format.format(file_name=instance_path, error=error)

    def validation_success(self, instance_path):
        # Plain mode is silent on success.
        return ""
|
142 |
+
|
143 |
+
|
144 |
+
def _resolve_name_with_default(name):
|
145 |
+
if "." not in name:
|
146 |
+
name = "jsonschema." + name
|
147 |
+
return resolve_name(name)
|
148 |
+
|
149 |
+
|
150 |
+
# The CLI's argument parser, built at module import time.
parser = argparse.ArgumentParser(
    description="JSON Schema Validation CLI",
)
parser.add_argument(
    "-i", "--instance",
    action="append",
    dest="instances",
    help="""
        a path to a JSON instance (i.e. filename.json) to validate (may
        be specified multiple times). If no instances are provided via this
        option, one will be expected on standard input.
    """,
)
parser.add_argument(
    "-F", "--error-format",
    help="""
        the format to use for each validation error message, specified
        in a form suitable for str.format. This string will be passed
        one formatted object named 'error' for each ValidationError.
        Only provide this option when using --output=plain, which is the
        default. If this argument is unprovided and --output=plain is
        used, a simple default representation will be used.
    """,
)
parser.add_argument(
    "-o", "--output",
    choices=["plain", "pretty"],
    default="plain",
    help="""
        an output format to use. 'plain' (default) will produce minimal
        text with one line for each error, while 'pretty' will produce
        more detailed human-readable output on multiple lines.
    """,
)
parser.add_argument(
    "-V", "--validator",
    # The option value is converted to a validator class at parse time.
    type=_resolve_name_with_default,
    help="""
        the fully qualified object name of a validator to use, or, for
        validators that are registered with jsonschema, simply the name
        of the class.
    """,
)
parser.add_argument(
    "--base-uri",
    help="""
        a base URI to assign to the provided schema, even if it does not
        declare one (via e.g. $id). This option can be used if you wish to
        resolve relative references to a particular URI (or local path)
    """,
)
parser.add_argument(
    "--version",
    action="version",
    version=metadata.version("jsonschema"),
)
parser.add_argument(
    "schema",
    help="the path to a JSON Schema to validate with (i.e. schema.json)",
)
|
210 |
+
|
211 |
+
|
212 |
+
def parse_args(args):
    """
    Parse the CLI argument list into a plain dict of option values.

    An empty argument list shows the usage message instead (argparse then
    exits). Exits via ``parser.error`` (which raises SystemExit) when
    --error-format is combined with a non-plain output; fills in a default
    error format for plain output when none was given.
    """
    arguments = vars(parser.parse_args(args=args or ["--help"]))
    if arguments["output"] != "plain" and arguments["error_format"]:
        # ArgumentParser.error() raises SystemExit itself and never
        # returns, so there is nothing to ``raise`` here (the previous
        # ``raise parser.error(...)`` was dead, misleading code).
        parser.error("--error-format can only be used with --output plain")
    if arguments["output"] == "plain" and arguments["error_format"] is None:
        arguments["error_format"] = "{error.instance}: {error.message}\n"
    return arguments
|
221 |
+
|
222 |
+
|
223 |
+
def _validate_instance(instance_path, instance, validator, outputter):
|
224 |
+
invalid = False
|
225 |
+
for error in validator.iter_errors(instance):
|
226 |
+
invalid = True
|
227 |
+
outputter.validation_error(instance_path=instance_path, error=error)
|
228 |
+
|
229 |
+
if not invalid:
|
230 |
+
outputter.validation_success(instance_path=instance_path)
|
231 |
+
return invalid
|
232 |
+
|
233 |
+
|
234 |
+
def main(args=None):
    """Run the CLI and exit the process with the resulting status code."""
    # Read sys.argv at call time: the previous default of ``sys.argv[1:]``
    # was evaluated once at import time, so later changes to sys.argv
    # (e.g. by test harnesses or embedding applications) were ignored.
    if args is None:
        args = sys.argv[1:]
    sys.exit(run(arguments=parse_args(args=args)))
|
236 |
+
|
237 |
+
|
238 |
+
def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin):  # noqa: D103
    # Validate every instance against the schema; returns a process exit
    # code (0 on success, 1 on any failure).
    outputter = _Outputter.from_arguments(
        arguments=arguments,
        stdout=stdout,
        stderr=stderr,
    )

    try:
        schema = outputter.load(arguments["schema"])
    except _CannotLoadFile:
        return 1

    Validator = arguments["validator"]
    if Validator is None:
        # No explicit -V option: pick a validator from the schema's
        # $schema keyword (or the latest supported draft).
        Validator = validator_for(schema)

    try:
        Validator.check_schema(schema)
    except SchemaError as error:
        outputter.validation_error(
            instance_path=arguments["schema"],
            error=error,
        )
        return 1

    if arguments["instances"]:
        load, instances = outputter.load, arguments["instances"]
    else:
        # No -i options: read a single instance from standard input,
        # labelled "<stdin>" in all output.
        def load(_):
            try:
                return json.load(stdin)
            except JSONDecodeError as error:
                outputter.parsing_error(
                    path="<stdin>", exc_info=sys.exc_info(),
                )
                raise _CannotLoadFile() from error
        instances = ["<stdin>"]

    # Only build a resolver when --base-uri was given; otherwise the
    # validator creates its own default.
    resolver = _RefResolver(
        base_uri=arguments["base_uri"],
        referrer=schema,
    ) if arguments["base_uri"] is not None else None

    validator = Validator(schema, resolver=resolver)
    exit_code = 0
    for each in instances:
        try:
            instance = load(each)
        except _CannotLoadFile:
            exit_code = 1
        else:
            # |= keeps the failure bit sticky across instances.
            exit_code |= _validate_instance(
                instance_path=each,
                instance=instance,
                validator=validator,
                outputter=outputter,
            )

    return exit_code
|
.venv/lib/python3.11/site-packages/jsonschema/exceptions.py
ADDED
@@ -0,0 +1,487 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Validation errors, and some surrounding helpers.
|
3 |
+
"""
|
4 |
+
from __future__ import annotations
|
5 |
+
|
6 |
+
from collections import defaultdict, deque
|
7 |
+
from pprint import pformat
|
8 |
+
from textwrap import dedent, indent
|
9 |
+
from typing import TYPE_CHECKING, Any, ClassVar
|
10 |
+
import heapq
|
11 |
+
import itertools
|
12 |
+
import warnings
|
13 |
+
|
14 |
+
from attrs import define
|
15 |
+
from referencing.exceptions import Unresolvable as _Unresolvable
|
16 |
+
|
17 |
+
from jsonschema import _utils
|
18 |
+
|
19 |
+
if TYPE_CHECKING:
|
20 |
+
from collections.abc import Iterable, Mapping, MutableMapping, Sequence
|
21 |
+
|
22 |
+
from jsonschema import _types
|
23 |
+
|
24 |
+
WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"])
|
25 |
+
STRONG_MATCHES: frozenset[str] = frozenset()
|
26 |
+
|
27 |
+
_unset = _utils.Unset()
|
28 |
+
|
29 |
+
|
30 |
+
def _pretty(thing: Any, prefix: str):
|
31 |
+
"""
|
32 |
+
Format something for an error message as prettily as we currently can.
|
33 |
+
"""
|
34 |
+
return indent(pformat(thing, width=72, sort_dicts=False), prefix).lstrip()
|
35 |
+
|
36 |
+
|
37 |
+
def __getattr__(name):
    # Module-level __getattr__ (PEP 562): keep the deprecated public name
    # ``RefResolutionError`` importable, but warn and delegate to the
    # private class.
    if name == "RefResolutionError":
        warnings.warn(
            _RefResolutionError._DEPRECATION_MESSAGE,
            DeprecationWarning,
            stacklevel=2,
        )
        return _RefResolutionError
    raise AttributeError(f"module {__name__} has no attribute {name}")
|
46 |
+
|
47 |
+
|
48 |
+
class _Error(Exception):
    # Shared base for ValidationError / SchemaError; subclasses customize
    # only the nouns used when rendering the error as text.

    _word_for_schema_in_error_message: ClassVar[str]
    _word_for_instance_in_error_message: ClassVar[str]

    def __init__(
        self,
        message: str,
        validator: str = _unset,  # type: ignore[assignment]
        path: Iterable[str | int] = (),
        cause: Exception | None = None,
        context=(),
        validator_value: Any = _unset,
        instance: Any = _unset,
        schema: Mapping[str, Any] | bool = _unset,  # type: ignore[assignment]
        schema_path: Iterable[str | int] = (),
        parent: _Error | None = None,
        type_checker: _types.TypeChecker = _unset,  # type: ignore[assignment]
    ) -> None:
        # Forward all arguments to Exception so they end up in ``args``.
        super().__init__(
            message,
            validator,
            path,
            cause,
            context,
            validator_value,
            instance,
            schema,
            schema_path,
            parent,
        )
        self.message = message
        # ``path``/``schema_path`` double as the relative variants; the
        # absolute ones are computed on demand from ``parent``.
        self.path = self.relative_path = deque(path)
        self.schema_path = self.relative_schema_path = deque(schema_path)
        self.context = list(context)
        self.cause = self.__cause__ = cause
        self.validator = validator
        self.validator_value = validator_value
        self.instance = instance
        self.schema = schema
        self.parent = parent
        self._type_checker = type_checker

        # Each sub-error provided as context is re-rooted at this error.
        for error in context:
            error.parent = self

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.message!r}>"

    def __str__(self) -> str:
        essential_for_verbose = (
            self.validator, self.validator_value, self.instance, self.schema,
        )
        # Without the full details only the bare message can be shown.
        if any(m is _unset for m in essential_for_verbose):
            return self.message

        # The last schema_path element is the keyword itself, which is
        # shown separately, hence the [:-1].
        schema_path = _utils.format_as_index(
            container=self._word_for_schema_in_error_message,
            indices=list(self.relative_schema_path)[:-1],
        )
        instance_path = _utils.format_as_index(
            container=self._word_for_instance_in_error_message,
            indices=self.relative_path,
        )
        prefix = 16 * " "

        return dedent(
            f"""\
            {self.message}

            Failed validating {self.validator!r} in {schema_path}:
                {_pretty(self.schema, prefix=prefix)}

            On {instance_path}:
                {_pretty(self.instance, prefix=prefix)}
            """.rstrip(),
        )

    @classmethod
    def create_from(cls, other: _Error):
        # Convert between error types (e.g. ValidationError -> SchemaError).
        return cls(**other._contents())

    @property
    def absolute_path(self) -> Sequence[str | int]:
        parent = self.parent
        if parent is None:
            return self.relative_path

        # Prepend the parent's path (extendleft reverses, hence reversed()).
        path = deque(self.relative_path)
        path.extendleft(reversed(parent.absolute_path))
        return path

    @property
    def absolute_schema_path(self) -> Sequence[str | int]:
        parent = self.parent
        if parent is None:
            return self.relative_schema_path

        path = deque(self.relative_schema_path)
        path.extendleft(reversed(parent.absolute_schema_path))
        return path

    @property
    def json_path(self) -> str:
        # Render absolute_path in JSONPath syntax: ints as [i], strs as .key.
        path = "$"
        for elem in self.absolute_path:
            if isinstance(elem, int):
                path += "[" + str(elem) + "]"
            else:
                path += "." + elem
        return path

    def _set(
        self,
        type_checker: _types.TypeChecker | None = None,
        **kwargs: Any,
    ) -> None:
        # Fill in attributes still carrying the _unset sentinel; never
        # overwrite ones that were already provided.
        if type_checker is not None and self._type_checker is _unset:
            self._type_checker = type_checker

        for k, v in kwargs.items():
            if getattr(self, k) is _unset:
                setattr(self, k, v)

    def _contents(self):
        attrs = (
            "message", "cause", "context", "validator", "validator_value",
            "path", "schema_path", "instance", "schema", "parent",
        )
        return {attr: getattr(self, attr) for attr in attrs}

    def _matches_type(self) -> bool:
        # True when the instance satisfies the schema's "type" keyword;
        # used by relevance sorting, not by validation itself.
        try:
            # We ignore this as we want to simply crash if this happens
            expected = self.schema["type"]  # type: ignore[index]
        except (KeyError, TypeError):
            return False

        if isinstance(expected, str):
            return self._type_checker.is_type(self.instance, expected)

        # "type" may also be a list of type names; any match counts.
        return any(
            self._type_checker.is_type(self.instance, expected_type)
            for expected_type in expected
        )
|
193 |
+
|
194 |
+
|
195 |
+
class ValidationError(_Error):
    """
    An instance was invalid under a provided schema.
    """

    # Nouns used by _Error.__str__ when rendering this error.
    _word_for_schema_in_error_message = "schema"
    _word_for_instance_in_error_message = "instance"
|
202 |
+
|
203 |
+
|
204 |
+
class SchemaError(_Error):
    """
    A schema was invalid under its corresponding metaschema.
    """

    # Here the "instance" being validated is itself a schema, validated
    # against its metaschema.
    _word_for_schema_in_error_message = "metaschema"
    _word_for_instance_in_error_message = "schema"
|
211 |
+
|
212 |
+
|
213 |
+
@define(slots=False)
class _RefResolutionError(Exception):
    """
    A ref could not be resolved.
    """

    # Shown (via the module __getattr__) when the deprecated public name
    # RefResolutionError is accessed.
    _DEPRECATION_MESSAGE = (
        "jsonschema.exceptions.RefResolutionError is deprecated as of version "
        "4.18.0. If you wish to catch potential reference resolution errors, "
        "directly catch referencing.exceptions.Unresolvable."
    )

    # The underlying exception that caused resolution to fail.
    _cause: Exception

    def __eq__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented  # pragma: no cover -- uncovered but deprecated  # noqa: E501
        return self._cause == other._cause

    def __str__(self) -> str:
        return str(self._cause)
|
234 |
+
|
235 |
+
|
236 |
+
class _WrappedReferencingError(_RefResolutionError, _Unresolvable):  # pragma: no cover -- partially uncovered but to be removed  # noqa: E501
    # Bridges errors from the ``referencing`` library to the deprecated
    # RefResolutionError type, so old ``except RefResolutionError`` clauses
    # keep working.

    def __init__(self, cause: _Unresolvable):
        # object.__setattr__ bypasses any attribute machinery on the
        # parent classes when stashing the wrapped error.
        object.__setattr__(self, "_wrapped", cause)

    def __eq__(self, other):
        # Equal to another wrapper with the same cause, or to the
        # wrapped error itself.
        if other.__class__ is self.__class__:
            return self._wrapped == other._wrapped
        elif other.__class__ is self._wrapped.__class__:
            return self._wrapped == other
        return NotImplemented

    def __getattr__(self, attr):
        # Everything not defined here is delegated to the wrapped error.
        return getattr(self._wrapped, attr)

    def __hash__(self):
        return hash(self._wrapped)

    def __repr__(self):
        return f"<WrappedReferencingError {self._wrapped!r}>"

    def __str__(self):
        return f"{self._wrapped.__class__.__name__}: {self._wrapped}"
|
258 |
+
|
259 |
+
|
260 |
+
class UndefinedTypeCheck(Exception):
    """
    A type checker was asked to check a type it did not have registered.
    """

    def __init__(self, type: str) -> None:
        # Remember the unknown type name so __str__ can report it.
        self.type = type

    def __str__(self) -> str:
        return "Type {!r} is unknown to this type checker".format(self.type)
|
270 |
+
|
271 |
+
|
272 |
+
class UnknownType(Exception):
    """
    A validator was asked to validate an instance against an unknown type.
    """

    def __init__(self, type, instance, schema):
        # The unknown type name, the value being checked, and the schema
        # that referenced the type.
        self.type = type
        self.instance = instance
        self.schema = schema

    def __str__(self):
        # prefix matches the literal indentation inside the f-string below
        # so that _pretty's continuation lines stay aligned after dedent.
        prefix = 16 * " "

        return dedent(
            f"""\
            Unknown type {self.type!r} for validator with schema:
                {_pretty(self.schema, prefix=prefix)}

            While checking instance:
                {_pretty(self.instance, prefix=prefix)}
            """.rstrip(),
        )
|
294 |
+
|
295 |
+
|
296 |
+
class FormatError(Exception):
    """
    Validating a format failed.
    """

    def __init__(self, message, cause=None):
        super().__init__(message, cause)
        self.message = message
        # Mirror the cause onto __cause__ so tracebacks show chaining.
        self.__cause__ = cause
        self.cause = cause

    def __str__(self):
        return self.message
|
308 |
+
|
309 |
+
|
310 |
+
class ErrorTree:
    """
    ErrorTrees make it easier to check which validations failed.
    """

    # Sentinel until an error tells us which instance this node describes.
    _instance = _unset

    def __init__(self, errors: Iterable[ValidationError] = ()):
        self.errors: MutableMapping[str, ValidationError] = {}
        # Child trees are created lazily by the defaultdict on first access.
        self._contents: Mapping[str, ErrorTree] = defaultdict(self.__class__)

        for error in errors:
            container = self
            # Descend to the subtree matching the error's path, creating
            # intermediate nodes as needed (via __getitem__/defaultdict).
            for element in error.path:
                container = container[element]
            container.errors[error.validator] = error

            container._instance = error.instance

    def __contains__(self, index: str | int):
        """
        Check whether ``instance[index]`` has any errors.
        """
        return index in self._contents

    def __getitem__(self, index):
        """
        Retrieve the child tree one level down at the given ``index``.

        If the index is not in the instance that this tree corresponds
        to and is not known by this tree, whatever error would be raised
        by ``instance.__getitem__`` will be propagated (usually this is
        some subclass of `LookupError`.
        """
        if self._instance is not _unset and index not in self:
            # Delegate the lookup error to the instance itself.
            self._instance[index]
        return self._contents[index]

    def __setitem__(self, index: str | int, value: ErrorTree):
        """
        Add an error to the tree at the given ``index``.

        .. deprecated:: v4.20.0

            Setting items on an `ErrorTree` is deprecated without replacement.
            To populate a tree, provide all of its sub-errors when you
            construct the tree.
        """
        warnings.warn(
            "ErrorTree.__setitem__ is deprecated without replacement.",
            DeprecationWarning,
            stacklevel=2,
        )
        self._contents[index] = value  # type: ignore[index]

    def __iter__(self):
        """
        Iterate (non-recursively) over the indices in the instance with errors.
        """
        return iter(self._contents)

    def __len__(self):
        """
        Return the `total_errors`.
        """
        return self.total_errors

    def __repr__(self):
        total = len(self)
        errors = "error" if total == 1 else "errors"
        return f"<{self.__class__.__name__} ({total} total {errors})>"

    @property
    def total_errors(self):
        """
        The total number of errors in the entire tree, including children.
        """
        # len(tree) recurses, so this counts the whole subtree.
        child_errors = sum(len(tree) for _, tree in self._contents.items())
        return len(self.errors) + child_errors
|
389 |
+
|
390 |
+
|
391 |
+
def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
    """
    Create a key function that can be used to sort errors by relevance.

    Arguments:
        weak (set):
            a collection of validation keywords to consider to be
            "weak". If there are two errors at the same level of the
            instance and one is in the set of weak validation keywords,
            the other error will take priority. By default, :kw:`anyOf`
            and :kw:`oneOf` are considered weak keywords and will be
            superseded by other same-level validation errors.

        strong (set):
            a collection of validation keywords to consider to be
            "strong"

    """

    # The returned key is used with max()/sorted(); lexicographically
    # larger tuples are considered *more* relevant.
    def relevance(error):
        validator = error.validator
        return (  # prefer errors which are ...
            -len(error.path),  # 'deeper' and thereby more specific
            error.path,  # earlier (for sibling errors)
            validator not in weak,  # for a non-low-priority keyword
            validator in strong,  # for a high priority keyword
            not error._matches_type(),  # at least match the instance's type
        )  # otherwise we'll treat them the same

    return relevance
|
421 |
+
|
422 |
+
|
423 |
+
relevance = by_relevance()
|
424 |
+
"""
|
425 |
+
A key function (e.g. to use with `sorted`) which sorts errors by relevance.
|
426 |
+
|
427 |
+
Example:
|
428 |
+
|
429 |
+
.. code:: python
|
430 |
+
|
431 |
+
sorted(validator.iter_errors(12), key=jsonschema.exceptions.relevance)
|
432 |
+
"""
|
433 |
+
|
434 |
+
|
435 |
+
def best_match(errors, key=relevance):
    """
    Try to find an error that appears to be the best match among given errors.

    In general, errors that are higher up in the instance (i.e. for which
    `ValidationError.path` is shorter) are considered better matches,
    since they indicate "more" is wrong with the instance.

    If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the
    *opposite* assumption is made -- i.e. the deepest error is picked,
    since these keywords only need to match once, and any other errors
    may not be relevant.

    Arguments:
        errors (collections.abc.Iterable):

            the errors to select from. Do not provide a mixture of
            errors from different validation attempts (i.e. from
            different instances or schemas), since it won't produce
            sensical output.

        key (collections.abc.Callable):

            the key to use when sorting errors. See `relevance` and
            transitively `by_relevance` for more details (the default is
            to sort with the defaults of that function). Changing the
            default is only useful if you want to change the function
            that rates errors but still want the error context descent
            done by this function.

    Returns:
        the best matching error, or ``None`` if the iterable was empty

    .. note::

        This function is a heuristic. Its return value may change for a given
        set of inputs from version to version if better heuristics are added.

    """
    # Peek at the first error so an empty iterable short-circuits to None.
    errors = iter(errors)
    best = next(errors, None)
    if best is None:
        return
    best = max(itertools.chain([best], errors), key=key)

    # Descend into sub-errors (context) while there is a single clearly
    # least-relevant one to follow.
    while best.context:
        # Calculate the minimum via nsmallest, because we don't recurse if
        # all nested errors have the same relevance (i.e. if min == max == all)
        smallest = heapq.nsmallest(2, best.context, key=key)
        if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]):  # noqa: PLR2004
            return best
        best = smallest[0]
    return best
|
.venv/lib/python3.11/site-packages/jsonschema/protocols.py
ADDED
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
typing.Protocol classes for jsonschema interfaces.
|
3 |
+
"""
|
4 |
+
|
5 |
+
# for reference material on Protocols, see
|
6 |
+
# https://www.python.org/dev/peps/pep-0544/
|
7 |
+
|
8 |
+
from __future__ import annotations
|
9 |
+
|
10 |
+
from typing import (
|
11 |
+
TYPE_CHECKING,
|
12 |
+
Any,
|
13 |
+
ClassVar,
|
14 |
+
Iterable,
|
15 |
+
Protocol,
|
16 |
+
runtime_checkable,
|
17 |
+
)
|
18 |
+
|
19 |
+
# in order for Sphinx to resolve references accurately from type annotations,
|
20 |
+
# it needs to see names like `jsonschema.TypeChecker`
|
21 |
+
# therefore, only import at type-checking time (to avoid circular references),
|
22 |
+
# but use `jsonschema` for any types which will otherwise not be resolvable
|
23 |
+
if TYPE_CHECKING:
|
24 |
+
from collections.abc import Mapping
|
25 |
+
|
26 |
+
import referencing.jsonschema
|
27 |
+
|
28 |
+
from jsonschema import _typing
|
29 |
+
from jsonschema.exceptions import ValidationError
|
30 |
+
import jsonschema
|
31 |
+
import jsonschema.validators
|
32 |
+
|
33 |
+
# For code authors working on the validator protocol, these are the three
|
34 |
+
# use-cases which should be kept in mind:
|
35 |
+
#
|
36 |
+
# 1. As a protocol class, it can be used in type annotations to describe the
|
37 |
+
# available methods and attributes of a validator
|
38 |
+
# 2. It is the source of autodoc for the validator documentation
|
39 |
+
# 3. It is runtime_checkable, meaning that it can be used in isinstance()
|
40 |
+
# checks.
|
41 |
+
#
|
42 |
+
# Since protocols are not base classes, isinstance() checking is limited in
|
43 |
+
# its capabilities. See docs on runtime_checkable for detail
|
44 |
+
|
45 |
+
|
46 |
+
@runtime_checkable
|
47 |
+
class Validator(Protocol):
|
48 |
+
"""
|
49 |
+
The protocol to which all validator classes adhere.
|
50 |
+
|
51 |
+
Arguments:
|
52 |
+
|
53 |
+
schema:
|
54 |
+
|
55 |
+
The schema that the validator object will validate with.
|
56 |
+
It is assumed to be valid, and providing
|
57 |
+
an invalid schema can lead to undefined behavior. See
|
58 |
+
`Validator.check_schema` to validate a schema first.
|
59 |
+
|
60 |
+
registry:
|
61 |
+
|
62 |
+
a schema registry that will be used for looking up JSON references
|
63 |
+
|
64 |
+
resolver:
|
65 |
+
|
66 |
+
a resolver that will be used to resolve :kw:`$ref`
|
67 |
+
properties (JSON references). If unprovided, one will be created.
|
68 |
+
|
69 |
+
.. deprecated:: v4.18.0
|
70 |
+
|
71 |
+
`RefResolver <_RefResolver>` has been deprecated in favor of
|
72 |
+
`referencing`, and with it, this argument.
|
73 |
+
|
74 |
+
format_checker:
|
75 |
+
|
76 |
+
if provided, a checker which will be used to assert about
|
77 |
+
:kw:`format` properties present in the schema. If unprovided,
|
78 |
+
*no* format validation is done, and the presence of format
|
79 |
+
within schemas is strictly informational. Certain formats
|
80 |
+
require additional packages to be installed in order to assert
|
81 |
+
against instances. Ensure you've installed `jsonschema` with
|
82 |
+
its `extra (optional) dependencies <index:extras>` when
|
83 |
+
invoking ``pip``.
|
84 |
+
|
85 |
+
.. deprecated:: v4.12.0
|
86 |
+
|
87 |
+
Subclassing validator classes now explicitly warns this is not part of
|
88 |
+
their public API.
|
89 |
+
|
90 |
+
"""
|
91 |
+
|
92 |
+
#: An object representing the validator's meta schema (the schema that
|
93 |
+
#: describes valid schemas in the given version).
|
94 |
+
META_SCHEMA: ClassVar[Mapping]
|
95 |
+
|
96 |
+
#: A mapping of validation keywords (`str`\s) to functions that
|
97 |
+
#: validate the keyword with that name. For more information see
|
98 |
+
#: `creating-validators`.
|
99 |
+
VALIDATORS: ClassVar[Mapping]
|
100 |
+
|
101 |
+
#: A `jsonschema.TypeChecker` that will be used when validating
|
102 |
+
#: :kw:`type` keywords in JSON schemas.
|
103 |
+
TYPE_CHECKER: ClassVar[jsonschema.TypeChecker]
|
104 |
+
|
105 |
+
#: A `jsonschema.FormatChecker` that will be used when validating
|
106 |
+
#: :kw:`format` keywords in JSON schemas.
|
107 |
+
FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker]
|
108 |
+
|
109 |
+
#: A function which given a schema returns its ID.
|
110 |
+
ID_OF: _typing.id_of
|
111 |
+
|
112 |
+
#: The schema that will be used to validate instances
|
113 |
+
schema: Mapping | bool
|
114 |
+
|
115 |
+
def __init__(
|
116 |
+
self,
|
117 |
+
schema: Mapping | bool,
|
118 |
+
registry: referencing.jsonschema.SchemaRegistry,
|
119 |
+
format_checker: jsonschema.FormatChecker | None = None,
|
120 |
+
) -> None:
|
121 |
+
...
|
122 |
+
|
123 |
+
@classmethod
|
124 |
+
def check_schema(cls, schema: Mapping | bool) -> None:
|
125 |
+
"""
|
126 |
+
Validate the given schema against the validator's `META_SCHEMA`.
|
127 |
+
|
128 |
+
Raises:
|
129 |
+
|
130 |
+
`jsonschema.exceptions.SchemaError`:
|
131 |
+
|
132 |
+
if the schema is invalid
|
133 |
+
|
134 |
+
"""
|
135 |
+
|
136 |
+
def is_type(self, instance: Any, type: str) -> bool:
|
137 |
+
"""
|
138 |
+
Check if the instance is of the given (JSON Schema) type.
|
139 |
+
|
140 |
+
Arguments:
|
141 |
+
|
142 |
+
instance:
|
143 |
+
|
144 |
+
the value to check
|
145 |
+
|
146 |
+
type:
|
147 |
+
|
148 |
+
the name of a known (JSON Schema) type
|
149 |
+
|
150 |
+
Returns:
|
151 |
+
|
152 |
+
whether the instance is of the given type
|
153 |
+
|
154 |
+
Raises:
|
155 |
+
|
156 |
+
`jsonschema.exceptions.UnknownType`:
|
157 |
+
|
158 |
+
if ``type`` is not a known type
|
159 |
+
|
160 |
+
"""
|
161 |
+
|
162 |
+
def is_valid(self, instance: Any) -> bool:
|
163 |
+
"""
|
164 |
+
Check if the instance is valid under the current `schema`.
|
165 |
+
|
166 |
+
Returns:
|
167 |
+
|
168 |
+
whether the instance is valid or not
|
169 |
+
|
170 |
+
>>> schema = {"maxItems" : 2}
|
171 |
+
>>> Draft202012Validator(schema).is_valid([2, 3, 4])
|
172 |
+
False
|
173 |
+
|
174 |
+
"""
|
175 |
+
|
176 |
+
def iter_errors(self, instance: Any) -> Iterable[ValidationError]:
|
177 |
+
r"""
|
178 |
+
Lazily yield each of the validation errors in the given instance.
|
179 |
+
|
180 |
+
>>> schema = {
|
181 |
+
... "type" : "array",
|
182 |
+
... "items" : {"enum" : [1, 2, 3]},
|
183 |
+
... "maxItems" : 2,
|
184 |
+
... }
|
185 |
+
>>> v = Draft202012Validator(schema)
|
186 |
+
>>> for error in sorted(v.iter_errors([2, 3, 4]), key=str):
|
187 |
+
... print(error.message)
|
188 |
+
4 is not one of [1, 2, 3]
|
189 |
+
[2, 3, 4] is too long
|
190 |
+
|
191 |
+
.. deprecated:: v4.0.0
|
192 |
+
|
193 |
+
Calling this function with a second schema argument is deprecated.
|
194 |
+
Use `Validator.evolve` instead.
|
195 |
+
"""
|
196 |
+
|
197 |
+
def validate(self, instance: Any) -> None:
|
198 |
+
"""
|
199 |
+
Check if the instance is valid under the current `schema`.
|
200 |
+
|
201 |
+
Raises:
|
202 |
+
|
203 |
+
`jsonschema.exceptions.ValidationError`:
|
204 |
+
|
205 |
+
if the instance is invalid
|
206 |
+
|
207 |
+
>>> schema = {"maxItems" : 2}
|
208 |
+
>>> Draft202012Validator(schema).validate([2, 3, 4])
|
209 |
+
Traceback (most recent call last):
|
210 |
+
...
|
211 |
+
ValidationError: [2, 3, 4] is too long
|
212 |
+
|
213 |
+
"""
|
214 |
+
|
215 |
+
def evolve(self, **kwargs) -> Validator:
|
216 |
+
"""
|
217 |
+
Create a new validator like this one, but with given changes.
|
218 |
+
|
219 |
+
Preserves all other attributes, so can be used to e.g. create a
|
220 |
+
validator with a different schema but with the same :kw:`$ref`
|
221 |
+
resolution behavior.
|
222 |
+
|
223 |
+
>>> validator = Draft202012Validator({})
|
224 |
+
>>> validator.evolve(schema={"type": "number"})
|
225 |
+
Draft202012Validator(schema={'type': 'number'}, format_checker=None)
|
226 |
+
|
227 |
+
The returned object satisfies the validator protocol, but may not
|
228 |
+
be of the same concrete class! In particular this occurs
|
229 |
+
when a :kw:`$ref` occurs to a schema with a different
|
230 |
+
:kw:`$schema` than this one (i.e. for a different draft).
|
231 |
+
|
232 |
+
>>> validator.evolve(
|
233 |
+
... schema={"$schema": Draft7Validator.META_SCHEMA["$id"]}
|
234 |
+
... )
|
235 |
+
Draft7Validator(schema=..., format_checker=None)
|
236 |
+
"""
|
.venv/lib/python3.11/site-packages/jsonschema/tests/__init__.py
ADDED
File without changes
|
.venv/lib/python3.11/site-packages/jsonschema/tests/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (189 Bytes). View file
|
|
.venv/lib/python3.11/site-packages/jsonschema/tests/__pycache__/_suite.cpython-311.pyc
ADDED
Binary file (15.2 kB). View file
|
|