Add files using upload-large-folder tool
Browse files
This view is limited to 50 files because the commit contains too many changes.
See raw diff
- .gitattributes +3 -0
- .venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7 +3 -0
- .venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7m1 +3 -0
- .venv/lib/python3.11/site-packages/mistral_common/data/tokenizer.model.v1 +3 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__init__.pyi +42 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__main__.py +5 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/_isocbind.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/auxfuncs.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/capi_maps.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/rules.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/setup.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/symbolic.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/use_rules.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/__version__.py +1 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/auxfuncs.py +988 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/cb_rules.py +644 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/cfuncs.py +1536 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/f90mod_rules.py +264 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/setup.cfg +3 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/symbolic.py +1517 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/tests/test_parameter.py +112 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/tests/test_return_complex.py +65 -0
- .venv/lib/python3.11/site-packages/numpy/f2py/use_rules.py +106 -0
- .venv/lib/python3.11/site-packages/urllib3/__init__.py +211 -0
- .venv/lib/python3.11/site-packages/urllib3/_base_connection.py +165 -0
- .venv/lib/python3.11/site-packages/urllib3/_version.py +16 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/__init__.py +0 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__init__.py +16 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__pycache__/connection.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__pycache__/request.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/connection.py +255 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js +110 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/fetch.py +708 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/request.py +22 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/response.py +285 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/pyopenssl.py +554 -0
- .venv/lib/python3.11/site-packages/urllib3/contrib/socks.py +228 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/__init__.py +53 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/connection.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/probe.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/connection.py +356 -0
- .venv/lib/python3.11/site-packages/urllib3/http2/probe.py +87 -0
- .venv/lib/python3.11/site-packages/urllib3/poolmanager.py +637 -0
- .venv/lib/python3.11/site-packages/urllib3/util/__init__.py +42 -0
- .venv/lib/python3.11/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/urllib3/util/proxy.py +43 -0
- .venv/lib/python3.11/site-packages/urllib3/util/request.py +258 -0
- .venv/lib/python3.11/site-packages/urllib3/util/retry.py +533 -0
.gitattributes
CHANGED
@@ -392,3 +392,6 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/
 .venv/lib/python3.11/site-packages/numpy/lib/__pycache__/npyio.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
 .venv/lib/python3.11/site-packages/numpy/lib/tests/__pycache__/test_function_base.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
 .venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_240323.model.v3 filter=lfs diff=lfs merge=lfs -text
+.venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7 filter=lfs diff=lfs merge=lfs -text
+.venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7m1 filter=lfs diff=lfs merge=lfs -text
+.venv/lib/python3.11/site-packages/mistral_common/data/tokenizer.model.v1 filter=lfs diff=lfs merge=lfs -text
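The three added attribute lines route the new mistral_common tokenizer files through Git LFS (filter=lfs diff=lfs merge=lfs) and mark them as non-text. A minimal sketch of listing the LFS-routed patterns from a .gitattributes file; the helper name and the standalone-script framing are illustrative, not part of this commit:

from pathlib import Path

def lfs_tracked_patterns(gitattributes=".gitattributes"):
    """Return the path patterns that a .gitattributes file routes through Git LFS."""
    patterns = []
    for line in Path(gitattributes).read_text().splitlines():
        parts = line.split()
        if len(parts) > 1 and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])
    return patterns

# The three paths added in this hunk would be included in the result.
# print(lfs_tracked_patterns())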
.venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1b968b8dc352f42192367337c78ccc61e1eaddc6d641a579372d4f20694beb7a
size 587562
.venv/lib/python3.11/site-packages/mistral_common/data/mistral_instruct_tokenizer_241114.model.v7m1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1b968b8dc352f42192367337c78ccc61e1eaddc6d641a579372d4f20694beb7a
size 587562
.venv/lib/python3.11/site-packages/mistral_common/data/tokenizer.model.v1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
size 493443
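All three files above are Git LFS pointer stubs (version, oid, size) rather than the tokenizer models themselves. A minimal sketch of reading such a pointer from Python; parse_lfs_pointer and the example path are illustrative helpers, not part of the repository:

from pathlib import Path

def parse_lfs_pointer(path):
    """Split a Git LFS pointer file into its version/oid/size fields."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# Hypothetical local path; the pointer above would yield the sha256 oid and size 493443.
# info = parse_lfs_pointer(".venv/lib/python3.11/site-packages/mistral_common/data/tokenizer.model.v1")
# print(info["oid"], int(info["size"]))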
.venv/lib/python3.11/site-packages/numpy/f2py/__init__.pyi
ADDED
@@ -0,0 +1,42 @@
import os
import subprocess
from collections.abc import Iterable
from typing import Literal as L, Any, overload, TypedDict

from numpy._pytesttester import PytestTester

class _F2PyDictBase(TypedDict):
    csrc: list[str]
    h: list[str]

class _F2PyDict(_F2PyDictBase, total=False):
    fsrc: list[str]
    ltx: list[str]

__all__: list[str]
test: PytestTester

def run_main(comline_list: Iterable[str]) -> dict[str, _F2PyDict]: ...

@overload
def compile(  # type: ignore[misc]
    source: str | bytes,
    modulename: str = ...,
    extra_args: str | list[str] = ...,
    verbose: bool = ...,
    source_fn: None | str | bytes | os.PathLike[Any] = ...,
    extension: L[".f", ".f90"] = ...,
    full_output: L[False] = ...,
) -> int: ...
@overload
def compile(
    source: str | bytes,
    modulename: str = ...,
    extra_args: str | list[str] = ...,
    verbose: bool = ...,
    source_fn: None | str | bytes | os.PathLike[Any] = ...,
    extension: L[".f", ".f90"] = ...,
    full_output: L[True] = ...,
) -> subprocess.CompletedProcess[bytes]: ...

def get_include() -> str: ...
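The stub above is the whole public surface of numpy.f2py (run_main, compile, get_include). A minimal usage sketch, assuming NumPy plus a working Fortran compiler and build backend are installed; the Fortran snippet and module name are made up for illustration:

import numpy.f2py

# Directory containing the f2py C sources/headers, for hand-rolled build setups.
print(numpy.f2py.get_include())

# compile() writes the source to a temporary file, runs f2py on it, and returns
# the exit status (0 on success); full_output=True would return the CompletedProcess.
fsource = b"""
      subroutine double_it(x)
      real*8 x
cf2py intent(in,out) x
      x = 2*x
      end
"""
status = numpy.f2py.compile(fsource, modulename="demo_mod", verbose=False)
print("f2py exit status:", status)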
.venv/lib/python3.11/site-packages/numpy/f2py/__main__.py
ADDED
@@ -0,0 +1,5 @@
# See:
# https://web.archive.org/web/20140822061353/http://cens.ioc.ee/projects/f2py2e
from numpy.f2py.f2py2e import main

main()
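This __main__ shim is what makes `python -m numpy.f2py` usable as a command-line entry point. A small sketch of driving it from Python rather than a shell; the -v flag printing the f2py version is an assumption based on the standard f2py CLI options, not something shown in this diff:

import subprocess
import sys

# Same as running `python -m numpy.f2py -v` in a shell; __main__.py above
# simply forwards to numpy.f2py.f2py2e.main().
result = subprocess.run(
    [sys.executable, "-m", "numpy.f2py", "-v"],
    capture_output=True,
    text=True,
    check=False,
)
print(result.stdout.strip())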
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (7.08 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/_isocbind.cpython-311.pyc
ADDED
Binary file (1.95 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/auxfuncs.cpython-311.pyc
ADDED
Binary file (44.7 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/capi_maps.cpython-311.pyc
ADDED
Binary file (36.7 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/rules.cpython-311.pyc
ADDED
Binary file (58.8 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/setup.cpython-311.pyc
ADDED
Binary file (2.9 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/symbolic.cpython-311.pyc
ADDED
Binary file (85.8 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__pycache__/use_rules.cpython-311.pyc
ADDED
Binary file (4.94 kB). View file
.venv/lib/python3.11/site-packages/numpy/f2py/__version__.py
ADDED
@@ -0,0 +1 @@
from numpy.version import version
.venv/lib/python3.11/site-packages/numpy/f2py/auxfuncs.py
ADDED
@@ -0,0 +1,988 @@
"""
Auxiliary functions for f2py2e.

Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
Copyright 2011 -- present NumPy Developers.
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) LICENSE.

NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
import pprint
import sys
import re
import types
from functools import reduce
from copy import deepcopy

from . import __version__
from . import cfuncs

__all__ = [
    'applyrules', 'debugcapi', 'dictappend', 'errmess', 'gentitle',
    'getargs2', 'getcallprotoargument', 'getcallstatement',
    'getfortranname', 'getpymethoddef', 'getrestdoc', 'getusercode',
    'getusercode1', 'getdimension', 'hasbody', 'hascallstatement', 'hascommon',
    'hasexternals', 'hasinitvalue', 'hasnote', 'hasresultnote',
    'isallocatable', 'isarray', 'isarrayofstrings',
    'ischaracter', 'ischaracterarray', 'ischaracter_or_characterarray',
    'iscomplex',
    'iscomplexarray', 'iscomplexfunction', 'iscomplexfunction_warn',
    'isdouble', 'isdummyroutine', 'isexternal', 'isfunction',
    'isfunction_wrap', 'isint1', 'isint1array', 'isinteger', 'isintent_aux',
    'isintent_c', 'isintent_callback', 'isintent_copy', 'isintent_dict',
    'isintent_hide', 'isintent_in', 'isintent_inout', 'isintent_inplace',
    'isintent_nothide', 'isintent_out', 'isintent_overwrite', 'islogical',
    'islogicalfunction', 'islong_complex', 'islong_double',
    'islong_doublefunction', 'islong_long', 'islong_longfunction',
    'ismodule', 'ismoduleroutine', 'isoptional', 'isprivate', 'isrequired',
    'isroutine', 'isscalar', 'issigned_long_longarray', 'isstring',
    'isstringarray', 'isstring_or_stringarray', 'isstringfunction',
    'issubroutine', 'get_f2py_modulename',
    'issubroutine_wrap', 'isthreadsafe', 'isunsigned', 'isunsigned_char',
    'isunsigned_chararray', 'isunsigned_long_long',
    'isunsigned_long_longarray', 'isunsigned_short',
    'isunsigned_shortarray', 'l_and', 'l_not', 'l_or', 'outmess',
    'replace', 'show', 'stripcomma', 'throw_error', 'isattr_value',
    'getuseblocks', 'process_f2cmap_dict'
]

f2py_version = __version__.version

errmess = sys.stderr.write
show = pprint.pprint

options = {}
debugoptions = []
wrapfuncs = 1

[Remaining 929 added lines not shown in this view: the is*/has* type and intent predicates, the l_and/l_or/l_not combinators, throw_error, the getfortranname/getmultilineblock/getcallstatement/getcallprotoargument accessors, getargs/getargs2/getrestdoc, and the replace/dictappend/applyrules rule machinery through get_f2py_modulename, getuseblocks and process_f2cmap_dict. See raw diff.]
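As a small illustration of the predicate helpers added in auxfuncs.py above: the is* functions inspect the dictionaries that f2py's Fortran parser produces for each variable, and l_and/l_or/l_not combine them. The hand-written var dict below stands in for a parsed `complex, intent(c, in)` declaration and is illustrative only:

from numpy.f2py.auxfuncs import l_and, l_not, l_or, iscomplex, isintent_c, isscalar

# A stand-in for the dict f2py would build for `complex, intent(c, in) :: z`.
var = {'typespec': 'complex', 'intent': ['c', 'in']}

# intent(c) and (scalar or complex) -- the same combination used in getcallprotoargument.
check = l_and(isintent_c, l_or(isscalar, iscomplex))
print(check(var))              # True
print(l_not(isintent_c)(var))  # False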
.venv/lib/python3.11/site-packages/numpy/f2py/cb_rules.py
ADDED
@@ -0,0 +1,644 @@
"""
Build call-back mechanism for f2py2e.

Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
Copyright 2011 -- present NumPy Developers.
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.

NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
from . import __version__
from .auxfuncs import (
    applyrules, debugcapi, dictappend, errmess, getargs, hasnote, isarray,
    iscomplex, iscomplexarray, iscomplexfunction, isfunction, isintent_c,
    isintent_hide, isintent_in, isintent_inout, isintent_nothide,
    isintent_out, isoptional, isrequired, isscalar, isstring,
    isstringfunction, issubroutine, l_and, l_not, l_or, outmess, replace,
    stripcomma, throw_error
)
from . import cfuncs

f2py_version = __version__.version


################## Rules for callback function ##############

[Remaining added lines not shown in this view: the cb_routine_rules C template block, cb_rout_rules, and cb_arg_rules; the listing here breaks off partway through cb_arg_rules (644 added lines in total). See raw diff.]
|
341 |
+
{hasnote: '--- #note#'}]},
|
342 |
+
'latexdocstrreq': {l_and(isrequired, isintent_nothide): ['\\item[]{{}\\verb@#pydocsign#@{}}',
|
343 |
+
{hasnote: '--- #note#'}]},
|
344 |
+
'latexdocstrout': {isintent_out: ['\\item[]{{}\\verb@#pydocsignout#@{}}',
|
345 |
+
{l_and(hasnote, isintent_hide): '--- #note#',
|
346 |
+
l_and(hasnote, isintent_nothide): '--- See above.'}]},
|
347 |
+
'docsign': {l_and(isrequired, isintent_nothide): '#varname#,'},
|
348 |
+
'docsignopt': {l_and(isoptional, isintent_nothide): '#varname#,'},
|
349 |
+
'depend': ''
|
350 |
+
},
|
351 |
+
{
|
352 |
+
'args': {
|
353 |
+
l_and(isscalar, isintent_c): '#ctype# #varname_i#',
|
354 |
+
l_and(isscalar, l_not(isintent_c)): '#ctype# *#varname_i#_cb_capi',
|
355 |
+
isarray: '#ctype# *#varname_i#',
|
356 |
+
isstring: '#ctype# #varname_i#'
|
357 |
+
},
|
358 |
+
'args_nm': {
|
359 |
+
l_and(isscalar, isintent_c): '#varname_i#',
|
360 |
+
l_and(isscalar, l_not(isintent_c)): '#varname_i#_cb_capi',
|
361 |
+
isarray: '#varname_i#',
|
362 |
+
isstring: '#varname_i#'
|
363 |
+
},
|
364 |
+
'args_td': {
|
365 |
+
l_and(isscalar, isintent_c): '#ctype#',
|
366 |
+
l_and(isscalar, l_not(isintent_c)): '#ctype# *',
|
367 |
+
isarray: '#ctype# *',
|
368 |
+
isstring: '#ctype#'
|
369 |
+
},
|
370 |
+
'need': {l_or(isscalar, isarray, isstring): '#ctype#'},
|
371 |
+
# untested with multiple args
|
372 |
+
'strarglens': {isstring: ',int #varname_i#_cb_len'},
|
373 |
+
'strarglens_td': {isstring: ',int'}, # untested with multiple args
|
374 |
+
# untested with multiple args
|
375 |
+
'strarglens_nm': {isstring: ',#varname_i#_cb_len'},
|
376 |
+
},
|
377 |
+
{ # Scalars
|
378 |
+
'decl': {l_not(isintent_c): ' #ctype# #varname_i#=(*#varname_i#_cb_capi);'},
|
379 |
+
'error': {l_and(isintent_c, isintent_out,
|
380 |
+
throw_error('intent(c,out) is forbidden for callback scalar arguments')):
|
381 |
+
''},
|
382 |
+
'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->");'},
|
383 |
+
{isintent_out:
|
384 |
+
' if (capi_j>capi_i)\n GETSCALARFROMPYTUPLE(capi_return,capi_i++,#varname_i#_cb_capi,#ctype#,"#ctype#_from_pyobj failed in converting argument #varname# of call-back function #name# to C #ctype#\\n");'},
|
385 |
+
{l_and(debugcapi, l_and(l_not(iscomplex), isintent_c)):
|
386 |
+
' fprintf(stderr,"#showvalueformat#.\\n",#varname_i#);'},
|
387 |
+
{l_and(debugcapi, l_and(l_not(iscomplex), l_not( isintent_c))):
|
388 |
+
' fprintf(stderr,"#showvalueformat#.\\n",*#varname_i#_cb_capi);'},
|
389 |
+
{l_and(debugcapi, l_and(iscomplex, isintent_c)):
|
390 |
+
' fprintf(stderr,"#showvalueformat#.\\n",(#varname_i#).r,(#varname_i#).i);'},
|
391 |
+
{l_and(debugcapi, l_and(iscomplex, l_not( isintent_c))):
|
392 |
+
' fprintf(stderr,"#showvalueformat#.\\n",(*#varname_i#_cb_capi).r,(*#varname_i#_cb_capi).i);'},
|
393 |
+
],
|
394 |
+
'need': [{isintent_out: ['#ctype#_from_pyobj', 'GETSCALARFROMPYTUPLE']},
|
395 |
+
{debugcapi: 'CFUNCSMESS'}],
|
396 |
+
'_check': isscalar
|
397 |
+
}, {
|
398 |
+
'pyobjfrom': [{isintent_in: """\
|
399 |
+
if (cb->nofargs>capi_i)
|
400 |
+
if (CAPI_ARGLIST_SETITEM(capi_i++,pyobj_from_#ctype#1(#varname_i#)))
|
401 |
+
goto capi_fail;"""},
|
402 |
+
{isintent_inout: """\
|
403 |
+
if (cb->nofargs>capi_i)
|
404 |
+
if (CAPI_ARGLIST_SETITEM(capi_i++,pyarr_from_p_#ctype#1(#varname_i#_cb_capi)))
|
405 |
+
goto capi_fail;"""}],
|
406 |
+
'need': [{isintent_in: 'pyobj_from_#ctype#1'},
|
407 |
+
{isintent_inout: 'pyarr_from_p_#ctype#1'},
|
408 |
+
{iscomplex: '#ctype#'}],
|
409 |
+
'_check': l_and(isscalar, isintent_nothide),
|
410 |
+
'_optional': ''
|
411 |
+
}, { # String
|
412 |
+
'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->\\"");'},
|
413 |
+
""" if (capi_j>capi_i)
|
414 |
+
GETSTRFROMPYTUPLE(capi_return,capi_i++,#varname_i#,#varname_i#_cb_len);""",
|
415 |
+
{debugcapi:
|
416 |
+
' fprintf(stderr,"#showvalueformat#\\":%d:.\\n",#varname_i#,#varname_i#_cb_len);'},
|
417 |
+
],
|
418 |
+
'need': ['#ctype#', 'GETSTRFROMPYTUPLE',
|
419 |
+
{debugcapi: 'CFUNCSMESS'}, 'string.h'],
|
420 |
+
'_check': l_and(isstring, isintent_out)
|
421 |
+
}, {
|
422 |
+
'pyobjfrom': [
|
423 |
+
{debugcapi:
|
424 |
+
(' fprintf(stderr,"debug-capi:cb:#varname#=#showvalueformat#:'
|
425 |
+
'%d:\\n",#varname_i#,#varname_i#_cb_len);')},
|
426 |
+
{isintent_in: """\
|
427 |
+
if (cb->nofargs>capi_i)
|
428 |
+
if (CAPI_ARGLIST_SETITEM(capi_i++,pyobj_from_#ctype#1size(#varname_i#,#varname_i#_cb_len)))
|
429 |
+
goto capi_fail;"""},
|
430 |
+
{isintent_inout: """\
|
431 |
+
if (cb->nofargs>capi_i) {
|
432 |
+
int #varname_i#_cb_dims[] = {#varname_i#_cb_len};
|
433 |
+
if (CAPI_ARGLIST_SETITEM(capi_i++,pyarr_from_p_#ctype#1(#varname_i#,#varname_i#_cb_dims)))
|
434 |
+
goto capi_fail;
|
435 |
+
}"""}],
|
436 |
+
'need': [{isintent_in: 'pyobj_from_#ctype#1size'},
|
437 |
+
{isintent_inout: 'pyarr_from_p_#ctype#1'}],
|
438 |
+
'_check': l_and(isstring, isintent_nothide),
|
439 |
+
'_optional': ''
|
440 |
+
},
|
441 |
+
# Array ...
|
442 |
+
{
|
443 |
+
'decl': ' npy_intp #varname_i#_Dims[#rank#] = {#rank*[-1]#};',
|
444 |
+
'setdims': ' #cbsetdims#;',
|
445 |
+
'_check': isarray,
|
446 |
+
'_depend': ''
|
447 |
+
},
|
448 |
+
{
|
449 |
+
'pyobjfrom': [{debugcapi: ' fprintf(stderr,"debug-capi:cb:#varname#\\n");'},
|
450 |
+
{isintent_c: """\
|
451 |
+
if (cb->nofargs>capi_i) {
|
452 |
+
/* tmp_arr will be inserted to capi_arglist_list that will be
|
453 |
+
destroyed when leaving callback function wrapper together
|
454 |
+
with tmp_arr. */
|
455 |
+
PyArrayObject *tmp_arr = (PyArrayObject *)PyArray_New(&PyArray_Type,
|
456 |
+
#rank#,#varname_i#_Dims,#atype#,NULL,(char*)#varname_i#,#elsize#,
|
457 |
+
NPY_ARRAY_CARRAY,NULL);
|
458 |
+
""",
|
459 |
+
l_not(isintent_c): """\
|
460 |
+
if (cb->nofargs>capi_i) {
|
461 |
+
/* tmp_arr will be inserted to capi_arglist_list that will be
|
462 |
+
destroyed when leaving callback function wrapper together
|
463 |
+
with tmp_arr. */
|
464 |
+
PyArrayObject *tmp_arr = (PyArrayObject *)PyArray_New(&PyArray_Type,
|
465 |
+
#rank#,#varname_i#_Dims,#atype#,NULL,(char*)#varname_i#,#elsize#,
|
466 |
+
NPY_ARRAY_FARRAY,NULL);
|
467 |
+
""",
|
468 |
+
},
|
469 |
+
"""
|
470 |
+
if (tmp_arr==NULL)
|
471 |
+
goto capi_fail;
|
472 |
+
if (CAPI_ARGLIST_SETITEM(capi_i++,(PyObject *)tmp_arr))
|
473 |
+
goto capi_fail;
|
474 |
+
}"""],
|
475 |
+
'_check': l_and(isarray, isintent_nothide, l_or(isintent_in, isintent_inout)),
|
476 |
+
'_optional': '',
|
477 |
+
}, {
|
478 |
+
'frompyobj': [{debugcapi: ' CFUNCSMESS("cb:Getting #varname#->");'},
|
479 |
+
""" if (capi_j>capi_i) {
|
480 |
+
PyArrayObject *rv_cb_arr = NULL;
|
481 |
+
if ((capi_tmp = PyTuple_GetItem(capi_return,capi_i++))==NULL) goto capi_fail;
|
482 |
+
rv_cb_arr = array_from_pyobj(#atype#,#varname_i#_Dims,#rank#,F2PY_INTENT_IN""",
|
483 |
+
{isintent_c: '|F2PY_INTENT_C'},
|
484 |
+
""",capi_tmp);
|
485 |
+
if (rv_cb_arr == NULL) {
|
486 |
+
fprintf(stderr,\"rv_cb_arr is NULL\\n\");
|
487 |
+
goto capi_fail;
|
488 |
+
}
|
489 |
+
MEMCOPY(#varname_i#,PyArray_DATA(rv_cb_arr),PyArray_NBYTES(rv_cb_arr));
|
490 |
+
if (capi_tmp != (PyObject *)rv_cb_arr) {
|
491 |
+
Py_DECREF(rv_cb_arr);
|
492 |
+
}
|
493 |
+
}""",
|
494 |
+
{debugcapi: ' fprintf(stderr,"<-.\\n");'},
|
495 |
+
],
|
496 |
+
'need': ['MEMCOPY', {iscomplexarray: '#ctype#'}],
|
497 |
+
'_check': l_and(isarray, isintent_out)
|
498 |
+
}, {
|
499 |
+
'docreturn': '#varname#,',
|
500 |
+
'_check': isintent_out
|
501 |
+
}
|
502 |
+
]

################## Build call-back module #############
cb_map = {}


def buildcallbacks(m):
    cb_map[m['name']] = []
    for bi in m['body']:
        if bi['block'] == 'interface':
            for b in bi['body']:
                if b:
                    buildcallback(b, m['name'])
                else:
                    errmess('warning: empty body for %s\n' % (m['name']))


def buildcallback(rout, um):
    from . import capi_maps

    outmess(' Constructing call-back function "cb_%s_in_%s"\n' %
            (rout['name'], um))
    args, depargs = getargs(rout)
    capi_maps.depargs = depargs
    var = rout['vars']
    vrd = capi_maps.cb_routsign2map(rout, um)
    rd = dictappend({}, vrd)
    cb_map[um].append([rout['name'], rd['name']])
    for r in cb_rout_rules:
        if ('_check' in r and r['_check'](rout)) or ('_check' not in r):
            ar = applyrules(r, vrd, rout)
            rd = dictappend(rd, ar)
    savevrd = {}
    for i, a in enumerate(args):
        vrd = capi_maps.cb_sign2map(a, var[a], index=i)
        savevrd[a] = vrd
        for r in cb_arg_rules:
            if '_depend' in r:
                continue
            if '_optional' in r and isoptional(var[a]):
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
    for a in args:
        vrd = savevrd[a]
        for r in cb_arg_rules:
            if '_depend' in r:
                continue
            if ('_optional' not in r) or ('_optional' in r and isrequired(var[a])):
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
    for a in depargs:
        vrd = savevrd[a]
        for r in cb_arg_rules:
            if '_depend' not in r:
                continue
            if '_optional' in r:
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
    if 'args' in rd and 'optargs' in rd:
        if isinstance(rd['optargs'], list):
            rd['optargs'] = rd['optargs'] + ["""
#ifndef F2PY_CB_RETURNCOMPLEX
,
#endif
"""]
            rd['optargs_nm'] = rd['optargs_nm'] + ["""
#ifndef F2PY_CB_RETURNCOMPLEX
,
#endif
"""]
            rd['optargs_td'] = rd['optargs_td'] + ["""
#ifndef F2PY_CB_RETURNCOMPLEX
,
#endif
"""]
    if isinstance(rd['docreturn'], list):
        rd['docreturn'] = stripcomma(
            replace('#docreturn#', {'docreturn': rd['docreturn']}))
    optargs = stripcomma(replace('#docsignopt#',
                                 {'docsignopt': rd['docsignopt']}
                                 ))
    if optargs == '':
        rd['docsignature'] = stripcomma(
            replace('#docsign#', {'docsign': rd['docsign']}))
    else:
        rd['docsignature'] = replace('#docsign#[#docsignopt#]',
                                     {'docsign': rd['docsign'],
                                      'docsignopt': optargs,
                                      })
    rd['latexdocsignature'] = rd['docsignature'].replace('_', '\\_')
    rd['latexdocsignature'] = rd['latexdocsignature'].replace(',', ', ')
    rd['docstrsigns'] = []
    rd['latexdocstrsigns'] = []
    for k in ['docstrreq', 'docstropt', 'docstrout', 'docstrcbs']:
        if k in rd and isinstance(rd[k], list):
            rd['docstrsigns'] = rd['docstrsigns'] + rd[k]
        k = 'latex' + k
        if k in rd and isinstance(rd[k], list):
            rd['latexdocstrsigns'] = rd['latexdocstrsigns'] + rd[k][0:1] +\
                ['\\begin{description}'] + rd[k][1:] +\
                ['\\end{description}']
    if 'args' not in rd:
        rd['args'] = ''
        rd['args_td'] = ''
        rd['args_nm'] = ''
    if not (rd.get('args') or rd.get('optargs') or rd.get('strarglens')):
        rd['noargs'] = 'void'

    ar = applyrules(cb_routine_rules, rd)
    cfuncs.callbacks[rd['name']] = ar['body']
    if isinstance(ar['need'], str):
        ar['need'] = [ar['need']]

    if 'need' in rd:
        for t in cfuncs.typedefs.keys():
            if t in rd['need']:
                ar['need'].append(t)

    cfuncs.typedefs_generated[rd['name'] + '_typedef'] = ar['cbtypedefs']
    ar['need'].append(rd['name'] + '_typedef')
    cfuncs.needs[rd['name']] = ar['need']

    capi_maps.lcb2_map[rd['name']] = {'maxnofargs': ar['maxnofargs'],
                                      'nofoptargs': ar['nofoptargs'],
                                      'docstr': ar['docstr'],
                                      'latexdocstr': ar['latexdocstr'],
                                      'argname': rd['argname']
                                      }
    outmess(' %s\n' % (ar['docstrshort']))
    return
################## Build call-back function #############
.venv/lib/python3.11/site-packages/numpy/f2py/cfuncs.py
ADDED
@@ -0,0 +1,1536 @@
#!/usr/bin/env python3
"""
C declarations, CPP macros, and C functions for f2py2e.
Only required declarations/macros/functions will be used.

Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
Copyright 2011 -- present NumPy Developers.
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.

NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
import sys
import copy

from . import __version__

f2py_version = __version__.version
errmess = sys.stderr.write

##################### Definitions ##################

outneeds = {'includes0': [], 'includes': [], 'typedefs': [], 'typedefs_generated': [],
            'userincludes': [],
            'cppmacros': [], 'cfuncs': [], 'callbacks': [], 'f90modhooks': [],
            'commonhooks': []}
needs = {}
includes0 = {'includes0': '/*need_includes0*/'}
includes = {'includes': '/*need_includes*/'}
userincludes = {'userincludes': '/*need_userincludes*/'}
typedefs = {'typedefs': '/*need_typedefs*/'}
typedefs_generated = {'typedefs_generated': '/*need_typedefs_generated*/'}
cppmacros = {'cppmacros': '/*need_cppmacros*/'}
cfuncs = {'cfuncs': '/*need_cfuncs*/'}
callbacks = {'callbacks': '/*need_callbacks*/'}
f90modhooks = {'f90modhooks': '/*need_f90modhooks*/',
               'initf90modhooksstatic': '/*initf90modhooksstatic*/',
               'initf90modhooksdynamic': '/*initf90modhooksdynamic*/',
               }
commonhooks = {'commonhooks': '/*need_commonhooks*/',
               'initcommonhooks': '/*need_initcommonhooks*/',
               }

############ Includes ###################

includes0['math.h'] = '#include <math.h>'
includes0['string.h'] = '#include <string.h>'
includes0['setjmp.h'] = '#include <setjmp.h>'

includes['arrayobject.h'] = '''#define PY_ARRAY_UNIQUE_SYMBOL PyArray_API
#include "arrayobject.h"'''
includes['npy_math.h'] = '#include "numpy/npy_math.h"'

includes['arrayobject.h'] = '#include "fortranobject.h"'
includes['stdarg.h'] = '#include <stdarg.h>'

############# Type definitions ###############

typedefs['unsigned_char'] = 'typedef unsigned char unsigned_char;'
typedefs['unsigned_short'] = 'typedef unsigned short unsigned_short;'
typedefs['unsigned_long'] = 'typedef unsigned long unsigned_long;'
typedefs['signed_char'] = 'typedef signed char signed_char;'
typedefs['long_long'] = """
#if defined(NPY_OS_WIN32)
typedef __int64 long_long;
#else
typedef long long long_long;
typedef unsigned long long unsigned_long_long;
#endif
"""
typedefs['unsigned_long_long'] = """
#if defined(NPY_OS_WIN32)
typedef __uint64 long_long;
#else
typedef unsigned long long unsigned_long_long;
#endif
"""
typedefs['long_double'] = """
#ifndef _LONG_DOUBLE
typedef long double long_double;
#endif
"""
typedefs[
    'complex_long_double'] = 'typedef struct {long double r,i;} complex_long_double;'
typedefs['complex_float'] = 'typedef struct {float r,i;} complex_float;'
typedefs['complex_double'] = 'typedef struct {double r,i;} complex_double;'
typedefs['string'] = """typedef char * string;"""
typedefs['character'] = """typedef char character;"""


############### CPP macros ####################
cppmacros['CFUNCSMESS'] = """
|
93 |
+
#ifdef DEBUGCFUNCS
|
94 |
+
#define CFUNCSMESS(mess) fprintf(stderr,\"debug-capi:\"mess);
|
95 |
+
#define CFUNCSMESSPY(mess,obj) CFUNCSMESS(mess) \\
|
96 |
+
PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
|
97 |
+
fprintf(stderr,\"\\n\");
|
98 |
+
#else
|
99 |
+
#define CFUNCSMESS(mess)
|
100 |
+
#define CFUNCSMESSPY(mess,obj)
|
101 |
+
#endif
|
102 |
+
"""
|
103 |
+
cppmacros['F_FUNC'] = """
|
104 |
+
#if defined(PREPEND_FORTRAN)
|
105 |
+
#if defined(NO_APPEND_FORTRAN)
|
106 |
+
#if defined(UPPERCASE_FORTRAN)
|
107 |
+
#define F_FUNC(f,F) _##F
|
108 |
+
#else
|
109 |
+
#define F_FUNC(f,F) _##f
|
110 |
+
#endif
|
111 |
+
#else
|
112 |
+
#if defined(UPPERCASE_FORTRAN)
|
113 |
+
#define F_FUNC(f,F) _##F##_
|
114 |
+
#else
|
115 |
+
#define F_FUNC(f,F) _##f##_
|
116 |
+
#endif
|
117 |
+
#endif
|
118 |
+
#else
|
119 |
+
#if defined(NO_APPEND_FORTRAN)
|
120 |
+
#if defined(UPPERCASE_FORTRAN)
|
121 |
+
#define F_FUNC(f,F) F
|
122 |
+
#else
|
123 |
+
#define F_FUNC(f,F) f
|
124 |
+
#endif
|
125 |
+
#else
|
126 |
+
#if defined(UPPERCASE_FORTRAN)
|
127 |
+
#define F_FUNC(f,F) F##_
|
128 |
+
#else
|
129 |
+
#define F_FUNC(f,F) f##_
|
130 |
+
#endif
|
131 |
+
#endif
|
132 |
+
#endif
|
133 |
+
#if defined(UNDERSCORE_G77)
|
134 |
+
#define F_FUNC_US(f,F) F_FUNC(f##_,F##_)
|
135 |
+
#else
|
136 |
+
#define F_FUNC_US(f,F) F_FUNC(f,F)
|
137 |
+
#endif
|
138 |
+
"""
|
139 |
+
cppmacros['F_WRAPPEDFUNC'] = """
|
140 |
+
#if defined(PREPEND_FORTRAN)
|
141 |
+
#if defined(NO_APPEND_FORTRAN)
|
142 |
+
#if defined(UPPERCASE_FORTRAN)
|
143 |
+
#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F
|
144 |
+
#else
|
145 |
+
#define F_WRAPPEDFUNC(f,F) _f2pywrap##f
|
146 |
+
#endif
|
147 |
+
#else
|
148 |
+
#if defined(UPPERCASE_FORTRAN)
|
149 |
+
#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F##_
|
150 |
+
#else
|
151 |
+
#define F_WRAPPEDFUNC(f,F) _f2pywrap##f##_
|
152 |
+
#endif
|
153 |
+
#endif
|
154 |
+
#else
|
155 |
+
#if defined(NO_APPEND_FORTRAN)
|
156 |
+
#if defined(UPPERCASE_FORTRAN)
|
157 |
+
#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F
|
158 |
+
#else
|
159 |
+
#define F_WRAPPEDFUNC(f,F) f2pywrap##f
|
160 |
+
#endif
|
161 |
+
#else
|
162 |
+
#if defined(UPPERCASE_FORTRAN)
|
163 |
+
#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F##_
|
164 |
+
#else
|
165 |
+
#define F_WRAPPEDFUNC(f,F) f2pywrap##f##_
|
166 |
+
#endif
|
167 |
+
#endif
|
168 |
+
#endif
|
169 |
+
#if defined(UNDERSCORE_G77)
|
170 |
+
#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f##_,F##_)
|
171 |
+
#else
|
172 |
+
#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f,F)
|
173 |
+
#endif
|
174 |
+
"""
|
175 |
+
cppmacros['F_MODFUNC'] = """
|
176 |
+
#if defined(F90MOD2CCONV1) /*E.g. Compaq Fortran */
|
177 |
+
#if defined(NO_APPEND_FORTRAN)
|
178 |
+
#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f
|
179 |
+
#else
|
180 |
+
#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f ## _
|
181 |
+
#endif
|
182 |
+
#endif
|
183 |
+
|
184 |
+
#if defined(F90MOD2CCONV2) /*E.g. IBM XL Fortran, not tested though */
|
185 |
+
#if defined(NO_APPEND_FORTRAN)
|
186 |
+
#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f
|
187 |
+
#else
|
188 |
+
#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f ## _
|
189 |
+
#endif
|
190 |
+
#endif
|
191 |
+
|
192 |
+
#if defined(F90MOD2CCONV3) /*E.g. MIPSPro Compilers */
|
193 |
+
#if defined(NO_APPEND_FORTRAN)
|
194 |
+
#define F_MODFUNCNAME(m,f) f ## .in. ## m
|
195 |
+
#else
|
196 |
+
#define F_MODFUNCNAME(m,f) f ## .in. ## m ## _
|
197 |
+
#endif
|
198 |
+
#endif
|
199 |
+
/*
|
200 |
+
#if defined(UPPERCASE_FORTRAN)
|
201 |
+
#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(M,F)
|
202 |
+
#else
|
203 |
+
#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(m,f)
|
204 |
+
#endif
|
205 |
+
*/
|
206 |
+
|
207 |
+
#define F_MODFUNC(m,f) (*(f2pymodstruct##m##.##f))
|
208 |
+
"""
|
209 |
+
cppmacros['SWAPUNSAFE'] = """
|
210 |
+
#define SWAP(a,b) (size_t)(a) = ((size_t)(a) ^ (size_t)(b));\\
|
211 |
+
(size_t)(b) = ((size_t)(a) ^ (size_t)(b));\\
|
212 |
+
(size_t)(a) = ((size_t)(a) ^ (size_t)(b))
|
213 |
+
"""
|
214 |
+
cppmacros['SWAP'] = """
|
215 |
+
#define SWAP(a,b,t) {\\
|
216 |
+
t *c;\\
|
217 |
+
c = a;\\
|
218 |
+
a = b;\\
|
219 |
+
b = c;}
|
220 |
+
"""
|
221 |
+
# cppmacros['ISCONTIGUOUS']='#define ISCONTIGUOUS(m) (PyArray_FLAGS(m) &
|
222 |
+
# NPY_ARRAY_C_CONTIGUOUS)'
|
223 |
+
cppmacros['PRINTPYOBJERR'] = """
|
224 |
+
#define PRINTPYOBJERR(obj)\\
|
225 |
+
fprintf(stderr,\"#modulename#.error is related to \");\\
|
226 |
+
PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
|
227 |
+
fprintf(stderr,\"\\n\");
|
228 |
+
"""
|
229 |
+
cppmacros['MINMAX'] = """
|
230 |
+
#ifndef max
|
231 |
+
#define max(a,b) ((a > b) ? (a) : (b))
|
232 |
+
#endif
|
233 |
+
#ifndef min
|
234 |
+
#define min(a,b) ((a < b) ? (a) : (b))
|
235 |
+
#endif
|
236 |
+
#ifndef MAX
|
237 |
+
#define MAX(a,b) ((a > b) ? (a) : (b))
|
238 |
+
#endif
|
239 |
+
#ifndef MIN
|
240 |
+
#define MIN(a,b) ((a < b) ? (a) : (b))
|
241 |
+
#endif
|
242 |
+
"""
|
243 |
+
cppmacros['len..'] = """
|
244 |
+
/* See fortranobject.h for definitions. The macros here are provided for BC. */
|
245 |
+
#define rank f2py_rank
|
246 |
+
#define shape f2py_shape
|
247 |
+
#define fshape f2py_shape
|
248 |
+
#define len f2py_len
|
249 |
+
#define flen f2py_flen
|
250 |
+
#define slen f2py_slen
|
251 |
+
#define size f2py_size
|
252 |
+
"""
|
253 |
+
cppmacros['pyobj_from_char1'] = r"""
|
254 |
+
#define pyobj_from_char1(v) (PyLong_FromLong(v))
|
255 |
+
"""
|
256 |
+
cppmacros['pyobj_from_short1'] = r"""
|
257 |
+
#define pyobj_from_short1(v) (PyLong_FromLong(v))
|
258 |
+
"""
|
259 |
+
needs['pyobj_from_int1'] = ['signed_char']
|
260 |
+
cppmacros['pyobj_from_int1'] = r"""
|
261 |
+
#define pyobj_from_int1(v) (PyLong_FromLong(v))
|
262 |
+
"""
|
263 |
+
cppmacros['pyobj_from_long1'] = r"""
|
264 |
+
#define pyobj_from_long1(v) (PyLong_FromLong(v))
|
265 |
+
"""
|
266 |
+
needs['pyobj_from_long_long1'] = ['long_long']
|
267 |
+
cppmacros['pyobj_from_long_long1'] = """
|
268 |
+
#ifdef HAVE_LONG_LONG
|
269 |
+
#define pyobj_from_long_long1(v) (PyLong_FromLongLong(v))
|
270 |
+
#else
|
271 |
+
#warning HAVE_LONG_LONG is not available. Redefining pyobj_from_long_long.
|
272 |
+
#define pyobj_from_long_long1(v) (PyLong_FromLong(v))
|
273 |
+
#endif
|
274 |
+
"""
|
275 |
+
needs['pyobj_from_long_double1'] = ['long_double']
|
276 |
+
cppmacros['pyobj_from_long_double1'] = """
|
277 |
+
#define pyobj_from_long_double1(v) (PyFloat_FromDouble(v))"""
|
278 |
+
cppmacros['pyobj_from_double1'] = """
|
279 |
+
#define pyobj_from_double1(v) (PyFloat_FromDouble(v))"""
|
280 |
+
cppmacros['pyobj_from_float1'] = """
|
281 |
+
#define pyobj_from_float1(v) (PyFloat_FromDouble(v))"""
|
282 |
+
needs['pyobj_from_complex_long_double1'] = ['complex_long_double']
|
283 |
+
cppmacros['pyobj_from_complex_long_double1'] = """
|
284 |
+
#define pyobj_from_complex_long_double1(v) (PyComplex_FromDoubles(v.r,v.i))"""
|
285 |
+
needs['pyobj_from_complex_double1'] = ['complex_double']
|
286 |
+
cppmacros['pyobj_from_complex_double1'] = """
|
287 |
+
#define pyobj_from_complex_double1(v) (PyComplex_FromDoubles(v.r,v.i))"""
|
288 |
+
needs['pyobj_from_complex_float1'] = ['complex_float']
|
289 |
+
cppmacros['pyobj_from_complex_float1'] = """
|
290 |
+
#define pyobj_from_complex_float1(v) (PyComplex_FromDoubles(v.r,v.i))"""
|
291 |
+
needs['pyobj_from_string1'] = ['string']
|
292 |
+
cppmacros['pyobj_from_string1'] = """
|
293 |
+
#define pyobj_from_string1(v) (PyUnicode_FromString((char *)v))"""
|
294 |
+
needs['pyobj_from_string1size'] = ['string']
|
295 |
+
cppmacros['pyobj_from_string1size'] = """
|
296 |
+
#define pyobj_from_string1size(v,len) (PyUnicode_FromStringAndSize((char *)v, len))"""
|
297 |
+
needs['TRYPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
|
298 |
+
cppmacros['TRYPYARRAYTEMPLATE'] = """
|
299 |
+
/* New SciPy */
|
300 |
+
#define TRYPYARRAYTEMPLATECHAR case NPY_STRING: *(char *)(PyArray_DATA(arr))=*v; break;
|
301 |
+
#define TRYPYARRAYTEMPLATELONG case NPY_LONG: *(long *)(PyArray_DATA(arr))=*v; break;
|
302 |
+
#define TRYPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr,PyArray_DATA(arr),pyobj_from_ ## ctype ## 1(*v)); break;
|
303 |
+
|
304 |
+
#define TRYPYARRAYTEMPLATE(ctype,typecode) \\
|
305 |
+
PyArrayObject *arr = NULL;\\
|
306 |
+
if (!obj) return -2;\\
|
307 |
+
if (!PyArray_Check(obj)) return -1;\\
|
308 |
+
if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
|
309 |
+
if (PyArray_DESCR(arr)->type==typecode) {*(ctype *)(PyArray_DATA(arr))=*v; return 1;}\\
|
310 |
+
switch (PyArray_TYPE(arr)) {\\
|
311 |
+
case NPY_DOUBLE: *(npy_double *)(PyArray_DATA(arr))=*v; break;\\
|
312 |
+
case NPY_INT: *(npy_int *)(PyArray_DATA(arr))=*v; break;\\
|
313 |
+
case NPY_LONG: *(npy_long *)(PyArray_DATA(arr))=*v; break;\\
|
314 |
+
case NPY_FLOAT: *(npy_float *)(PyArray_DATA(arr))=*v; break;\\
|
315 |
+
case NPY_CDOUBLE: *(npy_double *)(PyArray_DATA(arr))=*v; break;\\
|
316 |
+
case NPY_CFLOAT: *(npy_float *)(PyArray_DATA(arr))=*v; break;\\
|
317 |
+
case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=(*v!=0); break;\\
|
318 |
+
case NPY_UBYTE: *(npy_ubyte *)(PyArray_DATA(arr))=*v; break;\\
|
319 |
+
case NPY_BYTE: *(npy_byte *)(PyArray_DATA(arr))=*v; break;\\
|
320 |
+
case NPY_SHORT: *(npy_short *)(PyArray_DATA(arr))=*v; break;\\
|
321 |
+
case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=*v; break;\\
|
322 |
+
case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=*v; break;\\
|
323 |
+
case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=*v; break;\\
|
324 |
+
case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=*v; break;\\
|
325 |
+
case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=*v; break;\\
|
326 |
+
case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
|
327 |
+
case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
|
328 |
+
case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_ ## ctype ## 1(*v)); break;\\
|
329 |
+
default: return -2;\\
|
330 |
+
};\\
|
331 |
+
return 1
|
332 |
+
"""
|
333 |
+
|
334 |
+
needs['TRYCOMPLEXPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
|
335 |
+
cppmacros['TRYCOMPLEXPYARRAYTEMPLATE'] = """
|
336 |
+
#define TRYCOMPLEXPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;
|
337 |
+
#define TRYCOMPLEXPYARRAYTEMPLATE(ctype,typecode)\\
|
338 |
+
PyArrayObject *arr = NULL;\\
|
339 |
+
if (!obj) return -2;\\
|
340 |
+
if (!PyArray_Check(obj)) return -1;\\
|
341 |
+
if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYCOMPLEXPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
|
342 |
+
if (PyArray_DESCR(arr)->type==typecode) {\\
|
343 |
+
*(ctype *)(PyArray_DATA(arr))=(*v).r;\\
|
344 |
+
*(ctype *)(PyArray_DATA(arr)+sizeof(ctype))=(*v).i;\\
|
345 |
+
return 1;\\
|
346 |
+
}\\
|
347 |
+
switch (PyArray_TYPE(arr)) {\\
|
348 |
+
case NPY_CDOUBLE: *(npy_double *)(PyArray_DATA(arr))=(*v).r;\\
|
349 |
+
*(npy_double *)(PyArray_DATA(arr)+sizeof(npy_double))=(*v).i;\\
|
350 |
+
break;\\
|
351 |
+
case NPY_CFLOAT: *(npy_float *)(PyArray_DATA(arr))=(*v).r;\\
|
352 |
+
*(npy_float *)(PyArray_DATA(arr)+sizeof(npy_float))=(*v).i;\\
|
353 |
+
break;\\
|
354 |
+
case NPY_DOUBLE: *(npy_double *)(PyArray_DATA(arr))=(*v).r; break;\\
|
355 |
+
case NPY_LONG: *(npy_long *)(PyArray_DATA(arr))=(*v).r; break;\\
|
356 |
+
case NPY_FLOAT: *(npy_float *)(PyArray_DATA(arr))=(*v).r; break;\\
|
357 |
+
case NPY_INT: *(npy_int *)(PyArray_DATA(arr))=(*v).r; break;\\
|
358 |
+
case NPY_SHORT: *(npy_short *)(PyArray_DATA(arr))=(*v).r; break;\\
|
359 |
+
case NPY_UBYTE: *(npy_ubyte *)(PyArray_DATA(arr))=(*v).r; break;\\
|
360 |
+
case NPY_BYTE: *(npy_byte *)(PyArray_DATA(arr))=(*v).r; break;\\
|
361 |
+
case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=((*v).r!=0 && (*v).i!=0); break;\\
|
362 |
+
case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=(*v).r; break;\\
|
363 |
+
case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=(*v).r; break;\\
|
364 |
+
case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=(*v).r; break;\\
|
365 |
+
case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=(*v).r; break;\\
|
366 |
+
case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=(*v).r; break;\\
|
367 |
+
case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r; break;\\
|
368 |
+
case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r;\\
|
369 |
+
*(npy_longdouble *)(PyArray_DATA(arr)+sizeof(npy_longdouble))=(*v).i;\\
|
370 |
+
break;\\
|
371 |
+
case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;\\
|
372 |
+
default: return -2;\\
|
373 |
+
};\\
|
374 |
+
return -1;
|
375 |
+
"""
|
376 |
+
# cppmacros['NUMFROMARROBJ']="""
|
377 |
+
# define NUMFROMARROBJ(typenum,ctype) \\
|
378 |
+
# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
|
379 |
+
# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
|
380 |
+
# if (arr) {\\
|
381 |
+
# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
|
382 |
+
# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
|
383 |
+
# goto capi_fail;\\
|
384 |
+
# } else {\\
|
385 |
+
# (PyArray_DESCR(arr)->cast[typenum])(PyArray_DATA(arr),1,(char*)v,1,1);\\
|
386 |
+
# }\\
|
387 |
+
# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
|
388 |
+
# return 1;\\
|
389 |
+
# }
|
390 |
+
# """
|
391 |
+
# XXX: Note that CNUMFROMARROBJ is identical with NUMFROMARROBJ
|
392 |
+
# cppmacros['CNUMFROMARROBJ']="""
|
393 |
+
# define CNUMFROMARROBJ(typenum,ctype) \\
|
394 |
+
# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
|
395 |
+
# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
|
396 |
+
# if (arr) {\\
|
397 |
+
# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
|
398 |
+
# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
|
399 |
+
# goto capi_fail;\\
|
400 |
+
# } else {\\
|
401 |
+
# (PyArray_DESCR(arr)->cast[typenum])((void *)(PyArray_DATA(arr)),1,(void *)(v),1,1);\\
|
402 |
+
# }\\
|
403 |
+
# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
|
404 |
+
# return 1;\\
|
405 |
+
# }
|
406 |
+
# """
|
407 |
+
|
408 |
+
|
409 |
+
needs['GETSTRFROMPYTUPLE'] = ['STRINGCOPYN', 'PRINTPYOBJERR']
|
410 |
+
cppmacros['GETSTRFROMPYTUPLE'] = """
|
411 |
+
#define GETSTRFROMPYTUPLE(tuple,index,str,len) {\\
|
412 |
+
PyObject *rv_cb_str = PyTuple_GetItem((tuple),(index));\\
|
413 |
+
if (rv_cb_str == NULL)\\
|
414 |
+
goto capi_fail;\\
|
415 |
+
if (PyBytes_Check(rv_cb_str)) {\\
|
416 |
+
str[len-1]='\\0';\\
|
417 |
+
STRINGCOPYN((str),PyBytes_AS_STRING((PyBytesObject*)rv_cb_str),(len));\\
|
418 |
+
} else {\\
|
419 |
+
PRINTPYOBJERR(rv_cb_str);\\
|
420 |
+
PyErr_SetString(#modulename#_error,\"string object expected\");\\
|
421 |
+
goto capi_fail;\\
|
422 |
+
}\\
|
423 |
+
}
|
424 |
+
"""
|
425 |
+
cppmacros['GETSCALARFROMPYTUPLE'] = """
|
426 |
+
#define GETSCALARFROMPYTUPLE(tuple,index,var,ctype,mess) {\\
|
427 |
+
if ((capi_tmp = PyTuple_GetItem((tuple),(index)))==NULL) goto capi_fail;\\
|
428 |
+
if (!(ctype ## _from_pyobj((var),capi_tmp,mess)))\\
|
429 |
+
goto capi_fail;\\
|
430 |
+
}
|
431 |
+
"""
|
432 |
+
|
433 |
+
cppmacros['FAILNULL'] = """\
|
434 |
+
#define FAILNULL(p) do { \\
|
435 |
+
if ((p) == NULL) { \\
|
436 |
+
PyErr_SetString(PyExc_MemoryError, "NULL pointer found"); \\
|
437 |
+
goto capi_fail; \\
|
438 |
+
} \\
|
439 |
+
} while (0)
|
440 |
+
"""
|
441 |
+
needs['MEMCOPY'] = ['string.h', 'FAILNULL']
|
442 |
+
cppmacros['MEMCOPY'] = """
|
443 |
+
#define MEMCOPY(to,from,n)\\
|
444 |
+
do { FAILNULL(to); FAILNULL(from); (void)memcpy(to,from,n); } while (0)
|
445 |
+
"""
|
446 |
+
cppmacros['STRINGMALLOC'] = """
|
447 |
+
#define STRINGMALLOC(str,len)\\
|
448 |
+
if ((str = (string)malloc(len+1)) == NULL) {\\
|
449 |
+
PyErr_SetString(PyExc_MemoryError, \"out of memory\");\\
|
450 |
+
goto capi_fail;\\
|
451 |
+
} else {\\
|
452 |
+
(str)[len] = '\\0';\\
|
453 |
+
}
|
454 |
+
"""
|
455 |
+
cppmacros['STRINGFREE'] = """
|
456 |
+
#define STRINGFREE(str) do {if (!(str == NULL)) free(str);} while (0)
|
457 |
+
"""
|
458 |
+
needs['STRINGPADN'] = ['string.h']
|
459 |
+
cppmacros['STRINGPADN'] = """
|
460 |
+
/*
|
461 |
+
STRINGPADN replaces null values with padding values from the right.
|
462 |
+
|
463 |
+
`to` must have size of at least N bytes.
|
464 |
+
|
465 |
+
If the `to[N-1]` has null value, then replace it and all the
|
466 |
+
preceding, nulls with the given padding.
|
467 |
+
|
468 |
+
STRINGPADN(to, N, PADDING, NULLVALUE) is an inverse operation.
|
469 |
+
*/
|
470 |
+
#define STRINGPADN(to, N, NULLVALUE, PADDING) \\
|
471 |
+
do { \\
|
472 |
+
int _m = (N); \\
|
473 |
+
char *_to = (to); \\
|
474 |
+
for (_m -= 1; _m >= 0 && _to[_m] == NULLVALUE; _m--) { \\
|
475 |
+
_to[_m] = PADDING; \\
|
476 |
+
} \\
|
477 |
+
} while (0)
|
478 |
+
"""
|
479 |
+
needs['STRINGCOPYN'] = ['string.h', 'FAILNULL']
|
480 |
+
cppmacros['STRINGCOPYN'] = """
|
481 |
+
/*
|
482 |
+
STRINGCOPYN copies N bytes.
|
483 |
+
|
484 |
+
`to` and `from` buffers must have sizes of at least N bytes.
|
485 |
+
*/
|
486 |
+
#define STRINGCOPYN(to,from,N) \\
|
487 |
+
do { \\
|
488 |
+
int _m = (N); \\
|
489 |
+
char *_to = (to); \\
|
490 |
+
char *_from = (from); \\
|
491 |
+
FAILNULL(_to); FAILNULL(_from); \\
|
492 |
+
(void)strncpy(_to, _from, _m); \\
|
493 |
+
} while (0)
|
494 |
+
"""
|
495 |
+
needs['STRINGCOPY'] = ['string.h', 'FAILNULL']
|
496 |
+
cppmacros['STRINGCOPY'] = """
|
497 |
+
#define STRINGCOPY(to,from)\\
|
498 |
+
do { FAILNULL(to); FAILNULL(from); (void)strcpy(to,from); } while (0)
|
499 |
+
"""
|
500 |
+
cppmacros['CHECKGENERIC'] = """
|
501 |
+
#define CHECKGENERIC(check,tcheck,name) \\
|
502 |
+
if (!(check)) {\\
|
503 |
+
PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
|
504 |
+
/*goto capi_fail;*/\\
|
505 |
+
} else """
|
506 |
+
cppmacros['CHECKARRAY'] = """
|
507 |
+
#define CHECKARRAY(check,tcheck,name) \\
|
508 |
+
if (!(check)) {\\
|
509 |
+
PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
|
510 |
+
/*goto capi_fail;*/\\
|
511 |
+
} else """
|
512 |
+
cppmacros['CHECKSTRING'] = """
|
513 |
+
#define CHECKSTRING(check,tcheck,name,show,var)\\
|
514 |
+
if (!(check)) {\\
|
515 |
+
char errstring[256];\\
|
516 |
+
sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, slen(var), var);\\
|
517 |
+
PyErr_SetString(#modulename#_error, errstring);\\
|
518 |
+
/*goto capi_fail;*/\\
|
519 |
+
} else """
|
520 |
+
cppmacros['CHECKSCALAR'] = """
|
521 |
+
#define CHECKSCALAR(check,tcheck,name,show,var)\\
|
522 |
+
if (!(check)) {\\
|
523 |
+
char errstring[256];\\
|
524 |
+
sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, var);\\
|
525 |
+
PyErr_SetString(#modulename#_error,errstring);\\
|
526 |
+
/*goto capi_fail;*/\\
|
527 |
+
} else """
|
528 |
+
# cppmacros['CHECKDIMS']="""
|
529 |
+
# define CHECKDIMS(dims,rank) \\
|
530 |
+
# for (int i=0;i<(rank);i++)\\
|
531 |
+
# if (dims[i]<0) {\\
|
532 |
+
# fprintf(stderr,\"Unspecified array argument requires a complete dimension specification.\\n\");\\
|
533 |
+
# goto capi_fail;\\
|
534 |
+
# }
|
535 |
+
# """
|
536 |
+
cppmacros[
|
537 |
+
'ARRSIZE'] = '#define ARRSIZE(dims,rank) (_PyArray_multiply_list(dims,rank))'
|
538 |
+
cppmacros['OLDPYNUM'] = """
|
539 |
+
#ifdef OLDPYNUM
|
540 |
+
#error You need to install NumPy version 0.13 or higher. See https://scipy.org/install.html
|
541 |
+
#endif
|
542 |
+
"""
|
543 |
+
cppmacros["F2PY_THREAD_LOCAL_DECL"] = """
|
544 |
+
#ifndef F2PY_THREAD_LOCAL_DECL
|
545 |
+
#if defined(_MSC_VER)
|
546 |
+
#define F2PY_THREAD_LOCAL_DECL __declspec(thread)
|
547 |
+
#elif defined(NPY_OS_MINGW)
|
548 |
+
#define F2PY_THREAD_LOCAL_DECL __thread
|
549 |
+
#elif defined(__STDC_VERSION__) \\
|
550 |
+
&& (__STDC_VERSION__ >= 201112L) \\
|
551 |
+
&& !defined(__STDC_NO_THREADS__) \\
|
552 |
+
&& (!defined(__GLIBC__) || __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 12)) \\
|
553 |
+
&& !defined(NPY_OS_OPENBSD) && !defined(NPY_OS_HAIKU)
|
554 |
+
/* __STDC_NO_THREADS__ was first defined in a maintenance release of glibc 2.12,
|
555 |
+
see https://lists.gnu.org/archive/html/commit-hurd/2012-07/msg00180.html,
|
556 |
+
so `!defined(__STDC_NO_THREADS__)` may give false positive for the existence
|
557 |
+
of `threads.h` when using an older release of glibc 2.12
|
558 |
+
See gh-19437 for details on OpenBSD */
|
559 |
+
#include <threads.h>
|
560 |
+
#define F2PY_THREAD_LOCAL_DECL thread_local
|
561 |
+
#elif defined(__GNUC__) \\
|
562 |
+
&& (__GNUC__ > 4 || (__GNUC__ == 4 && (__GNUC_MINOR__ >= 4)))
|
563 |
+
#define F2PY_THREAD_LOCAL_DECL __thread
|
564 |
+
#endif
|
565 |
+
#endif
|
566 |
+
"""
|
567 |
+
################# C functions ###############
|
568 |
+
|
569 |
+
cfuncs['calcarrindex'] = """
|
570 |
+
static int calcarrindex(int *i,PyArrayObject *arr) {
|
571 |
+
int k,ii = i[0];
|
572 |
+
for (k=1; k < PyArray_NDIM(arr); k++)
|
573 |
+
ii += (ii*(PyArray_DIM(arr,k) - 1)+i[k]); /* assuming contiguous arr */
|
574 |
+
return ii;
|
575 |
+
}"""
|
576 |
+
cfuncs['calcarrindextr'] = """
|
577 |
+
static int calcarrindextr(int *i,PyArrayObject *arr) {
|
578 |
+
int k,ii = i[PyArray_NDIM(arr)-1];
|
579 |
+
for (k=1; k < PyArray_NDIM(arr); k++)
|
580 |
+
ii += (ii*(PyArray_DIM(arr,PyArray_NDIM(arr)-k-1) - 1)+i[PyArray_NDIM(arr)-k-1]); /* assuming contiguous arr */
|
581 |
+
return ii;
|
582 |
+
}"""
|
583 |
+
cfuncs['forcomb'] = """
|
584 |
+
static struct { int nd;npy_intp *d;int *i,*i_tr,tr; } forcombcache;
|
585 |
+
static int initforcomb(npy_intp *dims,int nd,int tr) {
|
586 |
+
int k;
|
587 |
+
if (dims==NULL) return 0;
|
588 |
+
if (nd<0) return 0;
|
589 |
+
forcombcache.nd = nd;
|
590 |
+
forcombcache.d = dims;
|
591 |
+
forcombcache.tr = tr;
|
592 |
+
if ((forcombcache.i = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
|
593 |
+
if ((forcombcache.i_tr = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
|
594 |
+
for (k=1;k<nd;k++) {
|
595 |
+
forcombcache.i[k] = forcombcache.i_tr[nd-k-1] = 0;
|
596 |
+
}
|
597 |
+
forcombcache.i[0] = forcombcache.i_tr[nd-1] = -1;
|
598 |
+
return 1;
|
599 |
+
}
|
600 |
+
static int *nextforcomb(void) {
|
601 |
+
int j,*i,*i_tr,k;
|
602 |
+
int nd=forcombcache.nd;
|
603 |
+
if ((i=forcombcache.i) == NULL) return NULL;
|
604 |
+
if ((i_tr=forcombcache.i_tr) == NULL) return NULL;
|
605 |
+
if (forcombcache.d == NULL) return NULL;
|
606 |
+
i[0]++;
|
607 |
+
if (i[0]==forcombcache.d[0]) {
|
608 |
+
j=1;
|
609 |
+
while ((j<nd) && (i[j]==forcombcache.d[j]-1)) j++;
|
610 |
+
if (j==nd) {
|
611 |
+
free(i);
|
612 |
+
free(i_tr);
|
613 |
+
return NULL;
|
614 |
+
}
|
615 |
+
for (k=0;k<j;k++) i[k] = i_tr[nd-k-1] = 0;
|
616 |
+
i[j]++;
|
617 |
+
i_tr[nd-j-1]++;
|
618 |
+
} else
|
619 |
+
i_tr[nd-1]++;
|
620 |
+
if (forcombcache.tr) return i_tr;
|
621 |
+
return i;
|
622 |
+
}"""
|
623 |
+
needs['try_pyarr_from_string'] = ['STRINGCOPYN', 'PRINTPYOBJERR', 'string']
|
624 |
+
cfuncs['try_pyarr_from_string'] = """
|
625 |
+
/*
|
626 |
+
try_pyarr_from_string copies str[:len(obj)] to the data of an `ndarray`.
|
627 |
+
|
628 |
+
If obj is an `ndarray`, it is assumed to be contiguous.
|
629 |
+
|
630 |
+
If the specified len==-1, str must be null-terminated.
|
631 |
+
*/
|
632 |
+
static int try_pyarr_from_string(PyObject *obj,
|
633 |
+
const string str, const int len) {
|
634 |
+
#ifdef DEBUGCFUNCS
|
635 |
+
fprintf(stderr, "try_pyarr_from_string(str='%s', len=%d, obj=%p)\\n",
|
636 |
+
(char*)str,len, obj);
|
637 |
+
#endif
|
638 |
+
if (!obj) return -2; /* Object missing */
|
639 |
+
if (obj == Py_None) return -1; /* None */
|
640 |
+
if (!PyArray_Check(obj)) goto capi_fail; /* not an ndarray */
|
641 |
+
if (PyArray_Check(obj)) {
|
642 |
+
PyArrayObject *arr = (PyArrayObject *)obj;
|
643 |
+
assert(ISCONTIGUOUS(arr));
|
644 |
+
string buf = PyArray_DATA(arr);
|
645 |
+
npy_intp n = len;
|
646 |
+
if (n == -1) {
|
647 |
+
/* Assuming null-terminated str. */
|
648 |
+
n = strlen(str);
|
649 |
+
}
|
650 |
+
if (n > PyArray_NBYTES(arr)) {
|
651 |
+
n = PyArray_NBYTES(arr);
|
652 |
+
}
|
653 |
+
STRINGCOPYN(buf, str, n);
|
654 |
+
return 1;
|
655 |
+
}
|
656 |
+
capi_fail:
|
657 |
+
PRINTPYOBJERR(obj);
|
658 |
+
PyErr_SetString(#modulename#_error, \"try_pyarr_from_string failed\");
|
659 |
+
return 0;
|
660 |
+
}
|
661 |
+
"""
|
662 |
+
needs['string_from_pyobj'] = ['string', 'STRINGMALLOC', 'STRINGCOPYN']
|
663 |
+
cfuncs['string_from_pyobj'] = """
|
664 |
+
/*
|
665 |
+
Create a new string buffer `str` of at most length `len` from a
|
666 |
+
Python string-like object `obj`.
|
667 |
+
|
668 |
+
The string buffer has given size (len) or the size of inistr when len==-1.
|
669 |
+
|
670 |
+
The string buffer is padded with blanks: in Fortran, trailing blanks
|
671 |
+
are insignificant contrary to C nulls.
|
672 |
+
*/
|
673 |
+
static int
|
674 |
+
string_from_pyobj(string *str, int *len, const string inistr, PyObject *obj,
|
675 |
+
const char *errmess)
|
676 |
+
{
|
677 |
+
PyObject *tmp = NULL;
|
678 |
+
string buf = NULL;
|
679 |
+
npy_intp n = -1;
|
680 |
+
#ifdef DEBUGCFUNCS
|
681 |
+
fprintf(stderr,\"string_from_pyobj(str='%s',len=%d,inistr='%s',obj=%p)\\n\",
|
682 |
+
(char*)str, *len, (char *)inistr, obj);
|
683 |
+
#endif
|
684 |
+
if (obj == Py_None) {
|
685 |
+
n = strlen(inistr);
|
686 |
+
buf = inistr;
|
687 |
+
}
|
688 |
+
else if (PyArray_Check(obj)) {
|
689 |
+
PyArrayObject *arr = (PyArrayObject *)obj;
|
690 |
+
if (!ISCONTIGUOUS(arr)) {
|
691 |
+
PyErr_SetString(PyExc_ValueError,
|
692 |
+
\"array object is non-contiguous.\");
|
693 |
+
goto capi_fail;
|
694 |
+
}
|
695 |
+
n = PyArray_NBYTES(arr);
|
696 |
+
buf = PyArray_DATA(arr);
|
697 |
+
n = strnlen(buf, n);
|
698 |
+
}
|
699 |
+
else {
|
700 |
+
if (PyBytes_Check(obj)) {
|
701 |
+
tmp = obj;
|
702 |
+
Py_INCREF(tmp);
|
703 |
+
}
|
704 |
+
else if (PyUnicode_Check(obj)) {
|
705 |
+
tmp = PyUnicode_AsASCIIString(obj);
|
706 |
+
}
|
707 |
+
else {
|
708 |
+
PyObject *tmp2;
|
709 |
+
tmp2 = PyObject_Str(obj);
|
710 |
+
if (tmp2) {
|
711 |
+
tmp = PyUnicode_AsASCIIString(tmp2);
|
712 |
+
Py_DECREF(tmp2);
|
713 |
+
}
|
714 |
+
else {
|
715 |
+
tmp = NULL;
|
716 |
+
}
|
717 |
+
}
|
718 |
+
if (tmp == NULL) goto capi_fail;
|
719 |
+
n = PyBytes_GET_SIZE(tmp);
|
720 |
+
buf = PyBytes_AS_STRING(tmp);
|
721 |
+
}
|
722 |
+
if (*len == -1) {
|
723 |
+
/* TODO: change the type of `len` so that we can remove this */
|
724 |
+
if (n > NPY_MAX_INT) {
|
725 |
+
PyErr_SetString(PyExc_OverflowError,
|
726 |
+
"object too large for a 32-bit int");
|
727 |
+
goto capi_fail;
|
728 |
+
}
|
729 |
+
*len = n;
|
730 |
+
}
|
731 |
+
else if (*len < n) {
|
732 |
+
/* discard the last (len-n) bytes of input buf */
|
733 |
+
n = *len;
|
734 |
+
}
|
735 |
+
if (n < 0 || *len < 0 || buf == NULL) {
|
736 |
+
goto capi_fail;
|
737 |
+
}
|
738 |
+
STRINGMALLOC(*str, *len); // *str is allocated with size (*len + 1)
|
739 |
+
if (n < *len) {
|
740 |
+
/*
|
741 |
+
Pad fixed-width string with nulls. The caller will replace
|
742 |
+
nulls with blanks when the corresponding argument is not
|
743 |
+
intent(c).
|
744 |
+
*/
|
745 |
+
memset(*str + n, '\\0', *len - n);
|
746 |
+
}
|
747 |
+
STRINGCOPYN(*str, buf, n);
|
748 |
+
Py_XDECREF(tmp);
|
749 |
+
return 1;
|
750 |
+
capi_fail:
|
751 |
+
Py_XDECREF(tmp);
|
752 |
+
{
|
753 |
+
PyObject* err = PyErr_Occurred();
|
754 |
+
if (err == NULL) {
|
755 |
+
err = #modulename#_error;
|
756 |
+
}
|
757 |
+
PyErr_SetString(err, errmess);
|
758 |
+
}
|
759 |
+
return 0;
|
760 |
+
}
|
761 |
+
"""
|
762 |
+
|
763 |
+
cfuncs['character_from_pyobj'] = """
|
764 |
+
static int
|
765 |
+
character_from_pyobj(character* v, PyObject *obj, const char *errmess) {
|
766 |
+
if (PyBytes_Check(obj)) {
|
767 |
+
/* empty bytes has trailing null, so dereferencing is always safe */
|
768 |
+
*v = PyBytes_AS_STRING(obj)[0];
|
769 |
+
return 1;
|
770 |
+
} else if (PyUnicode_Check(obj)) {
|
771 |
+
PyObject* tmp = PyUnicode_AsASCIIString(obj);
|
772 |
+
if (tmp != NULL) {
|
773 |
+
*v = PyBytes_AS_STRING(tmp)[0];
|
774 |
+
Py_DECREF(tmp);
|
775 |
+
return 1;
|
776 |
+
}
|
777 |
+
} else if (PyArray_Check(obj)) {
|
778 |
+
PyArrayObject* arr = (PyArrayObject*)obj;
|
779 |
+
if (F2PY_ARRAY_IS_CHARACTER_COMPATIBLE(arr)) {
|
780 |
+
*v = PyArray_BYTES(arr)[0];
|
781 |
+
return 1;
|
782 |
+
} else if (F2PY_IS_UNICODE_ARRAY(arr)) {
|
783 |
+
// TODO: update when numpy will support 1-byte and
|
784 |
+
// 2-byte unicode dtypes
|
785 |
+
PyObject* tmp = PyUnicode_FromKindAndData(
|
786 |
+
PyUnicode_4BYTE_KIND,
|
787 |
+
PyArray_BYTES(arr),
|
788 |
+
(PyArray_NBYTES(arr)>0?1:0));
|
789 |
+
if (tmp != NULL) {
|
790 |
+
if (character_from_pyobj(v, tmp, errmess)) {
|
791 |
+
Py_DECREF(tmp);
|
792 |
+
return 1;
|
793 |
+
}
|
794 |
+
Py_DECREF(tmp);
|
795 |
+
}
|
796 |
+
}
|
797 |
+
} else if (PySequence_Check(obj)) {
|
798 |
+
PyObject* tmp = PySequence_GetItem(obj,0);
|
799 |
+
if (tmp != NULL) {
|
800 |
+
if (character_from_pyobj(v, tmp, errmess)) {
|
801 |
+
Py_DECREF(tmp);
|
802 |
+
return 1;
|
803 |
+
}
|
804 |
+
Py_DECREF(tmp);
|
805 |
+
}
|
806 |
+
}
|
807 |
+
{
|
808 |
+
/* TODO: This error (and most other) error handling needs cleaning. */
|
809 |
+
char mess[F2PY_MESSAGE_BUFFER_SIZE];
|
810 |
+
strcpy(mess, errmess);
|
811 |
+
PyObject* err = PyErr_Occurred();
|
812 |
+
if (err == NULL) {
|
813 |
+
err = PyExc_TypeError;
|
814 |
+
Py_INCREF(err);
|
815 |
+
}
|
816 |
+
else {
|
817 |
+
Py_INCREF(err);
|
818 |
+
PyErr_Clear();
|
819 |
+
}
|
820 |
+
sprintf(mess + strlen(mess),
|
821 |
+
" -- expected str|bytes|sequence-of-str-or-bytes, got ");
|
822 |
+
f2py_describe(obj, mess + strlen(mess));
|
823 |
+
PyErr_SetString(err, mess);
|
824 |
+
Py_DECREF(err);
|
825 |
+
}
|
826 |
+
return 0;
|
827 |
+
}
|
828 |
+
"""
|

# TODO: These should be dynamically generated, too many mapped to int things,
# see note in _isocbind.py
needs['char_from_pyobj'] = ['int_from_pyobj']
cfuncs['char_from_pyobj'] = """
static int
char_from_pyobj(char* v, PyObject *obj, const char *errmess) {
    int i = 0;
    if (int_from_pyobj(&i, obj, errmess)) {
        *v = (char)i;
        return 1;
    }
    return 0;
}
"""


needs['signed_char_from_pyobj'] = ['int_from_pyobj', 'signed_char']
cfuncs['signed_char_from_pyobj'] = """
static int
signed_char_from_pyobj(signed_char* v, PyObject *obj, const char *errmess) {
    int i = 0;
    if (int_from_pyobj(&i, obj, errmess)) {
        *v = (signed_char)i;
        return 1;
    }
    return 0;
}
"""


needs['short_from_pyobj'] = ['int_from_pyobj']
cfuncs['short_from_pyobj'] = """
static int
short_from_pyobj(short* v, PyObject *obj, const char *errmess) {
    int i = 0;
    if (int_from_pyobj(&i, obj, errmess)) {
        *v = (short)i;
        return 1;
    }
    return 0;
}
"""

cfuncs['int_from_pyobj'] = """
static int
int_from_pyobj(int* v, PyObject *obj, const char *errmess)
{
    PyObject* tmp = NULL;

    if (PyLong_Check(obj)) {
        *v = Npy__PyLong_AsInt(obj);
        return !(*v == -1 && PyErr_Occurred());
    }

    tmp = PyNumber_Long(obj);
    if (tmp) {
        *v = Npy__PyLong_AsInt(tmp);
        Py_DECREF(tmp);
        return !(*v == -1 && PyErr_Occurred());
    }

    if (PyComplex_Check(obj)) {
        PyErr_Clear();
        tmp = PyObject_GetAttrString(obj,\"real\");
    }
    else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
        /*pass*/;
    }
    else if (PySequence_Check(obj)) {
        PyErr_Clear();
        tmp = PySequence_GetItem(obj, 0);
    }

    if (tmp) {
        if (int_from_pyobj(v, tmp, errmess)) {
            Py_DECREF(tmp);
            return 1;
        }
        Py_DECREF(tmp);
    }

    {
        PyObject* err = PyErr_Occurred();
        if (err == NULL) {
            err = #modulename#_error;
        }
        PyErr_SetString(err, errmess);
    }
    return 0;
}
"""
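# --- Reviewer sketch (illustration only; not used by f2py) -----------------
# The C helper above tries, in order: a plain PyLong, PyNumber_Long(), the
# `.real` part of a complex number, and the first item of a sequence.  A
# rough pure-Python equivalent of that cascade:
def _int_from_pyobj_sketch(obj):
    """Mimic the conversion order of int_from_pyobj (hypothetical helper)."""
    if isinstance(obj, (str, bytes)):
        # strings are rejected, mirroring the /*pass*/ branch above
        raise TypeError("expected a number, not a string")
    if isinstance(obj, complex):
        return _int_from_pyobj_sketch(obj.real)      # use the real part
    try:
        return int(obj)                              # PyLong / PyNumber_Long path
    except (TypeError, ValueError):
        pass
    try:
        return _int_from_pyobj_sketch(obj[0])        # first element of a sequence
    except (TypeError, IndexError, KeyError):
        pass
    raise TypeError("expected a number or a sequence of numbers")
# ---------------------------------------------------------------------------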
922 |
+
|
923 |
+
|
924 |
+
cfuncs['long_from_pyobj'] = """
|
925 |
+
static int
|
926 |
+
long_from_pyobj(long* v, PyObject *obj, const char *errmess) {
|
927 |
+
PyObject* tmp = NULL;
|
928 |
+
|
929 |
+
if (PyLong_Check(obj)) {
|
930 |
+
*v = PyLong_AsLong(obj);
|
931 |
+
return !(*v == -1 && PyErr_Occurred());
|
932 |
+
}
|
933 |
+
|
934 |
+
tmp = PyNumber_Long(obj);
|
935 |
+
if (tmp) {
|
936 |
+
*v = PyLong_AsLong(tmp);
|
937 |
+
Py_DECREF(tmp);
|
938 |
+
return !(*v == -1 && PyErr_Occurred());
|
939 |
+
}
|
940 |
+
|
941 |
+
if (PyComplex_Check(obj)) {
|
942 |
+
PyErr_Clear();
|
943 |
+
tmp = PyObject_GetAttrString(obj,\"real\");
|
944 |
+
}
|
945 |
+
else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
|
946 |
+
/*pass*/;
|
947 |
+
}
|
948 |
+
else if (PySequence_Check(obj)) {
|
949 |
+
PyErr_Clear();
|
950 |
+
tmp = PySequence_GetItem(obj, 0);
|
951 |
+
}
|
952 |
+
|
953 |
+
if (tmp) {
|
954 |
+
if (long_from_pyobj(v, tmp, errmess)) {
|
955 |
+
Py_DECREF(tmp);
|
956 |
+
return 1;
|
957 |
+
}
|
958 |
+
Py_DECREF(tmp);
|
959 |
+
}
|
960 |
+
{
|
961 |
+
PyObject* err = PyErr_Occurred();
|
962 |
+
if (err == NULL) {
|
963 |
+
err = #modulename#_error;
|
964 |
+
}
|
965 |
+
PyErr_SetString(err, errmess);
|
966 |
+
}
|
967 |
+
return 0;
|
968 |
+
}
|
969 |
+
"""
|
970 |
+
|
971 |
+
|
972 |
+
needs['long_long_from_pyobj'] = ['long_long']
|
973 |
+
cfuncs['long_long_from_pyobj'] = """
|
974 |
+
static int
|
975 |
+
long_long_from_pyobj(long_long* v, PyObject *obj, const char *errmess)
|
976 |
+
{
|
977 |
+
PyObject* tmp = NULL;
|
978 |
+
|
979 |
+
if (PyLong_Check(obj)) {
|
980 |
+
*v = PyLong_AsLongLong(obj);
|
981 |
+
return !(*v == -1 && PyErr_Occurred());
|
982 |
+
}
|
983 |
+
|
984 |
+
tmp = PyNumber_Long(obj);
|
985 |
+
if (tmp) {
|
986 |
+
*v = PyLong_AsLongLong(tmp);
|
987 |
+
Py_DECREF(tmp);
|
988 |
+
return !(*v == -1 && PyErr_Occurred());
|
989 |
+
}
|
990 |
+
|
991 |
+
if (PyComplex_Check(obj)) {
|
992 |
+
PyErr_Clear();
|
993 |
+
tmp = PyObject_GetAttrString(obj,\"real\");
|
994 |
+
}
|
995 |
+
else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
|
996 |
+
/*pass*/;
|
997 |
+
}
|
998 |
+
else if (PySequence_Check(obj)) {
|
999 |
+
PyErr_Clear();
|
1000 |
+
tmp = PySequence_GetItem(obj, 0);
|
1001 |
+
}
|
1002 |
+
|
1003 |
+
if (tmp) {
|
1004 |
+
if (long_long_from_pyobj(v, tmp, errmess)) {
|
1005 |
+
Py_DECREF(tmp);
|
1006 |
+
return 1;
|
1007 |
+
}
|
1008 |
+
Py_DECREF(tmp);
|
1009 |
+
}
|
1010 |
+
{
|
1011 |
+
PyObject* err = PyErr_Occurred();
|
1012 |
+
if (err == NULL) {
|
1013 |
+
err = #modulename#_error;
|
1014 |
+
}
|
1015 |
+
PyErr_SetString(err,errmess);
|
1016 |
+
}
|
1017 |
+
return 0;
|
1018 |
+
}
|
1019 |
+
"""
|

needs['long_double_from_pyobj'] = ['double_from_pyobj', 'long_double']
cfuncs['long_double_from_pyobj'] = """
static int
long_double_from_pyobj(long_double* v, PyObject *obj, const char *errmess)
{
    double d=0;
    if (PyArray_CheckScalar(obj)){
        if PyArray_IsScalar(obj, LongDouble) {
            PyArray_ScalarAsCtype(obj, v);
            return 1;
        }
        else if (PyArray_Check(obj) && PyArray_TYPE(obj) == NPY_LONGDOUBLE) {
            (*v) = *((npy_longdouble *)PyArray_DATA(obj));
            return 1;
        }
    }
    if (double_from_pyobj(&d, obj, errmess)) {
        *v = (long_double)d;
        return 1;
    }
    return 0;
}
"""
1045 |
+
|
1046 |
+
|
1047 |
+
cfuncs['double_from_pyobj'] = """
|
1048 |
+
static int
|
1049 |
+
double_from_pyobj(double* v, PyObject *obj, const char *errmess)
|
1050 |
+
{
|
1051 |
+
PyObject* tmp = NULL;
|
1052 |
+
if (PyFloat_Check(obj)) {
|
1053 |
+
*v = PyFloat_AsDouble(obj);
|
1054 |
+
return !(*v == -1.0 && PyErr_Occurred());
|
1055 |
+
}
|
1056 |
+
|
1057 |
+
tmp = PyNumber_Float(obj);
|
1058 |
+
if (tmp) {
|
1059 |
+
*v = PyFloat_AsDouble(tmp);
|
1060 |
+
Py_DECREF(tmp);
|
1061 |
+
return !(*v == -1.0 && PyErr_Occurred());
|
1062 |
+
}
|
1063 |
+
|
1064 |
+
if (PyComplex_Check(obj)) {
|
1065 |
+
PyErr_Clear();
|
1066 |
+
tmp = PyObject_GetAttrString(obj,\"real\");
|
1067 |
+
}
|
1068 |
+
else if (PyBytes_Check(obj) || PyUnicode_Check(obj)) {
|
1069 |
+
/*pass*/;
|
1070 |
+
}
|
1071 |
+
else if (PySequence_Check(obj)) {
|
1072 |
+
PyErr_Clear();
|
1073 |
+
tmp = PySequence_GetItem(obj, 0);
|
1074 |
+
}
|
1075 |
+
|
1076 |
+
if (tmp) {
|
1077 |
+
if (double_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
|
1078 |
+
Py_DECREF(tmp);
|
1079 |
+
}
|
1080 |
+
{
|
1081 |
+
PyObject* err = PyErr_Occurred();
|
1082 |
+
if (err==NULL) err = #modulename#_error;
|
1083 |
+
PyErr_SetString(err,errmess);
|
1084 |
+
}
|
1085 |
+
return 0;
|
1086 |
+
}
|
1087 |
+
"""
|
1088 |
+
|
1089 |
+
|
1090 |
+
needs['float_from_pyobj'] = ['double_from_pyobj']
|
1091 |
+
cfuncs['float_from_pyobj'] = """
|
1092 |
+
static int
|
1093 |
+
float_from_pyobj(float* v, PyObject *obj, const char *errmess)
|
1094 |
+
{
|
1095 |
+
double d=0.0;
|
1096 |
+
if (double_from_pyobj(&d,obj,errmess)) {
|
1097 |
+
*v = (float)d;
|
1098 |
+
return 1;
|
1099 |
+
}
|
1100 |
+
return 0;
|
1101 |
+
}
|
1102 |
+
"""
|
1103 |
+
|
1104 |
+
|
1105 |
+
needs['complex_long_double_from_pyobj'] = ['complex_long_double', 'long_double',
|
1106 |
+
'complex_double_from_pyobj', 'npy_math.h']
|
1107 |
+
cfuncs['complex_long_double_from_pyobj'] = """
|
1108 |
+
static int
|
1109 |
+
complex_long_double_from_pyobj(complex_long_double* v, PyObject *obj, const char *errmess)
|
1110 |
+
{
|
1111 |
+
complex_double cd = {0.0,0.0};
|
1112 |
+
if (PyArray_CheckScalar(obj)){
|
1113 |
+
if PyArray_IsScalar(obj, CLongDouble) {
|
1114 |
+
PyArray_ScalarAsCtype(obj, v);
|
1115 |
+
return 1;
|
1116 |
+
}
|
1117 |
+
else if (PyArray_Check(obj) && PyArray_TYPE(obj)==NPY_CLONGDOUBLE) {
|
1118 |
+
(*v).r = npy_creall(*(((npy_clongdouble *)PyArray_DATA(obj))));
|
1119 |
+
(*v).i = npy_cimagl(*(((npy_clongdouble *)PyArray_DATA(obj))));
|
1120 |
+
return 1;
|
1121 |
+
}
|
1122 |
+
}
|
1123 |
+
if (complex_double_from_pyobj(&cd,obj,errmess)) {
|
1124 |
+
(*v).r = (long_double)cd.r;
|
1125 |
+
(*v).i = (long_double)cd.i;
|
1126 |
+
return 1;
|
1127 |
+
}
|
1128 |
+
return 0;
|
1129 |
+
}
|
1130 |
+
"""
|
1131 |
+
|
1132 |
+
|
1133 |
+
needs['complex_double_from_pyobj'] = ['complex_double', 'npy_math.h']
|
1134 |
+
cfuncs['complex_double_from_pyobj'] = """
|
1135 |
+
static int
|
1136 |
+
complex_double_from_pyobj(complex_double* v, PyObject *obj, const char *errmess) {
|
1137 |
+
Py_complex c;
|
1138 |
+
if (PyComplex_Check(obj)) {
|
1139 |
+
c = PyComplex_AsCComplex(obj);
|
1140 |
+
(*v).r = c.real;
|
1141 |
+
(*v).i = c.imag;
|
1142 |
+
return 1;
|
1143 |
+
}
|
1144 |
+
if (PyArray_IsScalar(obj, ComplexFloating)) {
|
1145 |
+
if (PyArray_IsScalar(obj, CFloat)) {
|
1146 |
+
npy_cfloat new;
|
1147 |
+
PyArray_ScalarAsCtype(obj, &new);
|
1148 |
+
(*v).r = (double)npy_crealf(new);
|
1149 |
+
(*v).i = (double)npy_cimagf(new);
|
1150 |
+
}
|
1151 |
+
else if (PyArray_IsScalar(obj, CLongDouble)) {
|
1152 |
+
npy_clongdouble new;
|
1153 |
+
PyArray_ScalarAsCtype(obj, &new);
|
1154 |
+
(*v).r = (double)npy_creall(new);
|
1155 |
+
(*v).i = (double)npy_cimagl(new);
|
1156 |
+
}
|
1157 |
+
else { /* if (PyArray_IsScalar(obj, CDouble)) */
|
1158 |
+
PyArray_ScalarAsCtype(obj, v);
|
1159 |
+
}
|
1160 |
+
return 1;
|
1161 |
+
}
|
1162 |
+
if (PyArray_CheckScalar(obj)) { /* 0-dim array or still array scalar */
|
1163 |
+
PyArrayObject *arr;
|
1164 |
+
if (PyArray_Check(obj)) {
|
1165 |
+
arr = (PyArrayObject *)PyArray_Cast((PyArrayObject *)obj, NPY_CDOUBLE);
|
1166 |
+
}
|
1167 |
+
else {
|
1168 |
+
arr = (PyArrayObject *)PyArray_FromScalar(obj, PyArray_DescrFromType(NPY_CDOUBLE));
|
1169 |
+
}
|
1170 |
+
if (arr == NULL) {
|
1171 |
+
return 0;
|
1172 |
+
}
|
1173 |
+
(*v).r = npy_creal(*(((npy_cdouble *)PyArray_DATA(arr))));
|
1174 |
+
(*v).i = npy_cimag(*(((npy_cdouble *)PyArray_DATA(arr))));
|
1175 |
+
Py_DECREF(arr);
|
1176 |
+
return 1;
|
1177 |
+
}
|
1178 |
+
/* Python does not provide PyNumber_Complex function :-( */
|
1179 |
+
(*v).i = 0.0;
|
1180 |
+
if (PyFloat_Check(obj)) {
|
1181 |
+
(*v).r = PyFloat_AsDouble(obj);
|
1182 |
+
return !((*v).r == -1.0 && PyErr_Occurred());
|
1183 |
+
}
|
1184 |
+
if (PyLong_Check(obj)) {
|
1185 |
+
(*v).r = PyLong_AsDouble(obj);
|
1186 |
+
return !((*v).r == -1.0 && PyErr_Occurred());
|
1187 |
+
}
|
1188 |
+
if (PySequence_Check(obj) && !(PyBytes_Check(obj) || PyUnicode_Check(obj))) {
|
1189 |
+
PyObject *tmp = PySequence_GetItem(obj,0);
|
1190 |
+
if (tmp) {
|
1191 |
+
if (complex_double_from_pyobj(v,tmp,errmess)) {
|
1192 |
+
Py_DECREF(tmp);
|
1193 |
+
return 1;
|
1194 |
+
}
|
1195 |
+
Py_DECREF(tmp);
|
1196 |
+
}
|
1197 |
+
}
|
1198 |
+
{
|
1199 |
+
PyObject* err = PyErr_Occurred();
|
1200 |
+
if (err==NULL)
|
1201 |
+
err = PyExc_TypeError;
|
1202 |
+
PyErr_SetString(err,errmess);
|
1203 |
+
}
|
1204 |
+
return 0;
|
1205 |
+
}
|
1206 |
+
"""
|
1207 |
+
|
1208 |
+
|
1209 |
+
needs['complex_float_from_pyobj'] = [
|
1210 |
+
'complex_float', 'complex_double_from_pyobj']
|
1211 |
+
cfuncs['complex_float_from_pyobj'] = """
|
1212 |
+
static int
|
1213 |
+
complex_float_from_pyobj(complex_float* v,PyObject *obj,const char *errmess)
|
1214 |
+
{
|
1215 |
+
complex_double cd={0.0,0.0};
|
1216 |
+
if (complex_double_from_pyobj(&cd,obj,errmess)) {
|
1217 |
+
(*v).r = (float)cd.r;
|
1218 |
+
(*v).i = (float)cd.i;
|
1219 |
+
return 1;
|
1220 |
+
}
|
1221 |
+
return 0;
|
1222 |
+
}
|
1223 |
+
"""
|
1224 |
+
|
1225 |
+
|
1226 |
+
cfuncs['try_pyarr_from_character'] = """
|
1227 |
+
static int try_pyarr_from_character(PyObject* obj, character* v) {
|
1228 |
+
PyArrayObject *arr = (PyArrayObject*)obj;
|
1229 |
+
if (!obj) return -2;
|
1230 |
+
if (PyArray_Check(obj)) {
|
1231 |
+
if (F2PY_ARRAY_IS_CHARACTER_COMPATIBLE(arr)) {
|
1232 |
+
*(character *)(PyArray_DATA(arr)) = *v;
|
1233 |
+
return 1;
|
1234 |
+
}
|
1235 |
+
}
|
1236 |
+
{
|
1237 |
+
char mess[F2PY_MESSAGE_BUFFER_SIZE];
|
1238 |
+
PyObject* err = PyErr_Occurred();
|
1239 |
+
if (err == NULL) {
|
1240 |
+
err = PyExc_ValueError;
|
1241 |
+
strcpy(mess, "try_pyarr_from_character failed"
|
1242 |
+
" -- expected bytes array-scalar|array, got ");
|
1243 |
+
f2py_describe(obj, mess + strlen(mess));
|
1244 |
+
PyErr_SetString(err, mess);
|
1245 |
+
}
|
1246 |
+
}
|
1247 |
+
return 0;
|
1248 |
+
}
|
1249 |
+
"""
|
1250 |
+
|
1251 |
+
needs['try_pyarr_from_char'] = ['pyobj_from_char1', 'TRYPYARRAYTEMPLATE']
|
1252 |
+
cfuncs[
|
1253 |
+
'try_pyarr_from_char'] = 'static int try_pyarr_from_char(PyObject* obj,char* v) {\n TRYPYARRAYTEMPLATE(char,\'c\');\n}\n'
|
1254 |
+
needs['try_pyarr_from_signed_char'] = ['TRYPYARRAYTEMPLATE', 'unsigned_char']
|
1255 |
+
cfuncs[
|
1256 |
+
'try_pyarr_from_unsigned_char'] = 'static int try_pyarr_from_unsigned_char(PyObject* obj,unsigned_char* v) {\n TRYPYARRAYTEMPLATE(unsigned_char,\'b\');\n}\n'
|
1257 |
+
needs['try_pyarr_from_signed_char'] = ['TRYPYARRAYTEMPLATE', 'signed_char']
|
1258 |
+
cfuncs[
|
1259 |
+
'try_pyarr_from_signed_char'] = 'static int try_pyarr_from_signed_char(PyObject* obj,signed_char* v) {\n TRYPYARRAYTEMPLATE(signed_char,\'1\');\n}\n'
|
1260 |
+
needs['try_pyarr_from_short'] = ['pyobj_from_short1', 'TRYPYARRAYTEMPLATE']
|
1261 |
+
cfuncs[
|
1262 |
+
'try_pyarr_from_short'] = 'static int try_pyarr_from_short(PyObject* obj,short* v) {\n TRYPYARRAYTEMPLATE(short,\'s\');\n}\n'
|
1263 |
+
needs['try_pyarr_from_int'] = ['pyobj_from_int1', 'TRYPYARRAYTEMPLATE']
|
1264 |
+
cfuncs[
|
1265 |
+
'try_pyarr_from_int'] = 'static int try_pyarr_from_int(PyObject* obj,int* v) {\n TRYPYARRAYTEMPLATE(int,\'i\');\n}\n'
|
1266 |
+
needs['try_pyarr_from_long'] = ['pyobj_from_long1', 'TRYPYARRAYTEMPLATE']
|
1267 |
+
cfuncs[
|
1268 |
+
'try_pyarr_from_long'] = 'static int try_pyarr_from_long(PyObject* obj,long* v) {\n TRYPYARRAYTEMPLATE(long,\'l\');\n}\n'
|
1269 |
+
needs['try_pyarr_from_long_long'] = [
|
1270 |
+
'pyobj_from_long_long1', 'TRYPYARRAYTEMPLATE', 'long_long']
|
1271 |
+
cfuncs[
|
1272 |
+
'try_pyarr_from_long_long'] = 'static int try_pyarr_from_long_long(PyObject* obj,long_long* v) {\n TRYPYARRAYTEMPLATE(long_long,\'L\');\n}\n'
|
1273 |
+
needs['try_pyarr_from_float'] = ['pyobj_from_float1', 'TRYPYARRAYTEMPLATE']
|
1274 |
+
cfuncs[
|
1275 |
+
'try_pyarr_from_float'] = 'static int try_pyarr_from_float(PyObject* obj,float* v) {\n TRYPYARRAYTEMPLATE(float,\'f\');\n}\n'
|
1276 |
+
needs['try_pyarr_from_double'] = ['pyobj_from_double1', 'TRYPYARRAYTEMPLATE']
|
1277 |
+
cfuncs[
|
1278 |
+
'try_pyarr_from_double'] = 'static int try_pyarr_from_double(PyObject* obj,double* v) {\n TRYPYARRAYTEMPLATE(double,\'d\');\n}\n'
|
1279 |
+
needs['try_pyarr_from_complex_float'] = [
|
1280 |
+
'pyobj_from_complex_float1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_float']
|
1281 |
+
cfuncs[
|
1282 |
+
'try_pyarr_from_complex_float'] = 'static int try_pyarr_from_complex_float(PyObject* obj,complex_float* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(float,\'F\');\n}\n'
|
1283 |
+
needs['try_pyarr_from_complex_double'] = [
|
1284 |
+
'pyobj_from_complex_double1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_double']
|
1285 |
+
cfuncs[
|
1286 |
+
'try_pyarr_from_complex_double'] = 'static int try_pyarr_from_complex_double(PyObject* obj,complex_double* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(double,\'D\');\n}\n'
|
1287 |
+
|
1288 |
+
|
1289 |
+
needs['create_cb_arglist'] = ['CFUNCSMESS', 'PRINTPYOBJERR', 'MINMAX']
|
1290 |
+
# create the list of arguments to be used when calling back to python
|
1291 |
+
cfuncs['create_cb_arglist'] = """
|
1292 |
+
static int
|
1293 |
+
create_cb_arglist(PyObject* fun, PyTupleObject* xa , const int maxnofargs,
|
1294 |
+
const int nofoptargs, int *nofargs, PyTupleObject **args,
|
1295 |
+
const char *errmess)
|
1296 |
+
{
|
1297 |
+
PyObject *tmp = NULL;
|
1298 |
+
PyObject *tmp_fun = NULL;
|
1299 |
+
Py_ssize_t tot, opt, ext, siz, i, di = 0;
|
1300 |
+
CFUNCSMESS(\"create_cb_arglist\\n\");
|
1301 |
+
tot=opt=ext=siz=0;
|
1302 |
+
/* Get the total number of arguments */
|
1303 |
+
if (PyFunction_Check(fun)) {
|
1304 |
+
tmp_fun = fun;
|
1305 |
+
Py_INCREF(tmp_fun);
|
1306 |
+
}
|
1307 |
+
else {
|
1308 |
+
di = 1;
|
1309 |
+
if (PyObject_HasAttrString(fun,\"im_func\")) {
|
1310 |
+
tmp_fun = PyObject_GetAttrString(fun,\"im_func\");
|
1311 |
+
}
|
1312 |
+
else if (PyObject_HasAttrString(fun,\"__call__\")) {
|
1313 |
+
tmp = PyObject_GetAttrString(fun,\"__call__\");
|
1314 |
+
if (PyObject_HasAttrString(tmp,\"im_func\"))
|
1315 |
+
tmp_fun = PyObject_GetAttrString(tmp,\"im_func\");
|
1316 |
+
else {
|
1317 |
+
tmp_fun = fun; /* built-in function */
|
1318 |
+
Py_INCREF(tmp_fun);
|
1319 |
+
tot = maxnofargs;
|
1320 |
+
if (PyCFunction_Check(fun)) {
|
1321 |
+
/* In case the function has a co_argcount (like on PyPy) */
|
1322 |
+
di = 0;
|
1323 |
+
}
|
1324 |
+
if (xa != NULL)
|
1325 |
+
tot += PyTuple_Size((PyObject *)xa);
|
1326 |
+
}
|
1327 |
+
Py_XDECREF(tmp);
|
1328 |
+
}
|
1329 |
+
else if (PyFortran_Check(fun) || PyFortran_Check1(fun)) {
|
1330 |
+
tot = maxnofargs;
|
1331 |
+
if (xa != NULL)
|
1332 |
+
tot += PyTuple_Size((PyObject *)xa);
|
1333 |
+
tmp_fun = fun;
|
1334 |
+
Py_INCREF(tmp_fun);
|
1335 |
+
}
|
1336 |
+
else if (F2PyCapsule_Check(fun)) {
|
1337 |
+
tot = maxnofargs;
|
1338 |
+
if (xa != NULL)
|
1339 |
+
ext = PyTuple_Size((PyObject *)xa);
|
1340 |
+
if(ext>0) {
|
1341 |
+
fprintf(stderr,\"extra arguments tuple cannot be used with PyCapsule call-back\\n\");
|
1342 |
+
goto capi_fail;
|
1343 |
+
}
|
1344 |
+
tmp_fun = fun;
|
1345 |
+
Py_INCREF(tmp_fun);
|
1346 |
+
}
|
1347 |
+
}
|
1348 |
+
|
1349 |
+
if (tmp_fun == NULL) {
|
1350 |
+
fprintf(stderr,
|
1351 |
+
\"Call-back argument must be function|instance|instance.__call__|f2py-function \"
|
1352 |
+
\"but got %s.\\n\",
|
1353 |
+
((fun == NULL) ? \"NULL\" : Py_TYPE(fun)->tp_name));
|
1354 |
+
goto capi_fail;
|
1355 |
+
}
|
1356 |
+
|
1357 |
+
if (PyObject_HasAttrString(tmp_fun,\"__code__\")) {
|
1358 |
+
if (PyObject_HasAttrString(tmp = PyObject_GetAttrString(tmp_fun,\"__code__\"),\"co_argcount\")) {
|
1359 |
+
PyObject *tmp_argcount = PyObject_GetAttrString(tmp,\"co_argcount\");
|
1360 |
+
Py_DECREF(tmp);
|
1361 |
+
if (tmp_argcount == NULL) {
|
1362 |
+
goto capi_fail;
|
1363 |
+
}
|
1364 |
+
tot = PyLong_AsSsize_t(tmp_argcount) - di;
|
1365 |
+
Py_DECREF(tmp_argcount);
|
1366 |
+
}
|
1367 |
+
}
|
1368 |
+
/* Get the number of optional arguments */
|
1369 |
+
if (PyObject_HasAttrString(tmp_fun,\"__defaults__\")) {
|
1370 |
+
if (PyTuple_Check(tmp = PyObject_GetAttrString(tmp_fun,\"__defaults__\")))
|
1371 |
+
opt = PyTuple_Size(tmp);
|
1372 |
+
Py_XDECREF(tmp);
|
1373 |
+
}
|
1374 |
+
/* Get the number of extra arguments */
|
1375 |
+
if (xa != NULL)
|
1376 |
+
ext = PyTuple_Size((PyObject *)xa);
|
1377 |
+
/* Calculate the size of call-backs argument list */
|
1378 |
+
siz = MIN(maxnofargs+ext,tot);
|
1379 |
+
*nofargs = MAX(0,siz-ext);
|
1380 |
+
|
1381 |
+
#ifdef DEBUGCFUNCS
|
1382 |
+
fprintf(stderr,
|
1383 |
+
\"debug-capi:create_cb_arglist:maxnofargs(-nofoptargs),\"
|
1384 |
+
\"tot,opt,ext,siz,nofargs = %d(-%d), %zd, %zd, %zd, %zd, %d\\n\",
|
1385 |
+
maxnofargs, nofoptargs, tot, opt, ext, siz, *nofargs);
|
1386 |
+
#endif
|
1387 |
+
|
1388 |
+
if (siz < tot-opt) {
|
1389 |
+
fprintf(stderr,
|
1390 |
+
\"create_cb_arglist: Failed to build argument list \"
|
1391 |
+
\"(siz) with enough arguments (tot-opt) required by \"
|
1392 |
+
\"user-supplied function (siz,tot,opt=%zd, %zd, %zd).\\n\",
|
1393 |
+
siz, tot, opt);
|
1394 |
+
goto capi_fail;
|
1395 |
+
}
|
1396 |
+
|
1397 |
+
/* Initialize argument list */
|
1398 |
+
*args = (PyTupleObject *)PyTuple_New(siz);
|
1399 |
+
for (i=0;i<*nofargs;i++) {
|
1400 |
+
Py_INCREF(Py_None);
|
1401 |
+
PyTuple_SET_ITEM((PyObject *)(*args),i,Py_None);
|
1402 |
+
}
|
1403 |
+
if (xa != NULL)
|
1404 |
+
for (i=(*nofargs);i<siz;i++) {
|
1405 |
+
tmp = PyTuple_GetItem((PyObject *)xa,i-(*nofargs));
|
1406 |
+
Py_INCREF(tmp);
|
1407 |
+
PyTuple_SET_ITEM(*args,i,tmp);
|
1408 |
+
}
|
1409 |
+
CFUNCSMESS(\"create_cb_arglist-end\\n\");
|
1410 |
+
Py_DECREF(tmp_fun);
|
1411 |
+
return 1;
|
1412 |
+
|
1413 |
+
capi_fail:
|
1414 |
+
if (PyErr_Occurred() == NULL)
|
1415 |
+
PyErr_SetString(#modulename#_error, errmess);
|
1416 |
+
Py_XDECREF(tmp_fun);
|
1417 |
+
return 0;
|
1418 |
+
}
|
1419 |
+
"""
|


def buildcfuncs():
    from .capi_maps import c2capi_map
    for k in c2capi_map.keys():
        m = 'pyarr_from_p_%s1' % k
        cppmacros[
            m] = '#define %s(v) (PyArray_SimpleNewFromData(0,NULL,%s,(char *)v))' % (m, c2capi_map[k])
    k = 'string'
    m = 'pyarr_from_p_%s1' % k
    # NPY_CHAR compatibility, NPY_STRING with itemsize 1
    cppmacros[
        m] = '#define %s(v,dims) (PyArray_New(&PyArray_Type, 1, dims, NPY_STRING, NULL, v, 1, NPY_ARRAY_CARRAY, NULL))' % (m)

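# Reviewer example (illustration; assumes capi_maps.c2capi_map maps 'double'
# to 'NPY_DOUBLE'): after buildcfuncs() runs, cppmacros['pyarr_from_p_double1']
# holds
#     #define pyarr_from_p_double1(v) (PyArray_SimpleNewFromData(0,NULL,NPY_DOUBLE,(char *)v))
# i.e. a macro that wraps a C scalar pointer as a 0-d NumPy array.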
############ Auxiliary functions for sorting needs ###################

def append_needs(need, flag=1):
    # This function modifies the contents of the global `outneeds` dict.
    if isinstance(need, list):
        for n in need:
            append_needs(n, flag)
    elif isinstance(need, str):
        if not need:
            return
        if need in includes0:
            n = 'includes0'
        elif need in includes:
            n = 'includes'
        elif need in typedefs:
            n = 'typedefs'
        elif need in typedefs_generated:
            n = 'typedefs_generated'
        elif need in cppmacros:
            n = 'cppmacros'
        elif need in cfuncs:
            n = 'cfuncs'
        elif need in callbacks:
            n = 'callbacks'
        elif need in f90modhooks:
            n = 'f90modhooks'
        elif need in commonhooks:
            n = 'commonhooks'
        else:
            errmess('append_needs: unknown need %s\n' % (repr(need)))
            return
        if need in outneeds[n]:
            return
        if flag:
            tmp = {}
            if need in needs:
                for nn in needs[need]:
                    t = append_needs(nn, 0)
                    if isinstance(t, dict):
                        for nnn in t.keys():
                            if nnn in tmp:
                                tmp[nnn] = tmp[nnn] + t[nnn]
                            else:
                                tmp[nnn] = t[nnn]
            for nn in tmp.keys():
                for nnn in tmp[nn]:
                    if nnn not in outneeds[nn]:
                        outneeds[nn] = [nnn] + outneeds[nn]
            outneeds[n].append(need)
        else:
            tmp = {}
            if need in needs:
                for nn in needs[need]:
                    t = append_needs(nn, flag)
                    if isinstance(t, dict):
                        for nnn in t.keys():
                            if nnn in tmp:
                                tmp[nnn] = t[nnn] + tmp[nnn]
                            else:
                                tmp[nnn] = t[nnn]
            if n not in tmp:
                tmp[n] = []
            tmp[n].append(need)
            return tmp
    else:
        errmess('append_needs: expected list or string but got :%s\n' %
                (repr(need)))


def get_needs():
    # This function modifies the contents of the global `outneeds` dict.
    res = {}
    for n in outneeds.keys():
        out = []
        saveout = copy.copy(outneeds[n])
        while len(outneeds[n]) > 0:
            if outneeds[n][0] not in needs:
                out.append(outneeds[n][0])
                del outneeds[n][0]
            else:
                flag = 0
                for k in outneeds[n][1:]:
                    if k in needs[outneeds[n][0]]:
                        flag = 1
                        break
                if flag:
                    outneeds[n] = outneeds[n][1:] + [outneeds[n][0]]
                else:
                    out.append(outneeds[n][0])
                    del outneeds[n][0]
            if saveout and (0 not in map(lambda x, y: x == y, saveout, outneeds[n])) \
                    and outneeds[n] != []:
                print(n, saveout)
                errmess(
                    'get_needs: no progress in sorting needs, probably circular dependence, skipping.\n')
                out = out + saveout
                break
            saveout = copy.copy(outneeds[n])
        if out == []:
            out = [n]
        res[n] = out
    return res
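# Reviewer usage sketch (illustration; append_needs/get_needs operate on the
# global `outneeds` table defined earlier in this module):
#
#     from numpy.f2py import cfuncs
#     cfuncs.append_needs('short_from_pyobj')
#     cfuncs.get_needs()['cfuncs']
#     # -> e.g. ['int_from_pyobj', 'short_from_pyobj']
#
# Requesting 'short_from_pyobj' also pulls in its dependency
# 'int_from_pyobj' (declared via needs['short_from_pyobj'] above), and
# get_needs() orders the helpers so that dependencies come first.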
.venv/lib/python3.11/site-packages/numpy/f2py/f90mod_rules.py
ADDED
@@ -0,0 +1,264 @@
1 |
+
"""
|
2 |
+
Build F90 module support for f2py2e.
|
3 |
+
|
4 |
+
Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
|
5 |
+
Copyright 2011 -- present NumPy Developers.
|
6 |
+
Permission to use, modify, and distribute this software is given under the
|
7 |
+
terms of the NumPy License.
|
8 |
+
|
9 |
+
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
10 |
+
"""
|
11 |
+
__version__ = "$Revision: 1.27 $"[10:-1]
|
12 |
+
|
13 |
+
f2py_version = 'See `f2py -v`'
|
14 |
+
|
15 |
+
import numpy as np
|
16 |
+
|
17 |
+
from . import capi_maps
|
18 |
+
from . import func2subr
|
19 |
+
from .crackfortran import undo_rmbadname, undo_rmbadname1
|
20 |
+
|
21 |
+
# The environment provided by auxfuncs.py is needed for some calls to eval.
|
22 |
+
# As the needed functions cannot be determined by static inspection of the
|
23 |
+
# code, it is safest to use import * pending a major refactoring of f2py.
|
24 |
+
from .auxfuncs import *
|
25 |
+
|
26 |
+
options = {}
|
27 |
+
|
28 |
+
|
29 |
+
def findf90modules(m):
|
30 |
+
if ismodule(m):
|
31 |
+
return [m]
|
32 |
+
if not hasbody(m):
|
33 |
+
return []
|
34 |
+
ret = []
|
35 |
+
for b in m['body']:
|
36 |
+
if ismodule(b):
|
37 |
+
ret.append(b)
|
38 |
+
else:
|
39 |
+
ret = ret + findf90modules(b)
|
40 |
+
return ret
|
41 |
+
|
42 |
+
fgetdims1 = """\
|
43 |
+
external f2pysetdata
|
44 |
+
logical ns
|
45 |
+
integer r,i
|
46 |
+
integer(%d) s(*)
|
47 |
+
ns = .FALSE.
|
48 |
+
if (allocated(d)) then
|
49 |
+
do i=1,r
|
50 |
+
if ((size(d,i).ne.s(i)).and.(s(i).ge.0)) then
|
51 |
+
ns = .TRUE.
|
52 |
+
end if
|
53 |
+
end do
|
54 |
+
if (ns) then
|
55 |
+
deallocate(d)
|
56 |
+
end if
|
57 |
+
end if
|
58 |
+
if ((.not.allocated(d)).and.(s(1).ge.1)) then""" % np.intp().itemsize
|
59 |
+
|
60 |
+
fgetdims2 = """\
|
61 |
+
end if
|
62 |
+
if (allocated(d)) then
|
63 |
+
do i=1,r
|
64 |
+
s(i) = size(d,i)
|
65 |
+
end do
|
66 |
+
end if
|
67 |
+
flag = 1
|
68 |
+
call f2pysetdata(d,allocated(d))"""
|
69 |
+
|
70 |
+
fgetdims2_sa = """\
|
71 |
+
end if
|
72 |
+
if (allocated(d)) then
|
73 |
+
do i=1,r
|
74 |
+
s(i) = size(d,i)
|
75 |
+
end do
|
76 |
+
!s(r) must be equal to len(d(1))
|
77 |
+
end if
|
78 |
+
flag = 2
|
79 |
+
call f2pysetdata(d,allocated(d))"""
|
80 |
+
|
81 |
+
|
82 |
+
def buildhooks(pymod):
|
83 |
+
from . import rules
|
84 |
+
ret = {'f90modhooks': [], 'initf90modhooks': [], 'body': [],
|
85 |
+
'need': ['F_FUNC', 'arrayobject.h'],
|
86 |
+
'separatorsfor': {'includes0': '\n', 'includes': '\n'},
|
87 |
+
'docs': ['"Fortran 90/95 modules:\\n"'],
|
88 |
+
'latexdoc': []}
|
89 |
+
fhooks = ['']
|
90 |
+
|
91 |
+
def fadd(line, s=fhooks):
|
92 |
+
s[0] = '%s\n %s' % (s[0], line)
|
93 |
+
doc = ['']
|
94 |
+
|
95 |
+
def dadd(line, s=doc):
|
96 |
+
s[0] = '%s\n%s' % (s[0], line)
|
97 |
+
|
98 |
+
usenames = getuseblocks(pymod)
|
99 |
+
for m in findf90modules(pymod):
|
100 |
+
sargs, fargs, efargs, modobjs, notvars, onlyvars = [], [], [], [], [
|
101 |
+
m['name']], []
|
102 |
+
sargsp = []
|
103 |
+
ifargs = []
|
104 |
+
mfargs = []
|
105 |
+
if hasbody(m):
|
106 |
+
for b in m['body']:
|
107 |
+
notvars.append(b['name'])
|
108 |
+
for n in m['vars'].keys():
|
109 |
+
var = m['vars'][n]
|
110 |
+
if (n not in notvars) and (not l_or(isintent_hide, isprivate)(var)):
|
111 |
+
onlyvars.append(n)
|
112 |
+
mfargs.append(n)
|
113 |
+
outmess('\t\tConstructing F90 module support for "%s"...\n' %
|
114 |
+
(m['name']))
|
115 |
+
if m['name'] in usenames and not onlyvars:
|
116 |
+
outmess(f"\t\t\tSkipping {m['name']} since it is in 'use'...\n")
|
117 |
+
continue
|
118 |
+
if onlyvars:
|
119 |
+
outmess('\t\t Variables: %s\n' % (' '.join(onlyvars)))
|
120 |
+
chooks = ['']
|
121 |
+
|
122 |
+
def cadd(line, s=chooks):
|
123 |
+
s[0] = '%s\n%s' % (s[0], line)
|
124 |
+
ihooks = ['']
|
125 |
+
|
126 |
+
def iadd(line, s=ihooks):
|
127 |
+
s[0] = '%s\n%s' % (s[0], line)
|
128 |
+
|
129 |
+
vrd = capi_maps.modsign2map(m)
|
130 |
+
cadd('static FortranDataDef f2py_%s_def[] = {' % (m['name']))
|
131 |
+
dadd('\\subsection{Fortran 90/95 module \\texttt{%s}}\n' % (m['name']))
|
132 |
+
if hasnote(m):
|
133 |
+
note = m['note']
|
134 |
+
if isinstance(note, list):
|
135 |
+
note = '\n'.join(note)
|
136 |
+
dadd(note)
|
137 |
+
if onlyvars:
|
138 |
+
dadd('\\begin{description}')
|
139 |
+
for n in onlyvars:
|
140 |
+
var = m['vars'][n]
|
141 |
+
modobjs.append(n)
|
142 |
+
ct = capi_maps.getctype(var)
|
143 |
+
at = capi_maps.c2capi_map[ct]
|
144 |
+
dm = capi_maps.getarrdims(n, var)
|
145 |
+
dms = dm['dims'].replace('*', '-1').strip()
|
146 |
+
dms = dms.replace(':', '-1').strip()
|
147 |
+
if not dms:
|
148 |
+
dms = '-1'
|
149 |
+
use_fgetdims2 = fgetdims2
|
150 |
+
cadd('\t{"%s",%s,{{%s}},%s, %s},' %
|
151 |
+
(undo_rmbadname1(n), dm['rank'], dms, at,
|
152 |
+
capi_maps.get_elsize(var)))
|
153 |
+
dadd('\\item[]{{}\\verb@%s@{}}' %
|
154 |
+
(capi_maps.getarrdocsign(n, var)))
|
155 |
+
if hasnote(var):
|
156 |
+
note = var['note']
|
157 |
+
if isinstance(note, list):
|
158 |
+
note = '\n'.join(note)
|
159 |
+
dadd('--- %s' % (note))
|
160 |
+
if isallocatable(var):
|
161 |
+
fargs.append('f2py_%s_getdims_%s' % (m['name'], n))
|
162 |
+
efargs.append(fargs[-1])
|
163 |
+
sargs.append(
|
164 |
+
'void (*%s)(int*,npy_intp*,void(*)(char*,npy_intp*),int*)' % (n))
|
165 |
+
sargsp.append('void (*)(int*,npy_intp*,void(*)(char*,npy_intp*),int*)')
|
166 |
+
iadd('\tf2py_%s_def[i_f2py++].func = %s;' % (m['name'], n))
|
167 |
+
fadd('subroutine %s(r,s,f2pysetdata,flag)' % (fargs[-1]))
|
168 |
+
fadd('use %s, only: d => %s\n' %
|
169 |
+
(m['name'], undo_rmbadname1(n)))
|
170 |
+
fadd('integer flag\n')
|
171 |
+
fhooks[0] = fhooks[0] + fgetdims1
|
172 |
+
dms = range(1, int(dm['rank']) + 1)
|
173 |
+
fadd(' allocate(d(%s))\n' %
|
174 |
+
(','.join(['s(%s)' % i for i in dms])))
|
175 |
+
fhooks[0] = fhooks[0] + use_fgetdims2
|
176 |
+
fadd('end subroutine %s' % (fargs[-1]))
|
177 |
+
else:
|
178 |
+
fargs.append(n)
|
179 |
+
sargs.append('char *%s' % (n))
|
180 |
+
sargsp.append('char*')
|
181 |
+
iadd('\tf2py_%s_def[i_f2py++].data = %s;' % (m['name'], n))
|
182 |
+
if onlyvars:
|
183 |
+
dadd('\\end{description}')
|
184 |
+
if hasbody(m):
|
185 |
+
for b in m['body']:
|
186 |
+
if not isroutine(b):
|
187 |
+
outmess("f90mod_rules.buildhooks:"
|
188 |
+
f" skipping {b['block']} {b['name']}\n")
|
189 |
+
continue
|
190 |
+
modobjs.append('%s()' % (b['name']))
|
191 |
+
b['modulename'] = m['name']
|
192 |
+
api, wrap = rules.buildapi(b)
|
193 |
+
if isfunction(b):
|
194 |
+
fhooks[0] = fhooks[0] + wrap
|
195 |
+
fargs.append('f2pywrap_%s_%s' % (m['name'], b['name']))
|
196 |
+
ifargs.append(func2subr.createfuncwrapper(b, signature=1))
|
197 |
+
else:
|
198 |
+
if wrap:
|
199 |
+
fhooks[0] = fhooks[0] + wrap
|
200 |
+
fargs.append('f2pywrap_%s_%s' % (m['name'], b['name']))
|
201 |
+
ifargs.append(
|
202 |
+
func2subr.createsubrwrapper(b, signature=1))
|
203 |
+
else:
|
204 |
+
fargs.append(b['name'])
|
205 |
+
mfargs.append(fargs[-1])
|
206 |
+
api['externroutines'] = []
|
207 |
+
ar = applyrules(api, vrd)
|
208 |
+
ar['docs'] = []
|
209 |
+
ar['docshort'] = []
|
210 |
+
ret = dictappend(ret, ar)
|
211 |
+
cadd(('\t{"%s",-1,{{-1}},0,0,NULL,(void *)'
|
212 |
+
'f2py_rout_#modulename#_%s_%s,'
|
213 |
+
'doc_f2py_rout_#modulename#_%s_%s},')
|
214 |
+
% (b['name'], m['name'], b['name'], m['name'], b['name']))
|
215 |
+
sargs.append('char *%s' % (b['name']))
|
216 |
+
sargsp.append('char *')
|
217 |
+
iadd('\tf2py_%s_def[i_f2py++].data = %s;' %
|
218 |
+
(m['name'], b['name']))
|
219 |
+
cadd('\t{NULL}\n};\n')
|
220 |
+
iadd('}')
|
221 |
+
ihooks[0] = 'static void f2py_setup_%s(%s) {\n\tint i_f2py=0;%s' % (
|
222 |
+
m['name'], ','.join(sargs), ihooks[0])
|
223 |
+
if '_' in m['name']:
|
224 |
+
F_FUNC = 'F_FUNC_US'
|
225 |
+
else:
|
226 |
+
F_FUNC = 'F_FUNC'
|
227 |
+
iadd('extern void %s(f2pyinit%s,F2PYINIT%s)(void (*)(%s));'
|
228 |
+
% (F_FUNC, m['name'], m['name'].upper(), ','.join(sargsp)))
|
229 |
+
iadd('static void f2py_init_%s(void) {' % (m['name']))
|
230 |
+
iadd('\t%s(f2pyinit%s,F2PYINIT%s)(f2py_setup_%s);'
|
231 |
+
% (F_FUNC, m['name'], m['name'].upper(), m['name']))
|
232 |
+
iadd('}\n')
|
233 |
+
ret['f90modhooks'] = ret['f90modhooks'] + chooks + ihooks
|
234 |
+
ret['initf90modhooks'] = ['\tPyDict_SetItemString(d, "%s", PyFortranObject_New(f2py_%s_def,f2py_init_%s));' % (
|
235 |
+
m['name'], m['name'], m['name'])] + ret['initf90modhooks']
|
236 |
+
fadd('')
|
237 |
+
fadd('subroutine f2pyinit%s(f2pysetupfunc)' % (m['name']))
|
238 |
+
if mfargs:
|
239 |
+
for a in undo_rmbadname(mfargs):
|
240 |
+
fadd('use %s, only : %s' % (m['name'], a))
|
241 |
+
if ifargs:
|
242 |
+
fadd(' '.join(['interface'] + ifargs))
|
243 |
+
fadd('end interface')
|
244 |
+
fadd('external f2pysetupfunc')
|
245 |
+
if efargs:
|
246 |
+
for a in undo_rmbadname(efargs):
|
247 |
+
fadd('external %s' % (a))
|
248 |
+
fadd('call f2pysetupfunc(%s)' % (','.join(undo_rmbadname(fargs))))
|
249 |
+
fadd('end subroutine f2pyinit%s\n' % (m['name']))
|
250 |
+
|
251 |
+
dadd('\n'.join(ret['latexdoc']).replace(
|
252 |
+
r'\subsection{', r'\subsubsection{'))
|
253 |
+
|
254 |
+
ret['latexdoc'] = []
|
255 |
+
ret['docs'].append('"\t%s --- %s"' % (m['name'],
|
256 |
+
','.join(undo_rmbadname(modobjs))))
|
257 |
+
|
258 |
+
ret['routine_defs'] = ''
|
259 |
+
ret['doc'] = []
|
260 |
+
ret['docshort'] = []
|
261 |
+
ret['latexdoc'] = doc[0]
|
262 |
+
if len(ret['docs']) <= 1:
|
263 |
+
ret['docs'] = ''
|
264 |
+
return ret, fhooks[0]
|
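# Reviewer note (illustration): buildhooks(pymod) above returns a pair --
# the rules dictionary (C hooks, docs and latexdoc entries) that is merged
# into the generated module, and the accumulated Fortran source in fhooks[0]
# holding the f2pyinit<module> and *_getdims_* wrapper subroutines, e.g.
#     ret, fhook = buildhooks(pymod)   # pymod: a crackfortran block dict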
.venv/lib/python3.11/site-packages/numpy/f2py/setup.cfg
ADDED
@@ -0,0 +1,3 @@
[bdist_rpm]
doc_files = docs/
            tests/
.venv/lib/python3.11/site-packages/numpy/f2py/symbolic.py
ADDED
@@ -0,0 +1,1517 @@
1 |
+
"""Fortran/C symbolic expressions
|
2 |
+
|
3 |
+
References:
|
4 |
+
- J3/21-007: Draft Fortran 202x. https://j3-fortran.org/doc/year/21/21-007.pdf
|
5 |
+
|
6 |
+
Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
|
7 |
+
Copyright 2011 -- present NumPy Developers.
|
8 |
+
Permission to use, modify, and distribute this software is given under the
|
9 |
+
terms of the NumPy License.
|
10 |
+
|
11 |
+
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
12 |
+
"""
|
13 |
+
|
14 |
+
# To analyze Fortran expressions to solve dimensions specifications,
|
15 |
+
# for instances, we implement a minimal symbolic engine for parsing
|
16 |
+
# expressions into a tree of expression instances. As a first
|
17 |
+
# instance, we care only about arithmetic expressions involving
|
18 |
+
# integers and operations like addition (+), subtraction (-),
|
19 |
+
# multiplication (*), division (Fortran / is Python //, Fortran // is
|
20 |
+
# concatenate), and exponentiation (**). In addition, .pyf files may
|
21 |
+
# contain C expressions that support here is implemented as well.
|
22 |
+
#
|
23 |
+
# TODO: support logical constants (Op.BOOLEAN)
|
24 |
+
# TODO: support logical operators (.AND., ...)
|
25 |
+
# TODO: support defined operators (.MYOP., ...)
|
26 |
+
#
|
27 |
+
__all__ = ['Expr']
|
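# Reviewer usage sketch (illustration; Expr, Language and fromstring are all
# defined in this module):
#
#     from numpy.f2py.symbolic import Expr, Language
#     e = Expr.parse('2 * n + 1', language=Language.Fortran)
#     e.tostring(language=Language.C)    # -> e.g. '1 + 2 * n'
#
# Expr.parse builds the expression tree that f2py uses to resolve dimension
# specifications; tostring renders it back in Fortran, C or Python syntax.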
28 |
+
|
29 |
+
|
30 |
+
import re
|
31 |
+
import warnings
|
32 |
+
from enum import Enum
|
33 |
+
from math import gcd
|
34 |
+
|
35 |
+
|
36 |
+
class Language(Enum):
|
37 |
+
"""
|
38 |
+
Used as Expr.tostring language argument.
|
39 |
+
"""
|
40 |
+
Python = 0
|
41 |
+
Fortran = 1
|
42 |
+
C = 2
|
43 |
+
|
44 |
+
|
45 |
+
class Op(Enum):
|
46 |
+
"""
|
47 |
+
Used as Expr op attribute.
|
48 |
+
"""
|
49 |
+
INTEGER = 10
|
50 |
+
REAL = 12
|
51 |
+
COMPLEX = 15
|
52 |
+
STRING = 20
|
53 |
+
ARRAY = 30
|
54 |
+
SYMBOL = 40
|
55 |
+
TERNARY = 100
|
56 |
+
APPLY = 200
|
57 |
+
INDEXING = 210
|
58 |
+
CONCAT = 220
|
59 |
+
RELATIONAL = 300
|
60 |
+
TERMS = 1000
|
61 |
+
FACTORS = 2000
|
62 |
+
REF = 3000
|
63 |
+
DEREF = 3001
|
64 |
+
|
65 |
+
|
66 |
+
class RelOp(Enum):
|
67 |
+
"""
|
68 |
+
Used in Op.RELATIONAL expression to specify the function part.
|
69 |
+
"""
|
70 |
+
EQ = 1
|
71 |
+
NE = 2
|
72 |
+
LT = 3
|
73 |
+
LE = 4
|
74 |
+
GT = 5
|
75 |
+
GE = 6
|
76 |
+
|
77 |
+
@classmethod
|
78 |
+
def fromstring(cls, s, language=Language.C):
|
79 |
+
if language is Language.Fortran:
|
80 |
+
return {'.eq.': RelOp.EQ, '.ne.': RelOp.NE,
|
81 |
+
'.lt.': RelOp.LT, '.le.': RelOp.LE,
|
82 |
+
'.gt.': RelOp.GT, '.ge.': RelOp.GE}[s.lower()]
|
83 |
+
return {'==': RelOp.EQ, '!=': RelOp.NE, '<': RelOp.LT,
|
84 |
+
'<=': RelOp.LE, '>': RelOp.GT, '>=': RelOp.GE}[s]
|
85 |
+
|
86 |
+
def tostring(self, language=Language.C):
|
87 |
+
if language is Language.Fortran:
|
88 |
+
return {RelOp.EQ: '.eq.', RelOp.NE: '.ne.',
|
89 |
+
RelOp.LT: '.lt.', RelOp.LE: '.le.',
|
90 |
+
RelOp.GT: '.gt.', RelOp.GE: '.ge.'}[self]
|
91 |
+
return {RelOp.EQ: '==', RelOp.NE: '!=',
|
92 |
+
RelOp.LT: '<', RelOp.LE: '<=',
|
93 |
+
RelOp.GT: '>', RelOp.GE: '>='}[self]
|
94 |
+
|
95 |
+
|
96 |
+
class ArithOp(Enum):
|
97 |
+
"""
|
98 |
+
Used in Op.APPLY expression to specify the function part.
|
99 |
+
"""
|
100 |
+
POS = 1
|
101 |
+
NEG = 2
|
102 |
+
ADD = 3
|
103 |
+
SUB = 4
|
104 |
+
MUL = 5
|
105 |
+
DIV = 6
|
106 |
+
POW = 7
|
107 |
+
|
108 |
+
|
109 |
+
class OpError(Exception):
|
110 |
+
pass
|
111 |
+
|
112 |
+
|
113 |
+
class Precedence(Enum):
|
114 |
+
"""
|
115 |
+
Used as Expr.tostring precedence argument.
|
116 |
+
"""
|
117 |
+
ATOM = 0
|
118 |
+
POWER = 1
|
119 |
+
UNARY = 2
|
120 |
+
PRODUCT = 3
|
121 |
+
SUM = 4
|
122 |
+
LT = 6
|
123 |
+
EQ = 7
|
124 |
+
LAND = 11
|
125 |
+
LOR = 12
|
126 |
+
TERNARY = 13
|
127 |
+
ASSIGN = 14
|
128 |
+
TUPLE = 15
|
129 |
+
NONE = 100
|
130 |
+
|
131 |
+
|
132 |
+
integer_types = (int,)
|
133 |
+
number_types = (int, float)
|
134 |
+
|
135 |
+
|
136 |
+
def _pairs_add(d, k, v):
|
137 |
+
# Internal utility method for updating terms and factors data.
|
138 |
+
c = d.get(k)
|
139 |
+
if c is None:
|
140 |
+
d[k] = v
|
141 |
+
else:
|
142 |
+
c = c + v
|
143 |
+
if c:
|
144 |
+
d[k] = c
|
145 |
+
else:
|
146 |
+
del d[k]
|
147 |
+
|
148 |
+
|
149 |
+
class ExprWarning(UserWarning):
|
150 |
+
pass
|
151 |
+
|
152 |
+
|
153 |
+
def ewarn(message):
|
154 |
+
warnings.warn(message, ExprWarning, stacklevel=2)
|
155 |
+
|
156 |
+
|
157 |
+
class Expr:
|
158 |
+
"""Represents a Fortran expression as a op-data pair.
|
159 |
+
|
160 |
+
Expr instances are hashable and sortable.
|
161 |
+
"""
|
162 |
+
|
163 |
+
@staticmethod
|
164 |
+
def parse(s, language=Language.C):
|
165 |
+
"""Parse a Fortran expression to a Expr.
|
166 |
+
"""
|
167 |
+
return fromstring(s, language=language)
|
168 |
+
|
169 |
+
def __init__(self, op, data):
|
170 |
+
assert isinstance(op, Op)
|
171 |
+
|
172 |
+
# sanity checks
|
173 |
+
if op is Op.INTEGER:
|
174 |
+
# data is a 2-tuple of numeric object and a kind value
|
175 |
+
# (default is 4)
|
176 |
+
assert isinstance(data, tuple) and len(data) == 2
|
177 |
+
assert isinstance(data[0], int)
|
178 |
+
assert isinstance(data[1], (int, str)), data
|
179 |
+
elif op is Op.REAL:
|
180 |
+
# data is a 2-tuple of numeric object and a kind value
|
181 |
+
# (default is 4)
|
182 |
+
assert isinstance(data, tuple) and len(data) == 2
|
183 |
+
assert isinstance(data[0], float)
|
184 |
+
assert isinstance(data[1], (int, str)), data
|
185 |
+
elif op is Op.COMPLEX:
|
186 |
+
# data is a 2-tuple of constant expressions
|
187 |
+
assert isinstance(data, tuple) and len(data) == 2
|
188 |
+
elif op is Op.STRING:
|
189 |
+
# data is a 2-tuple of quoted string and a kind value
|
190 |
+
# (default is 1)
|
191 |
+
assert isinstance(data, tuple) and len(data) == 2
|
192 |
+
assert (isinstance(data[0], str)
|
193 |
+
and data[0][::len(data[0])-1] in ('""', "''", '@@'))
|
194 |
+
assert isinstance(data[1], (int, str)), data
|
195 |
+
elif op is Op.SYMBOL:
|
196 |
+
# data is any hashable object
|
197 |
+
assert hash(data) is not None
|
198 |
+
elif op in (Op.ARRAY, Op.CONCAT):
|
199 |
+
# data is a tuple of expressions
|
200 |
+
assert isinstance(data, tuple)
|
201 |
+
assert all(isinstance(item, Expr) for item in data), data
|
202 |
+
elif op in (Op.TERMS, Op.FACTORS):
|
203 |
+
# data is {<term|base>:<coeff|exponent>} where dict values
|
204 |
+
# are nonzero Python integers
|
205 |
+
assert isinstance(data, dict)
|
206 |
+
elif op is Op.APPLY:
|
207 |
+
# data is (<function>, <operands>, <kwoperands>) where
|
208 |
+
# operands are Expr instances
|
209 |
+
assert isinstance(data, tuple) and len(data) == 3
|
210 |
+
# function is any hashable object
|
211 |
+
assert hash(data[0]) is not None
|
212 |
+
assert isinstance(data[1], tuple)
|
213 |
+
assert isinstance(data[2], dict)
|
214 |
+
elif op is Op.INDEXING:
|
215 |
+
# data is (<object>, <indices>)
|
216 |
+
assert isinstance(data, tuple) and len(data) == 2
|
217 |
+
# function is any hashable object
|
218 |
+
assert hash(data[0]) is not None
|
219 |
+
elif op is Op.TERNARY:
|
220 |
+
# data is (<cond>, <expr1>, <expr2>)
|
221 |
+
assert isinstance(data, tuple) and len(data) == 3
|
222 |
+
elif op in (Op.REF, Op.DEREF):
|
223 |
+
# data is Expr instance
|
224 |
+
assert isinstance(data, Expr)
|
225 |
+
elif op is Op.RELATIONAL:
|
226 |
+
# data is (<relop>, <left>, <right>)
|
227 |
+
assert isinstance(data, tuple) and len(data) == 3
|
228 |
+
else:
|
229 |
+
raise NotImplementedError(
|
230 |
+
f'unknown op or missing sanity check: {op}')
|
231 |
+
|
232 |
+
self.op = op
|
233 |
+
self.data = data
|
234 |
+
|
235 |
+
def __eq__(self, other):
|
236 |
+
return (isinstance(other, Expr)
|
237 |
+
and self.op is other.op
|
238 |
+
and self.data == other.data)
|
239 |
+
|
240 |
+
def __hash__(self):
|
241 |
+
if self.op in (Op.TERMS, Op.FACTORS):
|
242 |
+
data = tuple(sorted(self.data.items()))
|
243 |
+
elif self.op is Op.APPLY:
|
244 |
+
data = self.data[:2] + tuple(sorted(self.data[2].items()))
|
245 |
+
else:
|
246 |
+
data = self.data
|
247 |
+
return hash((self.op, data))
|
248 |
+
|
249 |
+
def __lt__(self, other):
|
250 |
+
if isinstance(other, Expr):
|
251 |
+
if self.op is not other.op:
|
252 |
+
return self.op.value < other.op.value
|
253 |
+
if self.op in (Op.TERMS, Op.FACTORS):
|
254 |
+
return (tuple(sorted(self.data.items()))
|
255 |
+
< tuple(sorted(other.data.items())))
|
256 |
+
if self.op is Op.APPLY:
|
257 |
+
if self.data[:2] != other.data[:2]:
|
258 |
+
return self.data[:2] < other.data[:2]
|
259 |
+
return tuple(sorted(self.data[2].items())) < tuple(
|
260 |
+
sorted(other.data[2].items()))
|
261 |
+
return self.data < other.data
|
262 |
+
return NotImplemented
|
263 |
+
|
264 |
+
def __le__(self, other): return self == other or self < other
|
265 |
+
|
266 |
+
def __gt__(self, other): return not (self <= other)
|
267 |
+
|
268 |
+
def __ge__(self, other): return not (self < other)
|
269 |
+
|
270 |
+
def __repr__(self):
|
271 |
+
return f'{type(self).__name__}({self.op}, {self.data!r})'
|
272 |
+
|
273 |
+
def __str__(self):
|
274 |
+
return self.tostring()
|
275 |
+
|
276 |
+
def tostring(self, parent_precedence=Precedence.NONE,
|
277 |
+
language=Language.Fortran):
|
278 |
+
"""Return a string representation of Expr.
|
279 |
+
"""
|
280 |
+
if self.op in (Op.INTEGER, Op.REAL):
|
281 |
+
precedence = (Precedence.SUM if self.data[0] < 0
|
282 |
+
else Precedence.ATOM)
|
283 |
+
r = str(self.data[0]) + (f'_{self.data[1]}'
|
284 |
+
if self.data[1] != 4 else '')
|
285 |
+
elif self.op is Op.COMPLEX:
|
286 |
+
r = ', '.join(item.tostring(Precedence.TUPLE, language=language)
|
287 |
+
for item in self.data)
|
288 |
+
r = '(' + r + ')'
|
289 |
+
precedence = Precedence.ATOM
|
290 |
+
elif self.op is Op.SYMBOL:
|
291 |
+
precedence = Precedence.ATOM
|
292 |
+
r = str(self.data)
|
293 |
+
elif self.op is Op.STRING:
|
294 |
+
r = self.data[0]
|
295 |
+
if self.data[1] != 1:
|
296 |
+
r = self.data[1] + '_' + r
|
297 |
+
precedence = Precedence.ATOM
|
298 |
+
elif self.op is Op.ARRAY:
|
299 |
+
r = ', '.join(item.tostring(Precedence.TUPLE, language=language)
|
300 |
+
for item in self.data)
|
301 |
+
r = '[' + r + ']'
|
302 |
+
precedence = Precedence.ATOM
|
303 |
+
elif self.op is Op.TERMS:
|
304 |
+
terms = []
|
305 |
+
for term, coeff in sorted(self.data.items()):
|
306 |
+
if coeff < 0:
|
307 |
+
op = ' - '
|
308 |
+
coeff = -coeff
|
309 |
+
else:
|
310 |
+
op = ' + '
|
311 |
+
if coeff == 1:
|
312 |
+
term = term.tostring(Precedence.SUM, language=language)
|
313 |
+
else:
|
314 |
+
if term == as_number(1):
|
315 |
+
term = str(coeff)
|
316 |
+
else:
|
317 |
+
term = f'{coeff} * ' + term.tostring(
|
318 |
+
Precedence.PRODUCT, language=language)
|
319 |
+
if terms:
|
320 |
+
terms.append(op)
|
321 |
+
elif op == ' - ':
|
322 |
+
terms.append('-')
|
323 |
+
terms.append(term)
|
324 |
+
r = ''.join(terms) or '0'
|
325 |
+
precedence = Precedence.SUM if terms else Precedence.ATOM
|
326 |
+
elif self.op is Op.FACTORS:
|
327 |
+
factors = []
|
328 |
+
tail = []
|
329 |
+
for base, exp in sorted(self.data.items()):
|
330 |
+
op = ' * '
|
331 |
+
if exp == 1:
|
332 |
+
factor = base.tostring(Precedence.PRODUCT,
|
333 |
+
language=language)
|
334 |
+
elif language is Language.C:
|
335 |
+
if exp in range(2, 10):
|
336 |
+
factor = base.tostring(Precedence.PRODUCT,
|
337 |
+
language=language)
|
338 |
+
factor = ' * '.join([factor] * exp)
|
339 |
+
elif exp in range(-10, 0):
|
340 |
+
factor = base.tostring(Precedence.PRODUCT,
|
341 |
+
language=language)
|
342 |
+
tail += [factor] * -exp
|
343 |
+
continue
|
344 |
+
else:
|
345 |
+
factor = base.tostring(Precedence.TUPLE,
|
346 |
+
language=language)
|
347 |
+
factor = f'pow({factor}, {exp})'
|
348 |
+
else:
|
349 |
+
factor = base.tostring(Precedence.POWER,
|
350 |
+
language=language) + f' ** {exp}'
|
351 |
+
if factors:
|
352 |
+
factors.append(op)
|
353 |
+
factors.append(factor)
|
354 |
+
if tail:
|
355 |
+
if not factors:
|
356 |
+
factors += ['1']
|
[ lines 357-1517 of symbolic.py: the remainder of the Expr class — tostring()
  rendering for the APPLY, INDEXING, CONCAT, TERNARY, REF, DEREF and RELATIONAL
  ops, the arithmetic/power/division/call/indexing operator methods, and the
  substitute, traverse, contains, symbols, polynomial_atoms and linear_solve
  helpers — followed by the module-level normalize(), the as_*() constructors
  (as_expr, as_symbol, as_number, as_integer, as_real, as_string, as_array,
  as_complex, as_apply, as_ternary, as_ref, as_deref, the relational helpers,
  as_terms, as_factors, as_term_coeff, as_numer_denom), the quote and
  parenthesis preprocessing utilities (eliminate_quotes, insert_quotes,
  replace_parenthesis, unreplace_parenthesis) and the lazy fromstring() parser
  built on _Pair and _FromStringWorker. ]
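A minimal usage sketch of the parser and renderer summarized above. numpy.f2py.symbolic is a private helper module, so the import path, the Language enum and the tostring() keyword are taken from the code in this diff rather than from a documented API:

# Hypothetical sketch: lazily parse a Fortran-style expression and re-render it
# with C syntax, using fromstring()/Language/Expr.tostring() from the file above.
from numpy.f2py.symbolic import Language, fromstring

expr = fromstring('2 * a + b(1)', language=Language.Fortran)   # lazy parse
print(expr.tostring(language=Language.C))   # the same expression, C-flavoured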
.venv/lib/python3.11/site-packages/numpy/f2py/tests/test_parameter.py
ADDED
@@ -0,0 +1,112 @@
import os
import pytest

import numpy as np

from . import util


class TestParameters(util.F2PyTest):
    # Check that intent(in out) translates as intent(inout)
    sources = [
        util.getpath("tests", "src", "parameter", "constant_real.f90"),
        util.getpath("tests", "src", "parameter", "constant_integer.f90"),
        util.getpath("tests", "src", "parameter", "constant_both.f90"),
        util.getpath("tests", "src", "parameter", "constant_compound.f90"),
        util.getpath("tests", "src", "parameter", "constant_non_compound.f90"),
    ]

    @pytest.mark.slow
    def test_constant_real_single(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.float32)[::2]
        pytest.raises(ValueError, self.module.foo_single, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.float32)
        self.module.foo_single(x)
        assert np.allclose(x, [0 + 1 + 2 * 3, 1, 2])

    @pytest.mark.slow
    def test_constant_real_double(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.float64)[::2]
        pytest.raises(ValueError, self.module.foo_double, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.float64)
        self.module.foo_double(x)
        assert np.allclose(x, [0 + 1 + 2 * 3, 1, 2])

    @pytest.mark.slow
    def test_constant_compound_int(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.int32)[::2]
        pytest.raises(ValueError, self.module.foo_compound_int, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.int32)
        self.module.foo_compound_int(x)
        assert np.allclose(x, [0 + 1 + 2 * 6, 1, 2])

    @pytest.mark.slow
    def test_constant_non_compound_int(self):
        # check values
        x = np.arange(4, dtype=np.int32)
        self.module.foo_non_compound_int(x)
        assert np.allclose(x, [0 + 1 + 2 + 3 * 4, 1, 2, 3])

    @pytest.mark.slow
    def test_constant_integer_int(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.int32)[::2]
        pytest.raises(ValueError, self.module.foo_int, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.int32)
        self.module.foo_int(x)
        assert np.allclose(x, [0 + 1 + 2 * 3, 1, 2])

    @pytest.mark.slow
    def test_constant_integer_long(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.int64)[::2]
        pytest.raises(ValueError, self.module.foo_long, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.int64)
        self.module.foo_long(x)
        assert np.allclose(x, [0 + 1 + 2 * 3, 1, 2])

    @pytest.mark.slow
    def test_constant_both(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.float64)[::2]
        pytest.raises(ValueError, self.module.foo, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.float64)
        self.module.foo(x)
        assert np.allclose(x, [0 + 1 * 3 * 3 + 2 * 3 * 3, 1 * 3, 2 * 3])

    @pytest.mark.slow
    def test_constant_no(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.float64)[::2]
        pytest.raises(ValueError, self.module.foo_no, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.float64)
        self.module.foo_no(x)
        assert np.allclose(x, [0 + 1 * 3 * 3 + 2 * 3 * 3, 1 * 3, 2 * 3])

    @pytest.mark.slow
    def test_constant_sum(self):
        # non-contiguous should raise error
        x = np.arange(6, dtype=np.float64)[::2]
        pytest.raises(ValueError, self.module.foo_sum, x)

        # check values with contiguous array
        x = np.arange(3, dtype=np.float64)
        self.module.foo_sum(x)
        assert np.allclose(x, [0 + 1 * 3 * 3 + 2 * 3 * 3, 1 * 3, 2 * 3])
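The non-contiguous cases above rely on the f2py wrappers rejecting strided views with a ValueError; a hedged sketch of how a caller would sidestep that (foo_single and the wrapped module are placeholders borrowed from the test class):

# Assumption: a wrapped Fortran routine that requires a contiguous float32 array.
import numpy as np

x = np.arange(6, dtype=np.float32)[::2]   # strided, non-contiguous view
x = np.ascontiguousarray(x)               # copy into contiguous memory first
# module.foo_single(x)                    # the wrapper would now accept x in place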
.venv/lib/python3.11/site-packages/numpy/f2py/tests/test_return_complex.py
ADDED
@@ -0,0 +1,65 @@
import pytest

from numpy import array
from . import util


class TestReturnComplex(util.F2PyTest):
    def check_function(self, t, tname):
        if tname in ["t0", "t8", "s0", "s8"]:
            err = 1e-5
        else:
            err = 0.0
        assert abs(t(234j) - 234.0j) <= err
        assert abs(t(234.6) - 234.6) <= err
        assert abs(t(234) - 234.0) <= err
        assert abs(t(234.6 + 3j) - (234.6 + 3j)) <= err
        # assert abs(t('234')-234.)<=err
        # assert abs(t('234.6')-234.6)<=err
        assert abs(t(-234) + 234.0) <= err
        assert abs(t([234]) - 234.0) <= err
        assert abs(t((234, )) - 234.0) <= err
        assert abs(t(array(234)) - 234.0) <= err
        assert abs(t(array(23 + 4j, "F")) - (23 + 4j)) <= err
        assert abs(t(array([234])) - 234.0) <= err
        assert abs(t(array([[234]])) - 234.0) <= err
        assert abs(t(array([234]).astype("b")) + 22.0) <= err
        assert abs(t(array([234], "h")) - 234.0) <= err
        assert abs(t(array([234], "i")) - 234.0) <= err
        assert abs(t(array([234], "l")) - 234.0) <= err
        assert abs(t(array([234], "q")) - 234.0) <= err
        assert abs(t(array([234], "f")) - 234.0) <= err
        assert abs(t(array([234], "d")) - 234.0) <= err
        assert abs(t(array([234 + 3j], "F")) - (234 + 3j)) <= err
        assert abs(t(array([234], "D")) - 234.0) <= err

        # pytest.raises(TypeError, t, array([234], 'a1'))
        pytest.raises(TypeError, t, "abc")

        pytest.raises(IndexError, t, [])
        pytest.raises(IndexError, t, ())

        pytest.raises(TypeError, t, t)
        pytest.raises(TypeError, t, {})

        try:
            r = t(10**400)
            assert repr(r) in ["(inf+0j)", "(Infinity+0j)"]
        except OverflowError:
            pass


class TestFReturnComplex(TestReturnComplex):
    sources = [
        util.getpath("tests", "src", "return_complex", "foo77.f"),
        util.getpath("tests", "src", "return_complex", "foo90.f90"),
    ]

    @pytest.mark.parametrize("name", "t0,t8,t16,td,s0,s8,s16,sd".split(","))
    def test_all_f77(self, name):
        self.check_function(getattr(self.module, name), name)

    @pytest.mark.parametrize("name", "t0,t8,t16,td,s0,s8,s16,sd".split(","))
    def test_all_f90(self, name):
        self.check_function(getattr(self.module.f90_return_complex, name),
                            name)
.venv/lib/python3.11/site-packages/numpy/f2py/use_rules.py
ADDED
@@ -0,0 +1,106 @@
"""
Build 'use others module data' mechanism for f2py2e.

Copyright 1999 -- 2011 Pearu Peterson all rights reserved.
Copyright 2011 -- present NumPy Developers.
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.

NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
__version__ = "$Revision: 1.3 $"[10:-1]

f2py_version = 'See `f2py -v`'


from .auxfuncs import (
    applyrules, dictappend, gentitle, hasnote, outmess
)


usemodule_rules = {
    'body': """
#begintitle#
static char doc_#apiname#[] = \"\\\nVariable wrapper signature:\\n\\
\t #name# = get_#name#()\\n\\
Arguments:\\n\\
#docstr#\";
extern F_MODFUNC(#usemodulename#,#USEMODULENAME#,#realname#,#REALNAME#);
static PyObject *#apiname#(PyObject *capi_self, PyObject *capi_args) {
/*#decl#*/
\tif (!PyArg_ParseTuple(capi_args, \"\")) goto capi_fail;
printf(\"c: %d\\n\",F_MODFUNC(#usemodulename#,#USEMODULENAME#,#realname#,#REALNAME#));
\treturn Py_BuildValue(\"\");
capi_fail:
\treturn NULL;
}
""",
    'method': '\t{\"get_#name#\",#apiname#,METH_VARARGS|METH_KEYWORDS,doc_#apiname#},',
    'need': ['F_MODFUNC']
}

################


def buildusevars(m, r):
    ret = {}
    outmess(
        '\t\tBuilding use variable hooks for module "%s" (feature only for F90/F95)...\n' % (m['name']))
    varsmap = {}
    revmap = {}
    if 'map' in r:
        for k in r['map'].keys():
            if r['map'][k] in revmap:
                outmess('\t\t\tVariable "%s<=%s" is already mapped by "%s". Skipping.\n' % (
                    r['map'][k], k, revmap[r['map'][k]]))
            else:
                revmap[r['map'][k]] = k
    if 'only' in r and r['only']:
        for v in r['map'].keys():
            if r['map'][v] in m['vars']:

                if revmap[r['map'][v]] == v:
                    varsmap[v] = r['map'][v]
                else:
                    outmess('\t\t\tIgnoring map "%s=>%s". See above.\n' %
                            (v, r['map'][v]))
            else:
                outmess(
                    '\t\t\tNo definition for variable "%s=>%s". Skipping.\n' % (v, r['map'][v]))
    else:
        for v in m['vars'].keys():
            if v in revmap:
                varsmap[v] = revmap[v]
            else:
                varsmap[v] = v
    for v in varsmap.keys():
        ret = dictappend(ret, buildusevar(v, varsmap[v], m['vars'], m['name']))
    return ret


def buildusevar(name, realname, vars, usemodulename):
    outmess('\t\t\tConstructing wrapper function for variable "%s=>%s"...\n' % (
        name, realname))
    ret = {}
    vrd = {'name': name,
           'realname': realname,
           'REALNAME': realname.upper(),
           'usemodulename': usemodulename,
           'USEMODULENAME': usemodulename.upper(),
           'texname': name.replace('_', '\\_'),
           'begintitle': gentitle('%s=>%s' % (name, realname)),
           'endtitle': gentitle('end of %s=>%s' % (name, realname)),
           'apiname': '#modulename#_use_%s_from_%s' % (realname, usemodulename)
           }
    nummap = {0: 'Ro', 1: 'Ri', 2: 'Rii', 3: 'Riii', 4: 'Riv',
              5: 'Rv', 6: 'Rvi', 7: 'Rvii', 8: 'Rviii', 9: 'Rix'}
    vrd['texnamename'] = name
    for i in nummap.keys():
        vrd['texnamename'] = vrd['texnamename'].replace(repr(i), nummap[i])
    if hasnote(vars[realname]):
        vrd['note'] = vars[realname]['note']
    rd = dictappend({}, vrd)

    print(name, realname, vars[realname])
    ret = applyrules(usemodule_rules, rd)
    return ret
.venv/lib/python3.11/site-packages/urllib3/__init__.py
ADDED
@@ -0,0 +1,211 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
|
3 |
+
"""
|
4 |
+
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
# Set default logging handler to avoid "No handler found" warnings.
|
8 |
+
import logging
|
9 |
+
import sys
|
10 |
+
import typing
|
11 |
+
import warnings
|
12 |
+
from logging import NullHandler
|
13 |
+
|
14 |
+
from . import exceptions
|
15 |
+
from ._base_connection import _TYPE_BODY
|
16 |
+
from ._collections import HTTPHeaderDict
|
17 |
+
from ._version import __version__
|
18 |
+
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
|
19 |
+
from .filepost import _TYPE_FIELDS, encode_multipart_formdata
|
20 |
+
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
|
21 |
+
from .response import BaseHTTPResponse, HTTPResponse
|
22 |
+
from .util.request import make_headers
|
23 |
+
from .util.retry import Retry
|
24 |
+
from .util.timeout import Timeout
|
25 |
+
|
26 |
+
# Ensure that Python is compiled with OpenSSL 1.1.1+
|
27 |
+
# If the 'ssl' module isn't available at all that's
|
28 |
+
# fine, we only care if the module is available.
|
29 |
+
try:
|
30 |
+
import ssl
|
31 |
+
except ImportError:
|
32 |
+
pass
|
33 |
+
else:
    if not ssl.OPENSSL_VERSION.startswith("OpenSSL "):  # Defensive:
        warnings.warn(
            "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
            f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
            "See: https://github.com/urllib3/urllib3/issues/3020",
            exceptions.NotOpenSSLWarning,
        )
    elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1):  # Defensive:
        raise ImportError(
            "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
            f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
            "See: https://github.com/urllib3/urllib3/issues/2168"
        )

__author__ = "Andrey Petrov ([email protected])"
__license__ = "MIT"
__version__ = __version__

__all__ = (
    "HTTPConnectionPool",
    "HTTPHeaderDict",
    "HTTPSConnectionPool",
    "PoolManager",
    "ProxyManager",
    "HTTPResponse",
    "Retry",
    "Timeout",
    "add_stderr_logger",
    "connection_from_url",
    "disable_warnings",
    "encode_multipart_formdata",
    "make_headers",
    "proxy_from_url",
    "request",
    "BaseHTTPResponse",
)

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(
    level: int = logging.DEBUG,
) -> logging.StreamHandler[typing.TextIO]:
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug("Added a stderr logging handler to logger: %s", __name__)
    return handler


# ... Clean up.
del NullHandler


# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)


def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None:
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    warnings.simplefilter("ignore", category)


_DEFAULT_POOL = PoolManager()


def request(
    method: str,
    url: str,
    *,
    body: _TYPE_BODY | None = None,
    fields: _TYPE_FIELDS | None = None,
    headers: typing.Mapping[str, str] | None = None,
    preload_content: bool | None = True,
    decode_content: bool | None = True,
    redirect: bool | None = True,
    retries: Retry | bool | int | None = None,
    timeout: Timeout | float | int | None = 3,
    json: typing.Any | None = None,
) -> BaseHTTPResponse:
    """
    A convenience, top-level request method. It uses a module-global ``PoolManager`` instance.
    Therefore, its side effects could be shared across dependencies relying on it.
    To avoid side effects create a new ``PoolManager`` instance and use it instead.
    The method does not accept low-level ``**urlopen_kw`` keyword arguments.

    :param method:
        HTTP request method (such as GET, POST, PUT, etc.)

    :param url:
        The URL to perform the request on.

    :param body:
        Data to send in the request body, either :class:`str`, :class:`bytes`,
        an iterable of :class:`str`/:class:`bytes`, or a file-like object.

    :param fields:
        Data to encode and send in the request body.

    :param headers:
        Dictionary of custom headers to send, such as User-Agent,
        If-None-Match, etc.

    :param bool preload_content:
        If True, the response's body will be preloaded into memory.

    :param bool decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.

    :param redirect:
        If True, automatically handle redirects (status codes 301, 302,
        303, 307, 308). Each redirect counts as a retry. Disabling retries
        will disable redirect, too.

    :param retries:
        Configure the number of retries to allow before raising a
        :class:`~urllib3.exceptions.MaxRetryError` exception.

        If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
        :class:`~urllib3.util.retry.Retry` object for fine-grained control
        over different types of retries.
        Pass an integer number to retry connection errors that many times,
        but no other types of errors. Pass zero to never retry.

        If ``False``, then retries are disabled and any exception is raised
        immediately. Also, instead of raising a MaxRetryError on redirects,
        the redirect response will be returned.

    :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

    :param timeout:
        If specified, overrides the default timeout for this one
        request. It may be a float (in seconds) or an instance of
        :class:`urllib3.util.Timeout`.

    :param json:
        Data to encode and send as JSON with UTF-encoded in the request body.
        The ``"Content-Type"`` header will be set to ``"application/json"``
        unless specified otherwise.
    """

    return _DEFAULT_POOL.request(
        method,
        url,
        body=body,
        fields=fields,
        headers=headers,
        preload_content=preload_content,
        decode_content=decode_content,
        redirect=redirect,
        retries=retries,
        timeout=timeout,
        json=json,
    )


if sys.platform == "emscripten":
    from .contrib.emscripten import inject_into_urllib3  # noqa: 401

    inject_into_urllib3()
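Note: the convenience API added above (`urllib3.request`, `add_stderr_logger`, `disable_warnings`) can be exercised as in the short sketch below. This is only an illustrative usage sketch, not part of the diff; the target URL is a placeholder and the snippet assumes the vendored package is importable as `urllib3`.

import urllib3

# Optional: send urllib3's DEBUG logs to stderr while experimenting.
handler = urllib3.add_stderr_logger()

# One-off request through the module-global PoolManager defined above.
resp = urllib3.request("GET", "https://example.com/", timeout=3.0)
print(resp.status, resp.headers.get("Content-Type"))

# Silence all urllib3 warnings (HTTPWarning subclasses) if desired.
urllib3.disable_warnings()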
.venv/lib/python3.11/site-packages/urllib3/_base_connection.py
ADDED
@@ -0,0 +1,165 @@
from __future__ import annotations

import typing

from .util.connection import _TYPE_SOCKET_OPTIONS
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from .util.url import Url

_TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str]


class ProxyConfig(typing.NamedTuple):
    ssl_context: ssl.SSLContext | None
    use_forwarding_for_https: bool
    assert_hostname: None | str | typing.Literal[False]
    assert_fingerprint: str | None


class _ResponseOptions(typing.NamedTuple):
    # TODO: Remove this in favor of a better
    # HTTP request/response lifecycle tracking.
    request_method: str
    request_url: str
    preload_content: bool
    decode_content: bool
    enforce_content_length: bool


if typing.TYPE_CHECKING:
    import ssl
    from typing import Protocol

    from .response import BaseHTTPResponse

    class BaseHTTPConnection(Protocol):
        default_port: typing.ClassVar[int]
        default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]

        host: str
        port: int
        timeout: None | (
            float
        )  # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved.
        blocksize: int
        source_address: tuple[str, int] | None
        socket_options: _TYPE_SOCKET_OPTIONS | None

        proxy: Url | None
        proxy_config: ProxyConfig | None

        is_verified: bool
        proxy_is_verified: bool | None

        def __init__(
            self,
            host: str,
            port: int | None = None,
            *,
            timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
            source_address: tuple[str, int] | None = None,
            blocksize: int = 8192,
            socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
            proxy: Url | None = None,
            proxy_config: ProxyConfig | None = None,
        ) -> None: ...

        def set_tunnel(
            self,
            host: str,
            port: int | None = None,
            headers: typing.Mapping[str, str] | None = None,
            scheme: str = "http",
        ) -> None: ...

        def connect(self) -> None: ...

        def request(
            self,
            method: str,
            url: str,
            body: _TYPE_BODY | None = None,
            headers: typing.Mapping[str, str] | None = None,
            # We know *at least* botocore is depending on the order of the
            # first 3 parameters so to be safe we only mark the later ones
            # as keyword-only to ensure we have space to extend.
            *,
            chunked: bool = False,
            preload_content: bool = True,
            decode_content: bool = True,
            enforce_content_length: bool = True,
        ) -> None: ...

        def getresponse(self) -> BaseHTTPResponse: ...

        def close(self) -> None: ...

        @property
        def is_closed(self) -> bool:
            """Whether the connection either is brand new or has been previously closed.
            If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
            properties must be False.
            """

        @property
        def is_connected(self) -> bool:
            """Whether the connection is actively connected to any origin (proxy or target)"""

        @property
        def has_connected_to_proxy(self) -> bool:
            """Whether the connection has successfully connected to its proxy.
            This returns False if no proxy is in use. Used to determine whether
            errors are coming from the proxy layer or from tunnelling to the target origin.
            """

    class BaseHTTPSConnection(BaseHTTPConnection, Protocol):
        default_port: typing.ClassVar[int]
        default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]

        # Certificate verification methods
        cert_reqs: int | str | None
        assert_hostname: None | str | typing.Literal[False]
        assert_fingerprint: str | None
        ssl_context: ssl.SSLContext | None

        # Trusted CAs
        ca_certs: str | None
        ca_cert_dir: str | None
        ca_cert_data: None | str | bytes

        # TLS version
        ssl_minimum_version: int | None
        ssl_maximum_version: int | None
        ssl_version: int | str | None  # Deprecated

        # Client certificates
        cert_file: str | None
        key_file: str | None
        key_password: str | None

        def __init__(
            self,
            host: str,
            port: int | None = None,
            *,
            timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
            source_address: tuple[str, int] | None = None,
            blocksize: int = 16384,
            socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
            proxy: Url | None = None,
            proxy_config: ProxyConfig | None = None,
            cert_reqs: int | str | None = None,
            assert_hostname: None | str | typing.Literal[False] = None,
            assert_fingerprint: str | None = None,
            server_hostname: str | None = None,
            ssl_context: ssl.SSLContext | None = None,
            ca_certs: str | None = None,
            ca_cert_dir: str | None = None,
            ca_cert_data: None | str | bytes = None,
            ssl_minimum_version: int | None = None,
            ssl_maximum_version: int | None = None,
            ssl_version: int | str | None = None,  # Deprecated
            cert_file: str | None = None,
            key_file: str | None = None,
            key_password: str | None = None,
        ) -> None: ...
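The `BaseHTTPConnection`/`BaseHTTPSConnection` protocols above only exist under `typing.TYPE_CHECKING`, so they are consumed by static type checkers rather than at runtime. A minimal sketch of how an implementation is checked against them (the same trick the emscripten connection module uses further down) might look like this; it assumes urllib3's own `HTTPConnection` as the implementation being verified.

import typing

from urllib3.connection import HTTPConnection

if typing.TYPE_CHECKING:
    from urllib3._base_connection import BaseHTTPConnection

    # Assigning an instance to a variable annotated with the protocol makes
    # mypy/pyright verify structural compatibility; nothing runs at runtime.
    _implements_protocol: BaseHTTPConnection = HTTPConnection("localhost", 80)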
.venv/lib/python3.11/site-packages/urllib3/_version.py
ADDED
@@ -0,0 +1,16 @@
# file generated by setuptools_scm
# don't change, don't track in version control
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union
    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '2.3.0'
__version_tuple__ = version_tuple = (2, 3, 0)
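A quick way to confirm which vendored version is active is to read the metadata generated above; this trivial sketch assumes nothing beyond the attributes defined in _version.py and the top-level re-export.

import urllib3

print(urllib3.__version__)             # e.g. '2.3.0'
print(urllib3._version.version_tuple)  # e.g. (2, 3, 0)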
.venv/lib/python3.11/site-packages/urllib3/contrib/__init__.py
ADDED
File without changes
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__init__.py
ADDED
@@ -0,0 +1,16 @@
from __future__ import annotations

import urllib3.connection

from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection


def inject_into_urllib3() -> None:
    # override connection classes to use emscripten specific classes
    # n.b. mypy complains about the overriding of classes below
    # if it isn't ignored
    HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
    HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
    urllib3.connection.HTTPConnection = EmscriptenHTTPConnection  # type: ignore[misc,assignment]
    urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection  # type: ignore[misc,assignment]
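As the package's top-level `__init__.py` shows, `inject_into_urllib3()` is invoked automatically when `sys.platform == "emscripten"`. Calling it by hand, as in this sketch, is only for experimentation (for example inside a Pyodide console) and assumes nothing beyond the function defined above.

import sys

if sys.platform == "emscripten":
    from urllib3.contrib.emscripten import inject_into_urllib3

    # Swap urllib3's connection classes for the fetch/XHR-backed ones.
    inject_into_urllib3()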
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__pycache__/connection.cpython-311.pyc
ADDED
Binary file (10.6 kB)
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/__pycache__/request.cpython-311.pyc
ADDED
Binary file (1.65 kB)
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/connection.py
ADDED
@@ -0,0 +1,255 @@
from __future__ import annotations

import os
import typing

# use http.client.HTTPException for consistency with non-emscripten
from http.client import HTTPException as HTTPException  # noqa: F401
from http.client import ResponseNotReady

from ..._base_connection import _TYPE_BODY
from ...connection import HTTPConnection, ProxyConfig, port_by_scheme
from ...exceptions import TimeoutError
from ...response import BaseHTTPResponse
from ...util.connection import _TYPE_SOCKET_OPTIONS
from ...util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from ...util.url import Url
from .fetch import _RequestError, _TimeoutError, send_request, send_streaming_request
from .request import EmscriptenRequest
from .response import EmscriptenHttpResponseWrapper, EmscriptenResponse

if typing.TYPE_CHECKING:
    from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection


class EmscriptenHTTPConnection:
    default_port: typing.ClassVar[int] = port_by_scheme["http"]
    default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]

    timeout: None | (float)

    host: str
    port: int
    blocksize: int
    source_address: tuple[str, int] | None
    socket_options: _TYPE_SOCKET_OPTIONS | None

    proxy: Url | None
    proxy_config: ProxyConfig | None

    is_verified: bool = False
    proxy_is_verified: bool | None = None

    _response: EmscriptenResponse | None

    def __init__(
        self,
        host: str,
        port: int = 0,
        *,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        source_address: tuple[str, int] | None = None,
        blocksize: int = 8192,
        socket_options: _TYPE_SOCKET_OPTIONS | None = None,
        proxy: Url | None = None,
        proxy_config: ProxyConfig | None = None,
    ) -> None:
        self.host = host
        self.port = port
        self.timeout = timeout if isinstance(timeout, float) else 0.0
        self.scheme = "http"
        self._closed = True
        self._response = None
        # ignore these things because we don't
        # have control over that stuff
        self.proxy = None
        self.proxy_config = None
        self.blocksize = blocksize
        self.source_address = None
        self.socket_options = None
        self.is_verified = False

    def set_tunnel(
        self,
        host: str,
        port: int | None = 0,
        headers: typing.Mapping[str, str] | None = None,
        scheme: str = "http",
    ) -> None:
        pass

    def connect(self) -> None:
        pass

    def request(
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        # We know *at least* botocore is depending on the order of the
        # first 3 parameters so to be safe we only mark the later ones
        # as keyword-only to ensure we have space to extend.
        *,
        chunked: bool = False,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> None:
        self._closed = False
        if url.startswith("/"):
            # no scheme / host / port included, make a full url
            url = f"{self.scheme}://{self.host}:{self.port}" + url
        request = EmscriptenRequest(
            url=url,
            method=method,
            timeout=self.timeout if self.timeout else 0,
            decode_content=decode_content,
        )
        request.set_body(body)
        if headers:
            for k, v in headers.items():
                request.set_header(k, v)
        self._response = None
        try:
            if not preload_content:
                self._response = send_streaming_request(request)
            if self._response is None:
                self._response = send_request(request)
        except _TimeoutError as e:
            raise TimeoutError(e.message) from e
        except _RequestError as e:
            raise HTTPException(e.message) from e

    def getresponse(self) -> BaseHTTPResponse:
        if self._response is not None:
            return EmscriptenHttpResponseWrapper(
                internal_response=self._response,
                url=self._response.request.url,
                connection=self,
            )
        else:
            raise ResponseNotReady()

    def close(self) -> None:
        self._closed = True
        self._response = None

    @property
    def is_closed(self) -> bool:
        """Whether the connection either is brand new or has been previously closed.
        If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
        properties must be False.
        """
        return self._closed

    @property
    def is_connected(self) -> bool:
        """Whether the connection is actively connected to any origin (proxy or target)"""
        return True

    @property
    def has_connected_to_proxy(self) -> bool:
        """Whether the connection has successfully connected to its proxy.
        This returns False if no proxy is in use. Used to determine whether
        errors are coming from the proxy layer or from tunnelling to the target origin.
        """
        return False


class EmscriptenHTTPSConnection(EmscriptenHTTPConnection):
    default_port = port_by_scheme["https"]
    # all this is basically ignored, as browser handles https
    cert_reqs: int | str | None = None
    ca_certs: str | None = None
    ca_cert_dir: str | None = None
    ca_cert_data: None | str | bytes = None
    cert_file: str | None
    key_file: str | None
    key_password: str | None
    ssl_context: typing.Any | None
    ssl_version: int | str | None = None
    ssl_minimum_version: int | None = None
    ssl_maximum_version: int | None = None
    assert_hostname: None | str | typing.Literal[False]
    assert_fingerprint: str | None = None

    def __init__(
        self,
        host: str,
        port: int = 0,
        *,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        source_address: tuple[str, int] | None = None,
        blocksize: int = 16384,
        socket_options: (
            None | _TYPE_SOCKET_OPTIONS
        ) = HTTPConnection.default_socket_options,
        proxy: Url | None = None,
        proxy_config: ProxyConfig | None = None,
        cert_reqs: int | str | None = None,
        assert_hostname: None | str | typing.Literal[False] = None,
        assert_fingerprint: str | None = None,
        server_hostname: str | None = None,
        ssl_context: typing.Any | None = None,
        ca_certs: str | None = None,
        ca_cert_dir: str | None = None,
        ca_cert_data: None | str | bytes = None,
        ssl_minimum_version: int | None = None,
        ssl_maximum_version: int | None = None,
        ssl_version: int | str | None = None,  # Deprecated
        cert_file: str | None = None,
        key_file: str | None = None,
        key_password: str | None = None,
    ) -> None:
        super().__init__(
            host,
            port=port,
            timeout=timeout,
            source_address=source_address,
            blocksize=blocksize,
            socket_options=socket_options,
            proxy=proxy,
            proxy_config=proxy_config,
        )
        self.scheme = "https"

        self.key_file = key_file
        self.cert_file = cert_file
        self.key_password = key_password
        self.ssl_context = ssl_context
        self.server_hostname = server_hostname
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
        self.ca_cert_data = ca_cert_data

        self.cert_reqs = None

        # The browser will automatically verify all requests.
        # We have no control over that setting.
        self.is_verified = True

    def set_cert(
        self,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        assert_hostname: None | str | typing.Literal[False] = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        ca_cert_data: None | str | bytes = None,
    ) -> None:
        pass


# verify that this class implements BaseHTTP(s) connection correctly
if typing.TYPE_CHECKING:
    _supports_http_protocol: BaseHTTPConnection = EmscriptenHTTPConnection("", 0)
    _supports_https_protocol: BaseHTTPSConnection = EmscriptenHTTPSConnection("", 0)
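Under Pyodide the classes above are normally reached through the swapped-in `ConnectionCls`, but they can also be driven directly. The sketch below is illustrative only: the host and path are placeholders, and it assumes it runs inside an emscripten/Pyodide environment where the `.fetch` helpers work.

from urllib3.contrib.emscripten.connection import EmscriptenHTTPSConnection

conn = EmscriptenHTTPSConnection("example.com", 443)
conn.request("GET", "/", preload_content=True)
response = conn.getresponse()  # an EmscriptenHttpResponseWrapper
print(response.status, len(response.data))
conn.close()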
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js
ADDED
@@ -0,0 +1,110 @@
let Status = {
  SUCCESS_HEADER: -1,
  SUCCESS_EOF: -2,
  ERROR_TIMEOUT: -3,
  ERROR_EXCEPTION: -4,
};

let connections = {};
let nextConnectionID = 1;
const encoder = new TextEncoder();

self.addEventListener("message", async function (event) {
  if (event.data.close) {
    let connectionID = event.data.close;
    delete connections[connectionID];
    return;
  } else if (event.data.getMore) {
    let connectionID = event.data.getMore;
    let { curOffset, value, reader, intBuffer, byteBuffer } =
      connections[connectionID];
    // if we still have some in buffer, then just send it back straight away
    if (!value || curOffset >= value.length) {
      // read another buffer if required
      try {
        let readResponse = await reader.read();

        if (readResponse.done) {
          // read everything - clear connection and return
          delete connections[connectionID];
          Atomics.store(intBuffer, 0, Status.SUCCESS_EOF);
          Atomics.notify(intBuffer, 0);
          // finished reading successfully
          // return from event handler
          return;
        }
        curOffset = 0;
        connections[connectionID].value = readResponse.value;
        value = readResponse.value;
      } catch (error) {
        console.log("Request exception:", error);
        let errorBytes = encoder.encode(error.message);
        let written = errorBytes.length;
        byteBuffer.set(errorBytes);
        intBuffer[1] = written;
        Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
        Atomics.notify(intBuffer, 0);
      }
    }

    // send as much buffer as we can
    let curLen = value.length - curOffset;
    if (curLen > byteBuffer.length) {
      curLen = byteBuffer.length;
    }
    byteBuffer.set(value.subarray(curOffset, curOffset + curLen), 0);

    Atomics.store(intBuffer, 0, curLen); // store current length in bytes
    Atomics.notify(intBuffer, 0);
    curOffset += curLen;
    connections[connectionID].curOffset = curOffset;

    return;
  } else {
    // start fetch
    let connectionID = nextConnectionID;
    nextConnectionID += 1;
    const intBuffer = new Int32Array(event.data.buffer);
    const byteBuffer = new Uint8Array(event.data.buffer, 8);
    try {
      const response = await fetch(event.data.url, event.data.fetchParams);
      // return the headers first via textencoder
      var headers = [];
      for (const pair of response.headers.entries()) {
        headers.push([pair[0], pair[1]]);
      }
      let headerObj = {
        headers: headers,
        status: response.status,
        connectionID,
      };
      const headerText = JSON.stringify(headerObj);
      let headerBytes = encoder.encode(headerText);
      let written = headerBytes.length;
      byteBuffer.set(headerBytes);
      intBuffer[1] = written;
      // make a connection
      connections[connectionID] = {
        reader: response.body.getReader(),
        intBuffer: intBuffer,
        byteBuffer: byteBuffer,
        value: undefined,
        curOffset: 0,
      };
      // set header ready
      Atomics.store(intBuffer, 0, Status.SUCCESS_HEADER);
      Atomics.notify(intBuffer, 0);
      // all fetching after this goes through a new postmessage call with getMore
      // this allows for parallel requests
    } catch (error) {
      console.log("Request exception:", error);
      let errorBytes = encoder.encode(error.message);
      let written = errorBytes.length;
      byteBuffer.set(errorBytes);
      intBuffer[1] = written;
      Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
      Atomics.notify(intBuffer, 0);
    }
  }
});
self.postMessage({ inited: true });
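The worker above and fetch.py below share a simple SharedArrayBuffer convention: the Int32 slot at index 0 carries the status or chunk length and is used with Atomics.wait/notify, the Int32 slot at index 1 carries the length of a JSON header or error payload, and the payload bytes start at byte offset 8. A minimal Python-side sketch of that layout, mirroring what `_StreamingFetcher.send` does and assuming it runs under Pyodide where the `js` module exists:

import js  # available inside Pyodide only

BUFFER_SIZE = 1048576  # the same 1 MiB buffer fetch.py allocates
shared = js.SharedArrayBuffer.new(BUFFER_SIZE)
int_view = js.Int32Array.new(shared)      # int_view[0] = status, int_view[1] = payload length
byte_view = js.Uint8Array.new(shared, 8)  # payload region starts at byte offset 8

# Park the status slot on ERROR_TIMEOUT (-3) before posting to the worker,
# then Atomics.wait on slot 0 until the worker stores a different value.
js.Atomics.store(int_view, 0, -3)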
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/fetch.py
ADDED
@@ -0,0 +1,708 @@
"""
Support for streaming http requests in emscripten.

A few caveats -

If your browser (or Node.js) has WebAssembly JavaScript Promise Integration enabled
https://github.com/WebAssembly/js-promise-integration/blob/main/proposals/js-promise-integration/Overview.md
*and* you launch pyodide using `pyodide.runPythonAsync`, this will fetch data using the
JavaScript asynchronous fetch api (wrapped via `pyodide.ffi.call_sync`). In this case
timeouts and streaming should just work.

Otherwise, it uses a combination of XMLHttpRequest and a web-worker for streaming.

This approach has several caveats:

Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
Streaming only works if you're running pyodide in a web worker.

Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
operation, so it requires that you have crossOriginIsolation enabled, by serving over https
(or from localhost) with the two headers below set:

    Cross-Origin-Opener-Policy: same-origin
    Cross-Origin-Embedder-Policy: require-corp

You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
JavaScript console. If it isn't, streaming requests will fallback to XMLHttpRequest, i.e. getting the whole
request into a buffer and then returning it. it shows a warning in the JavaScript console in this case.

Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.

NB: in this code, there are a lot of JavaScript objects. They are named js_*
to make it clear what type of object they are.
"""

from __future__ import annotations

import io
import json
from email.parser import Parser
from importlib.resources import files
from typing import TYPE_CHECKING, Any

import js  # type: ignore[import-not-found]
from pyodide.ffi import (  # type: ignore[import-not-found]
    JsArray,
    JsException,
    JsProxy,
    to_js,
)

if TYPE_CHECKING:
    from typing_extensions import Buffer

    from .request import EmscriptenRequest
    from .response import EmscriptenResponse

"""
There are some headers that trigger unintended CORS preflight requests.
See also https://github.com/koenvo/pyodide-http/issues/22
"""
HEADERS_TO_IGNORE = ("user-agent",)

SUCCESS_HEADER = -1
SUCCESS_EOF = -2
ERROR_TIMEOUT = -3
ERROR_EXCEPTION = -4

_STREAMING_WORKER_CODE = (
    files(__package__)
    .joinpath("emscripten_fetch_worker.js")
    .read_text(encoding="utf-8")
)


class _RequestError(Exception):
    def __init__(
        self,
        message: str | None = None,
        *,
        request: EmscriptenRequest | None = None,
        response: EmscriptenResponse | None = None,
    ):
        self.request = request
        self.response = response
        self.message = message
        super().__init__(self.message)


class _StreamingError(_RequestError):
    pass


class _TimeoutError(_RequestError):
    pass


def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy:
    return to_js(dict_val, dict_converter=js.Object.fromEntries)


class _ReadStream(io.RawIOBase):
    def __init__(
        self,
        int_buffer: JsArray,
        byte_buffer: JsArray,
        timeout: float,
        worker: JsProxy,
        connection_id: int,
        request: EmscriptenRequest,
    ):
        self.int_buffer = int_buffer
        self.byte_buffer = byte_buffer
        self.read_pos = 0
        self.read_len = 0
        self.connection_id = connection_id
        self.worker = worker
        self.timeout = int(1000 * timeout) if timeout > 0 else None
        self.is_live = True
        self._is_closed = False
        self.request: EmscriptenRequest | None = request

    def __del__(self) -> None:
        self.close()

    # this is compatible with _base_connection
    def is_closed(self) -> bool:
        return self._is_closed

    # for compatibility with RawIOBase
    @property
    def closed(self) -> bool:
        return self.is_closed()

    def close(self) -> None:
        if self.is_closed():
            return
        self.read_len = 0
        self.read_pos = 0
        self.int_buffer = None
        self.byte_buffer = None
        self._is_closed = True
        self.request = None
        if self.is_live:
            self.worker.postMessage(_obj_from_dict({"close": self.connection_id}))
            self.is_live = False
        super().close()

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        return False

    def readinto(self, byte_obj: Buffer) -> int:
        if not self.int_buffer:
            raise _StreamingError(
                "No buffer for stream in _ReadStream.readinto",
                request=self.request,
                response=None,
            )
        if self.read_len == 0:
            # wait for the worker to send something
            js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT)
            self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id}))
            if (
                js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout)
                == "timed-out"
            ):
                raise _TimeoutError
            data_len = self.int_buffer[0]
            if data_len > 0:
                self.read_len = data_len
                self.read_pos = 0
            elif data_len == ERROR_EXCEPTION:
                string_len = self.int_buffer[1]
                # decode the error string
                js_decoder = js.TextDecoder.new()
                json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len))
                raise _StreamingError(
                    f"Exception thrown in fetch: {json_str}",
                    request=self.request,
                    response=None,
                )
            else:
                # EOF, free the buffers and return zero
                # and free the request
                self.is_live = False
                self.close()
                return 0
        # copy from int32array to python bytes
        ret_length = min(self.read_len, len(memoryview(byte_obj)))
        subarray = self.byte_buffer.subarray(
            self.read_pos, self.read_pos + ret_length
        ).to_py()
        memoryview(byte_obj)[0:ret_length] = subarray
        self.read_len -= ret_length
        self.read_pos += ret_length
        return ret_length


class _StreamingFetcher:
    def __init__(self) -> None:
        # make web-worker and data buffer on startup
        self.streaming_ready = False

        js_data_blob = js.Blob.new(
            to_js([_STREAMING_WORKER_CODE], create_pyproxies=False),
            _obj_from_dict({"type": "application/javascript"}),
        )

        def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None:
            def onMsg(e: JsProxy) -> None:
                self.streaming_ready = True
                js_resolve_fn(e)

            def onErr(e: JsProxy) -> None:
                js_reject_fn(e)  # Defensive: never happens in ci

            self.js_worker.onmessage = onMsg
            self.js_worker.onerror = onErr

        js_data_url = js.URL.createObjectURL(js_data_blob)
        self.js_worker = js.globalThis.Worker.new(js_data_url)
        self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver)

    def send(self, request: EmscriptenRequest) -> EmscriptenResponse:
        headers = {
            k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE
        }

        body = request.body
        fetch_data = {"headers": headers, "body": to_js(body), "method": request.method}
        # start the request off in the worker
        timeout = int(1000 * request.timeout) if request.timeout > 0 else None
        js_shared_buffer = js.SharedArrayBuffer.new(1048576)
        js_int_buffer = js.Int32Array.new(js_shared_buffer)
        js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8)

        js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT)
        js.Atomics.notify(js_int_buffer, 0)
        js_absolute_url = js.URL.new(request.url, js.location).href
        self.js_worker.postMessage(
            _obj_from_dict(
                {
                    "buffer": js_shared_buffer,
                    "url": js_absolute_url,
                    "fetchParams": fetch_data,
                }
            )
        )
        # wait for the worker to send something
        js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout)
        if js_int_buffer[0] == ERROR_TIMEOUT:
            raise _TimeoutError(
                "Timeout connecting to streaming request",
                request=request,
                response=None,
            )
        elif js_int_buffer[0] == SUCCESS_HEADER:
            # got response
            # header length is in second int of intBuffer
            string_len = js_int_buffer[1]
            # decode the rest to a JSON string
            js_decoder = js.TextDecoder.new()
            # this does a copy (the slice) because decode can't work on shared array
            # for some silly reason
            json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
            # get it as an object
            response_obj = json.loads(json_str)
            return EmscriptenResponse(
                request=request,
                status_code=response_obj["status"],
                headers=response_obj["headers"],
                body=_ReadStream(
                    js_int_buffer,
                    js_byte_buffer,
                    request.timeout,
                    self.js_worker,
                    response_obj["connectionID"],
                    request,
                ),
            )
        elif js_int_buffer[0] == ERROR_EXCEPTION:
            string_len = js_int_buffer[1]
            # decode the error string
            js_decoder = js.TextDecoder.new()
            json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
            raise _StreamingError(
                f"Exception thrown in fetch: {json_str}", request=request, response=None
            )
        else:
            raise _StreamingError(
                f"Unknown status from worker in fetch: {js_int_buffer[0]}",
                request=request,
                response=None,
            )


class _JSPIReadStream(io.RawIOBase):
    """
    A read stream that uses pyodide.ffi.run_sync to read from a JavaScript fetch
    response. This requires support for WebAssembly JavaScript Promise Integration
    in the containing browser, and for pyodide to be launched via runPythonAsync.

    :param js_read_stream:
        The JavaScript stream reader

    :param timeout:
        Timeout in seconds

    :param request:
        The request we're handling

    :param response:
        The response this stream relates to

    :param js_abort_controller:
        A JavaScript AbortController object, used for timeouts
    """

    def __init__(
        self,
        js_read_stream: Any,
        timeout: float,
        request: EmscriptenRequest,
        response: EmscriptenResponse,
        js_abort_controller: Any,  # JavaScript AbortController for timeouts
    ):
        self.js_read_stream = js_read_stream
        self.timeout = timeout
        self._is_closed = False
        self._is_done = False
        self.request: EmscriptenRequest | None = request
        self.response: EmscriptenResponse | None = response
        self.current_buffer = None
        self.current_buffer_pos = 0
        self.js_abort_controller = js_abort_controller

    def __del__(self) -> None:
        self.close()

    # this is compatible with _base_connection
    def is_closed(self) -> bool:
        return self._is_closed

    # for compatibility with RawIOBase
    @property
    def closed(self) -> bool:
        return self.is_closed()

    def close(self) -> None:
        if self.is_closed():
            return
        self.read_len = 0
        self.read_pos = 0
        self.js_read_stream.cancel()
        self.js_read_stream = None
        self._is_closed = True
        self._is_done = True
        self.request = None
        self.response = None
        super().close()

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        return False

    def _get_next_buffer(self) -> bool:
        result_js = _run_sync_with_timeout(
            self.js_read_stream.read(),
            self.timeout,
            self.js_abort_controller,
            request=self.request,
            response=self.response,
        )
        if result_js.done:
            self._is_done = True
            return False
        else:
            self.current_buffer = result_js.value.to_py()
            self.current_buffer_pos = 0
            return True

    def readinto(self, byte_obj: Buffer) -> int:
        if self.current_buffer is None:
            if not self._get_next_buffer() or self.current_buffer is None:
                self.close()
                return 0
        ret_length = min(
            len(byte_obj), len(self.current_buffer) - self.current_buffer_pos
        )
        byte_obj[0:ret_length] = self.current_buffer[
            self.current_buffer_pos : self.current_buffer_pos + ret_length
        ]
        self.current_buffer_pos += ret_length
        if self.current_buffer_pos == len(self.current_buffer):
            self.current_buffer = None
        return ret_length


# check if we are in a worker or not
def is_in_browser_main_thread() -> bool:
    return hasattr(js, "window") and hasattr(js, "self") and js.self == js.window


def is_cross_origin_isolated() -> bool:
    return hasattr(js, "crossOriginIsolated") and js.crossOriginIsolated


def is_in_node() -> bool:
    return (
        hasattr(js, "process")
        and hasattr(js.process, "release")
        and hasattr(js.process.release, "name")
        and js.process.release.name == "node"
    )


def is_worker_available() -> bool:
    return hasattr(js, "Worker") and hasattr(js, "Blob")


_fetcher: _StreamingFetcher | None = None

if is_worker_available() and (
    (is_cross_origin_isolated() and not is_in_browser_main_thread())
    and (not is_in_node())
):
    _fetcher = _StreamingFetcher()
else:
    _fetcher = None


NODE_JSPI_ERROR = (
    "urllib3 only works in Node.js with pyodide.runPythonAsync"
    " and requires the flag --experimental-wasm-stack-switching in "
    " versions of node <24."
)


def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None:
    if has_jspi():
        return send_jspi_request(request, True)
    elif is_in_node():
        raise _RequestError(
            message=NODE_JSPI_ERROR,
            request=request,
            response=None,
        )

    if _fetcher and streaming_ready():
        return _fetcher.send(request)
    else:
        _show_streaming_warning()
        return None


_SHOWN_TIMEOUT_WARNING = False


def _show_timeout_warning() -> None:
    global _SHOWN_TIMEOUT_WARNING
    if not _SHOWN_TIMEOUT_WARNING:
        _SHOWN_TIMEOUT_WARNING = True
        message = "Warning: Timeout is not available on main browser thread"
        js.console.warn(message)


_SHOWN_STREAMING_WARNING = False


def _show_streaming_warning() -> None:
    global _SHOWN_STREAMING_WARNING
    if not _SHOWN_STREAMING_WARNING:
        _SHOWN_STREAMING_WARNING = True
        message = "Can't stream HTTP requests because: \n"
        if not is_cross_origin_isolated():
            message += " Page is not cross-origin isolated\n"
        if is_in_browser_main_thread():
            message += " Python is running in main browser thread\n"
        if not is_worker_available():
            message += " Worker or Blob classes are not available in this environment."  # Defensive: this is always False in browsers that we test in
        if streaming_ready() is False:
            message += """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch
is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`"""
        from js import console

        console.warn(message)


def send_request(request: EmscriptenRequest) -> EmscriptenResponse:
    if has_jspi():
        return send_jspi_request(request, False)
    elif is_in_node():
        raise _RequestError(
            message=NODE_JSPI_ERROR,
            request=request,
            response=None,
        )
    try:
        js_xhr = js.XMLHttpRequest.new()

        if not is_in_browser_main_thread():
            js_xhr.responseType = "arraybuffer"
            if request.timeout:
                js_xhr.timeout = int(request.timeout * 1000)
        else:
            js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15")
            if request.timeout:
                # timeout isn't available on the main thread - show a warning in console
                # if it is set
                _show_timeout_warning()

        js_xhr.open(request.method, request.url, False)
        for name, value in request.headers.items():
            if name.lower() not in HEADERS_TO_IGNORE:
                js_xhr.setRequestHeader(name, value)

        js_xhr.send(to_js(request.body))

        headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders()))

        if not is_in_browser_main_thread():
            body = js_xhr.response.to_py().tobytes()
        else:
            body = js_xhr.response.encode("ISO-8859-15")
        return EmscriptenResponse(
            status_code=js_xhr.status, headers=headers, body=body, request=request
        )
    except JsException as err:
        if err.name == "TimeoutError":
            raise _TimeoutError(err.message, request=request)
        elif err.name == "NetworkError":
            raise _RequestError(err.message, request=request)
        else:
            # general http error
            raise _RequestError(err.message, request=request)


def send_jspi_request(
    request: EmscriptenRequest, streaming: bool
) -> EmscriptenResponse:
    """
    Send a request using WebAssembly JavaScript Promise Integration
    to wrap the asynchronous JavaScript fetch api (experimental).

    :param request:
        Request to send

    :param streaming:
        Whether to stream the response

    :return: The response object
    :rtype: EmscriptenResponse
    """
    timeout = request.timeout
    js_abort_controller = js.AbortController.new()
    headers = {k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE}
    req_body = request.body
    fetch_data = {
        "headers": headers,
        "body": to_js(req_body),
        "method": request.method,
        "signal": js_abort_controller.signal,
    }
    # Call JavaScript fetch (async api, returns a promise)
    fetcher_promise_js = js.fetch(request.url, _obj_from_dict(fetch_data))
    # Now suspend WebAssembly until we resolve that promise
    # or time out.
    response_js = _run_sync_with_timeout(
        fetcher_promise_js,
        timeout,
        js_abort_controller,
        request=request,
        response=None,
    )
    headers = {}
    header_iter = response_js.headers.entries()
    while True:
        iter_value_js = header_iter.next()
        if getattr(iter_value_js, "done", False):
            break
        else:
            headers[str(iter_value_js.value[0])] = str(iter_value_js.value[1])
    status_code = response_js.status
    body: bytes | io.RawIOBase = b""

    response = EmscriptenResponse(
        status_code=status_code, headers=headers, body=b"", request=request
    )
    if streaming:
        # get via inputstream
        if response_js.body is not None:
            # get a reader from the fetch response
            body_stream_js = response_js.body.getReader()
            body = _JSPIReadStream(
                body_stream_js, timeout, request, response, js_abort_controller
            )
    else:
        # get directly via arraybuffer
        # n.b. this is another async JavaScript call.
        body = _run_sync_with_timeout(
            response_js.arrayBuffer(),
            timeout,
            js_abort_controller,
            request=request,
            response=response,
        ).to_py()
    response.body = body
    return response


def _run_sync_with_timeout(
    promise: Any,
    timeout: float,
    js_abort_controller: Any,
    request: EmscriptenRequest | None,
    response: EmscriptenResponse | None,
) -> Any:
    """
    Await a JavaScript promise synchronously with a timeout which is implemented
    via the AbortController

    :param promise:
        Javascript promise to await

    :param timeout:
        Timeout in seconds

    :param js_abort_controller:
        A JavaScript AbortController object, used on timeout

    :param request:
        The request being handled

    :param response:
        The response being handled (if it exists yet)

    :raises _TimeoutError: If the request times out
    :raises _RequestError: If the request raises a JavaScript exception

    :return: The result of awaiting the promise.
    """
    timer_id = None
    if timeout > 0:
        timer_id = js.setTimeout(
            js_abort_controller.abort.bind(js_abort_controller), int(timeout * 1000)
        )
    try:
        from pyodide.ffi import run_sync

        # run_sync here uses WebAssembly JavaScript Promise Integration to
        # suspend python until the JavaScript promise resolves.
        return run_sync(promise)
    except JsException as err:
        if err.name == "AbortError":
            raise _TimeoutError(
                message="Request timed out", request=request, response=response
            )
        else:
            raise _RequestError(message=err.message, request=request, response=response)
    finally:
        if timer_id is not None:
            js.clearTimeout(timer_id)


def has_jspi() -> bool:
    """
    Return true if jspi can be used.

    This requires both browser support and also WebAssembly
    to be in the correct state - i.e. that the javascript
    call into python was async not sync.

    :return: True if jspi can be used.
    :rtype: bool
    """
    try:
        from pyodide.ffi import can_run_sync, run_sync  # noqa: F401

        return bool(can_run_sync())
    except ImportError:
        return False


def streaming_ready() -> bool | None:
    if _fetcher:
        return _fetcher.streaming_ready
    else:
        return None  # no fetcher, return None to signify that


async def wait_for_streaming_ready() -> bool:
    if _fetcher:
        await _fetcher.js_worker_ready_promise
        return True
    else:
        return False
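In line with the module docstring above, a caller that wants guaranteed streaming support has to hand control back to JavaScript once so the worker can start. The snippet below is a hedged sketch of that flow inside Pyodide (run via `pyodide.runPythonAsync`, where top-level await is available), using only the helpers defined in this module.

from urllib3.contrib.emscripten import fetch

if fetch.has_jspi():
    # JSPI path: streaming and timeouts work without the extra worker.
    pass
else:
    # Worker path: wait until the streaming fetch worker has initialised.
    ready = await fetch.wait_for_streaming_ready()
    if not ready:
        print("streaming unavailable; requests will fall back to XMLHttpRequest")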
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/request.py
ADDED
@@ -0,0 +1,22 @@
from __future__ import annotations

from dataclasses import dataclass, field

from ..._base_connection import _TYPE_BODY


@dataclass
class EmscriptenRequest:
    method: str
    url: str
    params: dict[str, str] | None = None
    body: _TYPE_BODY | None = None
    headers: dict[str, str] = field(default_factory=dict)
    timeout: float = 0
    decode_content: bool = True

    def set_header(self, name: str, value: str) -> None:
        self.headers[name.capitalize()] = value

    def set_body(self, body: _TYPE_BODY | None) -> None:
        self.body = body
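A small sketch of how the connection layer above populates this dataclass; the URL and header values are placeholders for illustration only.

from urllib3.contrib.emscripten.request import EmscriptenRequest

req = EmscriptenRequest(method="GET", url="https://example.com/data", timeout=5.0)
req.set_header("Accept", "application/json")  # stored capitalized, here as "Accept"
req.set_body(None)
print(req.headers, req.timeout)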
.venv/lib/python3.11/site-packages/urllib3/contrib/emscripten/response.py
ADDED
@@ -0,0 +1,285 @@
from __future__ import annotations

import json as _json
import logging
import typing
from contextlib import contextmanager
from dataclasses import dataclass
from http.client import HTTPException as HTTPException
from io import BytesIO, IOBase

from ...exceptions import InvalidHeader, TimeoutError
from ...response import BaseHTTPResponse
from ...util.retry import Retry
from .request import EmscriptenRequest

if typing.TYPE_CHECKING:
    from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection

log = logging.getLogger(__name__)


@dataclass
class EmscriptenResponse:
    status_code: int
    headers: dict[str, str]
    body: IOBase | bytes
    request: EmscriptenRequest


class EmscriptenHttpResponseWrapper(BaseHTTPResponse):
    def __init__(
        self,
        internal_response: EmscriptenResponse,
        url: str | None = None,
        connection: BaseHTTPConnection | BaseHTTPSConnection | None = None,
    ):
        self._pool = None  # set by pool class
        self._body = None
        self._response = internal_response
        self._url = url
        self._connection = connection
        self._closed = False
        super().__init__(
            headers=internal_response.headers,
            status=internal_response.status_code,
            request_url=url,
            version=0,
            version_string="HTTP/?",
            reason="",
            decode_content=True,
        )
        self.length_remaining = self._init_length(self._response.request.method)
        self.length_is_certain = False

    @property
    def url(self) -> str | None:
        return self._url

    @url.setter
    def url(self, url: str | None) -> None:
        self._url = url

    @property
    def connection(self) -> BaseHTTPConnection | BaseHTTPSConnection | None:
        return self._connection

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Override the request_url if retries has a redirect location.
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Generator[bytes]:
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        while True:
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data
            else:
                break

    def _init_length(self, request_method: str | None) -> int | None:
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        else:  # if content_length is None
            length = None

        # Check for responses that shouldn't include a body
        if (
            self.status in (204, 304)
            or 100 <= self.status < 200
            or request_method == "HEAD"
        ):
            length = 0

        return length

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,  # ignored because browser decodes always
        cache_content: bool = False,
    ) -> bytes:
        if (
            self._closed
            or self._response is None
            or (isinstance(self._response.body, IOBase) and self._response.body.closed)
        ):
            return b""

        with self._error_catcher():
            # body has been preloaded as a string by XmlHttpRequest
            if not isinstance(self._response.body, IOBase):
                self.length_remaining = len(self._response.body)
                self.length_is_certain = True
                # wrap body in IOStream
                self._response.body = BytesIO(self._response.body)
            if amt is not None and amt >= 0:
                # don't cache partial content
                cache_content = False
                data = self._response.body.read(amt)
                if self.length_remaining is not None:
                    self.length_remaining = max(self.length_remaining - len(data), 0)
                if (self.length_is_certain and self.length_remaining == 0) or len(
                    data
                ) < amt:
                    # definitely finished reading, close response stream
                    self._response.body.close()
                return typing.cast(bytes, data)
            else:  # read all we can (and cache it)
                data = self._response.body.read()
                if cache_content:
                    self._body = data
                if self.length_remaining is not None:
                    self.length_remaining = max(self.length_remaining - len(data), 0)
                if len(data) == 0 or (
                    self.length_is_certain and self.length_remaining == 0
                ):
                    # definitely finished reading, close response stream
                    self._response.body.close()
                return typing.cast(bytes, data)

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Generator[bytes]:
        # chunked is handled by browser
        while True:
            bytes = self.read(amt, decode_content)
            if not bytes:
                break
            yield bytes

    def release_conn(self) -> None:
        if not self._pool or not self._connection:
            return None

        self._pool._put_conn(self._connection)
        self._connection = None

    def drain_conn(self) -> None:
        self.close()

    @property
    def data(self) -> bytes:
        if self._body:
            return self._body
        else:
            return self.read(cache_content=True)

    def json(self) -> typing.Any:
        """
        Deserializes the body of the HTTP response as a Python object.

        The body of the HTTP response must be encoded using UTF-8, as per
        `RFC 8529 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
        your custom decoder instead.

        If the body of the HTTP response is not decodable to UTF-8, a
        `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
        valid JSON document, a `json.JSONDecodeError` will be raised.

        Read more :ref:`here <json_content>`.

        :returns: The body of the HTTP response as a Python object.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)

    def close(self) -> None:
        if not self._closed:
            if isinstance(self._response.body, IOBase):
                self._response.body.close()
            if self._connection:
                self._connection.close()
                self._connection = None
            self._closed = True

    @contextmanager
    def _error_catcher(self) -> typing.Generator[None]:
        """
        Catch Emscripten specific exceptions thrown by fetch.py,
        instead re-raising urllib3 variants, so that low-level exceptions
        are not leaked in the high-level api.

        On exit, release the connection back to the pool.
        """
        from .fetch import _RequestError, _TimeoutError  # avoid circular import

        clean_exit = False

        try:
            yield
            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        except _TimeoutError as e:
            raise TimeoutError(str(e))
        except _RequestError as e:
            raise HTTPException(str(e))
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now
                if (
                    isinstance(self._response.body, IOBase)
                    and not self._response.body.closed
                ):
                    self._response.body.close()
                # release the connection back to the pool
                self.release_conn()
            else:
                # If we have read everything from the response stream,
                # return the connection back to the pool.
                if (
                    isinstance(self._response.body, IOBase)
                    and self._response.body.closed
                ):
                    self.release_conn()
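A minimal usage sketch (not part of the vendored sources), assuming a Pyodide environment where the fetch module imports cleanly; the status, headers, and body values are placeholders used only to exercise the wrapper defined above:

# Illustrative only: wrap a preloaded response and read it via the
# BaseHTTPResponse-style API defined above.
from urllib3.contrib.emscripten.request import EmscriptenRequest
from urllib3.contrib.emscripten.response import (
    EmscriptenHttpResponseWrapper,
    EmscriptenResponse,
)

raw = EmscriptenResponse(
    status_code=200,
    headers={"content-type": "application/json", "content-length": "2"},
    body=b"{}",
    request=EmscriptenRequest(method="GET", url="https://example.com/"),
)
wrapped = EmscriptenHttpResponseWrapper(raw, url=raw.request.url)
print(wrapped.status)  # 200
print(wrapped.json())  # {} decoded from the UTF-8 body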
.venv/lib/python3.11/site-packages/urllib3/contrib/pyopenssl.py
ADDED
@@ -0,0 +1,554 @@
"""
Module for using pyOpenSSL as a TLS backend. This module was relevant before
the standard library ``ssl`` module supported SNI, but now that we've dropped
support for Python 2.7 all relevant Python versions support SNI so
**this module is no longer recommended**.

This needs the following packages installed:

* `pyOpenSSL`_ (tested with 16.0.0)
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
* `idna`_ (minimum 2.0)

However, pyOpenSSL depends on cryptography, so while we use all three directly here we
end up having relatively few packages required.

You can install them with the following command:

.. code-block:: bash

    $ python -m pip install pyopenssl cryptography idna

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this:

.. code-block:: python

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

.. _pyopenssl: https://www.pyopenssl.org
.. _cryptography: https://cryptography.io
.. _idna: https://github.com/kjd/idna
"""

from __future__ import annotations

import OpenSSL.SSL  # type: ignore[import-untyped]
from cryptography import x509

try:
    from cryptography.x509 import UnsupportedExtension  # type: ignore[attr-defined]
except ImportError:
    # UnsupportedExtension is gone in cryptography >= 2.1.0
    class UnsupportedExtension(Exception):  # type: ignore[no-redef]
        pass


import logging
import ssl
import typing
from io import BytesIO
from socket import socket as socket_cls
from socket import timeout

from .. import util

if typing.TYPE_CHECKING:
    from OpenSSL.crypto import X509  # type: ignore[import-untyped]


__all__ = ["inject_into_urllib3", "extract_from_urllib3"]

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions: dict[int, int] = {
    util.ssl_.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,  # type: ignore[attr-defined]
    util.ssl_.PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,  # type: ignore[attr-defined]
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD


_stdlib_to_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
    + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
_openssl_to_stdlib_verify = {v: k for k, v in _stdlib_to_openssl_verify.items()}

# The SSLvX values are the most likely to be missing in the future
# but we check them all just to be sure.
_OP_NO_SSLv2_OR_SSLv3: int = getattr(OpenSSL.SSL, "OP_NO_SSLv2", 0) | getattr(
    OpenSSL.SSL, "OP_NO_SSLv3", 0
)
_OP_NO_TLSv1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1", 0)
_OP_NO_TLSv1_1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_1", 0)
_OP_NO_TLSv1_2: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_2", 0)
_OP_NO_TLSv1_3: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_3", 0)

_openssl_to_ssl_minimum_version: dict[int, int] = {
    ssl.TLSVersion.MINIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.TLSv1: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1,
    ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1,
    ssl.TLSVersion.TLSv1_3: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
    ),
    ssl.TLSVersion.MAXIMUM_SUPPORTED: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
    ),
}
_openssl_to_ssl_maximum_version: dict[int, int] = {
    ssl.TLSVersion.MINIMUM_SUPPORTED: (
        _OP_NO_SSLv2_OR_SSLv3
        | _OP_NO_TLSv1
        | _OP_NO_TLSv1_1
        | _OP_NO_TLSv1_2
        | _OP_NO_TLSv1_3
    ),
    ssl.TLSVersion.TLSv1: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3
    ),
    ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3,
    ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_3,
    ssl.TLSVersion.TLSv1_3: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.MAXIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3,
}

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

orig_util_SSLContext = util.ssl_.SSLContext


log = logging.getLogger(__name__)


def inject_into_urllib3() -> None:
    "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."

    _validate_dependencies_met()

    util.SSLContext = PyOpenSSLContext  # type: ignore[assignment]
    util.ssl_.SSLContext = PyOpenSSLContext  # type: ignore[assignment]
    util.IS_PYOPENSSL = True
    util.ssl_.IS_PYOPENSSL = True


def extract_from_urllib3() -> None:
    "Undo monkey-patching by :func:`inject_into_urllib3`."

    util.SSLContext = orig_util_SSLContext
    util.ssl_.SSLContext = orig_util_SSLContext
    util.IS_PYOPENSSL = False
    util.ssl_.IS_PYOPENSSL = False


def _validate_dependencies_met() -> None:
    """
    Verifies that PyOpenSSL's package-level dependencies have been met.
    Throws `ImportError` if they are not met.
    """
    # Method added in `cryptography==1.1`; not available in older versions
    from cryptography.x509.extensions import Extensions

    if getattr(Extensions, "get_extension_for_class", None) is None:
        raise ImportError(
            "'cryptography' module missing required functionality. "
            "Try upgrading to v1.3.4 or newer."
        )

    # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
    # attribute is only present on those versions.
    from OpenSSL.crypto import X509

    x509 = X509()
    if getattr(x509, "_x509", None) is None:
        raise ImportError(
            "'pyOpenSSL' module missing required functionality. "
            "Try upgrading to v0.14 or newer."
        )


def _dnsname_to_stdlib(name: str) -> str | None:
    """
    Converts a dNSName SubjectAlternativeName field to the form used by the
    standard library on the given Python version.

    Cryptography produces a dNSName as a unicode string that was idna-decoded
    from ASCII bytes. We need to idna-encode that string to get it back, and
    then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
    uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).

    If the name cannot be idna-encoded then we return None signalling that
    the name given should be skipped.
    """

    def idna_encode(name: str) -> bytes | None:
        """
        Borrowed wholesale from the Python Cryptography Project. It turns out
        that we can't just safely call `idna.encode`: it can explode for
        wildcard names. This avoids that problem.
        """
        import idna

        try:
            for prefix in ["*.", "."]:
                if name.startswith(prefix):
                    name = name[len(prefix) :]
                    return prefix.encode("ascii") + idna.encode(name)
            return idna.encode(name)
        except idna.core.IDNAError:
            return None

    # Don't send IPv6 addresses through the IDNA encoder.
    if ":" in name:
        return name

    encoded_name = idna_encode(name)
    if encoded_name is None:
        return None
    return encoded_name.decode("utf-8")


def get_subj_alt_name(peer_cert: X509) -> list[tuple[str, str]]:
    """
    Given an PyOpenSSL certificate, provides all the subject alternative names.
    """
    cert = peer_cert.to_cryptography()

    # We want to find the SAN extension. Ask Cryptography to locate it (it's
    # faster than looping in Python)
    try:
        ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    except x509.ExtensionNotFound:
        # No such extension, return the empty list.
        return []
    except (
        x509.DuplicateExtension,
        UnsupportedExtension,
        x509.UnsupportedGeneralNameType,
        UnicodeError,
    ) as e:
        # A problem has been found with the quality of the certificate. Assume
        # no SAN field is present.
        log.warning(
            "A problem was encountered with the certificate that prevented "
            "urllib3 from finding the SubjectAlternativeName field. This can "
            "affect certificate validation. The error was %s",
            e,
        )
        return []

    # We want to return dNSName and iPAddress fields. We need to cast the IPs
    # back to strings because the match_hostname function wants them as
    # strings.
    # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
    # decoded. This is pretty frustrating, but that's what the standard library
    # does with certificates, and so we need to attempt to do the same.
    # We also want to skip over names which cannot be idna encoded.
    names = [
        ("DNS", name)
        for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
        if name is not None
    ]
    names.extend(
        ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
    )

    return names


class WrappedSocket:
    """API-compatibility wrapper for Python OpenSSL's Connection-class."""

    def __init__(
        self,
        connection: OpenSSL.SSL.Connection,
        socket: socket_cls,
        suppress_ragged_eofs: bool = True,
    ) -> None:
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        self._io_refs = 0
        self._closed = False

    def fileno(self) -> int:
        return self.socket.fileno()

    # Copy-pasted from Python 3.5 source code
    def _decref_socketios(self) -> None:
        if self._io_refs > 0:
            self._io_refs -= 1
        if self._closed:
            self.close()

    def recv(self, *args: typing.Any, **kwargs: typing.Any) -> bytes:
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
                return b""
            else:
                raise OSError(e.args[0], str(e)) from e
        except OpenSSL.SSL.ZeroReturnError:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b""
            else:
                raise
        except OpenSSL.SSL.WantReadError as e:
            if not util.wait_for_read(self.socket, self.socket.gettimeout()):
                raise timeout("The read operation timed out") from e
            else:
                return self.recv(*args, **kwargs)

        # TLS 1.3 post-handshake authentication
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"read error: {e!r}") from e
        else:
            return data  # type: ignore[no-any-return]

    def recv_into(self, *args: typing.Any, **kwargs: typing.Any) -> int:
        try:
            return self.connection.recv_into(*args, **kwargs)  # type: ignore[no-any-return]
        except OpenSSL.SSL.SysCallError as e:
            if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
                return 0
            else:
                raise OSError(e.args[0], str(e)) from e
        except OpenSSL.SSL.ZeroReturnError:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return 0
            else:
                raise
        except OpenSSL.SSL.WantReadError as e:
            if not util.wait_for_read(self.socket, self.socket.gettimeout()):
                raise timeout("The read operation timed out") from e
            else:
                return self.recv_into(*args, **kwargs)

        # TLS 1.3 post-handshake authentication
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"read error: {e!r}") from e

    def settimeout(self, timeout: float) -> None:
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data: bytes) -> int:
        while True:
            try:
                return self.connection.send(data)  # type: ignore[no-any-return]
            except OpenSSL.SSL.WantWriteError as e:
                if not util.wait_for_write(self.socket, self.socket.gettimeout()):
                    raise timeout() from e
                continue
            except OpenSSL.SSL.SysCallError as e:
                raise OSError(e.args[0], str(e)) from e

    def sendall(self, data: bytes) -> None:
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(
                data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
            )
            total_sent += sent

    def shutdown(self, how: int) -> None:
        try:
            self.connection.shutdown()
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"shutdown error: {e!r}") from e

    def close(self) -> None:
        self._closed = True
        if self._io_refs <= 0:
            self._real_close()

    def _real_close(self) -> None:
        try:
            return self.connection.close()  # type: ignore[no-any-return]
        except OpenSSL.SSL.Error:
            return

    def getpeercert(
        self, binary_form: bool = False
    ) -> dict[str, list[typing.Any]] | None:
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509  # type: ignore[no-any-return]

        if binary_form:
            return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)  # type: ignore[no-any-return]

        return {
            "subject": ((("commonName", x509.get_subject().CN),),),  # type: ignore[dict-item]
            "subjectAltName": get_subj_alt_name(x509),
        }

    def version(self) -> str:
        return self.connection.get_protocol_version_name()  # type: ignore[no-any-return]

    def selected_alpn_protocol(self) -> str | None:
        alpn_proto = self.connection.get_alpn_proto_negotiated()
        return alpn_proto.decode() if alpn_proto else None


WrappedSocket.makefile = socket_cls.makefile  # type: ignore[attr-defined]


class PyOpenSSLContext:
    """
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    """

    def __init__(self, protocol: int) -> None:
        self.protocol = _openssl_versions[protocol]
        self._ctx = OpenSSL.SSL.Context(self.protocol)
        self._options = 0
        self.check_hostname = False
        self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
        self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED

    @property
    def options(self) -> int:
        return self._options

    @options.setter
    def options(self, value: int) -> None:
        self._options = value
        self._set_ctx_options()

    @property
    def verify_mode(self) -> int:
        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]

    @verify_mode.setter
    def verify_mode(self, value: ssl.VerifyMode) -> None:
        self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)

    def set_default_verify_paths(self) -> None:
        self._ctx.set_default_verify_paths()

    def set_ciphers(self, ciphers: bytes | str) -> None:
        if isinstance(ciphers, str):
            ciphers = ciphers.encode("utf-8")
        self._ctx.set_cipher_list(ciphers)

    def load_verify_locations(
        self,
        cafile: str | None = None,
        capath: str | None = None,
        cadata: bytes | None = None,
    ) -> None:
        if cafile is not None:
            cafile = cafile.encode("utf-8")  # type: ignore[assignment]
        if capath is not None:
            capath = capath.encode("utf-8")  # type: ignore[assignment]
        try:
            self._ctx.load_verify_locations(cafile, capath)
            if cadata is not None:
                self._ctx.load_verify_locations(BytesIO(cadata))
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e

    def load_cert_chain(
        self,
        certfile: str,
        keyfile: str | None = None,
        password: str | None = None,
    ) -> None:
        try:
            self._ctx.use_certificate_chain_file(certfile)
            if password is not None:
                if not isinstance(password, bytes):
                    password = password.encode("utf-8")  # type: ignore[assignment]
                self._ctx.set_passwd_cb(lambda *_: password)
            self._ctx.use_privatekey_file(keyfile or certfile)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"Unable to load certificate chain: {e!r}") from e

    def set_alpn_protocols(self, protocols: list[bytes | str]) -> None:
        protocols = [util.util.to_bytes(p, "ascii") for p in protocols]
        return self._ctx.set_alpn_protos(protocols)  # type: ignore[no-any-return]

    def wrap_socket(
        self,
        sock: socket_cls,
        server_side: bool = False,
        do_handshake_on_connect: bool = True,
        suppress_ragged_eofs: bool = True,
        server_hostname: bytes | str | None = None,
    ) -> WrappedSocket:
        cnx = OpenSSL.SSL.Connection(self._ctx, sock)

        # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3
        if server_hostname and not util.ssl_.is_ipaddress(server_hostname):
            if isinstance(server_hostname, str):
                server_hostname = server_hostname.encode("utf-8")
            cnx.set_tlsext_host_name(server_hostname)

        cnx.set_connect_state()

        while True:
            try:
                cnx.do_handshake()
            except OpenSSL.SSL.WantReadError as e:
                if not util.wait_for_read(sock, sock.gettimeout()):
                    raise timeout("select timed out") from e
                continue
            except OpenSSL.SSL.Error as e:
                raise ssl.SSLError(f"bad handshake: {e!r}") from e
            break

        return WrappedSocket(cnx, sock)

    def _set_ctx_options(self) -> None:
        self._ctx.set_options(
            self._options
            | _openssl_to_ssl_minimum_version[self._minimum_version]
            | _openssl_to_ssl_maximum_version[self._maximum_version]
        )

    @property
    def minimum_version(self) -> int:
        return self._minimum_version

    @minimum_version.setter
    def minimum_version(self, minimum_version: int) -> None:
        self._minimum_version = minimum_version
        self._set_ctx_options()

    @property
    def maximum_version(self) -> int:
        return self._maximum_version

    @maximum_version.setter
    def maximum_version(self, maximum_version: int) -> None:
        self._maximum_version = maximum_version
        self._set_ctx_options()


def _verify_callback(
    cnx: OpenSSL.SSL.Connection,
    x509: X509,
    err_no: int,
    err_depth: int,
    return_code: int,
) -> bool:
    return err_no == 0
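A minimal usage sketch (not part of the vendored sources), assuming pyOpenSSL, cryptography, and idna are installed, showing the inject/extract round trip described in the module docstring above; the target URL is a placeholder:

# Illustrative only: enable the PyOpenSSL backend, make a request, then undo it.
import urllib3
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()  # swaps SSLContext for PyOpenSSLContext
try:
    with urllib3.PoolManager() as http:
        resp = http.request("GET", "https://example.com/")
        print(resp.status)
finally:
    urllib3.contrib.pyopenssl.extract_from_urllib3()  # restore the stdlib ssl backend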
.venv/lib/python3.11/site-packages/urllib3/contrib/socks.py
ADDED
@@ -0,0 +1,228 @@
"""
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.

The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:

- SOCKS4A (``proxy_url='socks4a://...``)
- SOCKS4 (``proxy_url='socks4://...``)
- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
- SOCKS5 with local DNS (``proxy_url='socks5://...``)
- Usernames and passwords for the SOCKS proxy

.. note::
   It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
   your ``proxy_url`` to ensure that DNS resolution is done from the remote
   server instead of client-side when connecting to a domain name.

SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
supports IPv4, IPv6, and domain names.

When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
will be sent as the ``userid`` section of the SOCKS request:

.. code-block:: python

    proxy_url="socks4a://<userid>@proxy-host"

When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
of the ``proxy_url`` will be sent as the username/password to authenticate
with the proxy:

.. code-block:: python

    proxy_url="socks5h://<username>:<password>@proxy-host"

"""

from __future__ import annotations

try:
    import socks  # type: ignore[import-not-found]
except ImportError:
    import warnings

    from ..exceptions import DependencyWarning

    warnings.warn(
        (
            "SOCKS support in urllib3 requires the installation of optional "
            "dependencies: specifically, PySocks. For more information, see "
            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html#socks-proxies"
        ),
        DependencyWarning,
    )
    raise

import typing
from socket import timeout as SocketTimeout

from ..connection import HTTPConnection, HTTPSConnection
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from ..exceptions import ConnectTimeoutError, NewConnectionError
from ..poolmanager import PoolManager
from ..util.url import parse_url

try:
    import ssl
except ImportError:
    ssl = None  # type: ignore[assignment]


class _TYPE_SOCKS_OPTIONS(typing.TypedDict):
    socks_version: int
    proxy_host: str | None
    proxy_port: str | None
    username: str | None
    password: str | None
    rdns: bool


class SOCKSConnection(HTTPConnection):
    """
    A plain-text HTTP connection that connects via a SOCKS proxy.
    """

    def __init__(
        self,
        _socks_options: _TYPE_SOCKS_OPTIONS,
        *args: typing.Any,
        **kwargs: typing.Any,
    ) -> None:
        self._socks_options = _socks_options
        super().__init__(*args, **kwargs)

    def _new_conn(self) -> socks.socksocket:
        """
        Establish a new connection via the SOCKS proxy.
        """
        extra_kw: dict[str, typing.Any] = {}
        if self.source_address:
            extra_kw["source_address"] = self.source_address

        if self.socket_options:
            extra_kw["socket_options"] = self.socket_options

        try:
            conn = socks.create_connection(
                (self.host, self.port),
                proxy_type=self._socks_options["socks_version"],
                proxy_addr=self._socks_options["proxy_host"],
                proxy_port=self._socks_options["proxy_port"],
                proxy_username=self._socks_options["username"],
                proxy_password=self._socks_options["password"],
                proxy_rdns=self._socks_options["rdns"],
                timeout=self.timeout,
                **extra_kw,
            )

        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self,
                f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
            ) from e

        except socks.ProxyError as e:
            # This is fragile as hell, but it seems to be the only way to raise
            # useful errors here.
            if e.socket_err:
                error = e.socket_err
                if isinstance(error, SocketTimeout):
                    raise ConnectTimeoutError(
                        self,
                        f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
                    ) from e
                else:
                    # Adding `from e` messes with coverage somehow, so it's omitted.
                    # See #2386.
                    raise NewConnectionError(
                        self, f"Failed to establish a new connection: {error}"
                    )
            else:
                raise NewConnectionError(
                    self, f"Failed to establish a new connection: {e}"
                ) from e

        except OSError as e:  # Defensive: PySocks should catch all these.
            raise NewConnectionError(
                self, f"Failed to establish a new connection: {e}"
            ) from e

        return conn


# We don't need to duplicate the Verified/Unverified distinction from
# urllib3/connection.py here because the HTTPSConnection will already have been
# correctly set to either the Verified or Unverified form by that module. This
# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    pass


class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    ConnectionCls = SOCKSConnection


class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    ConnectionCls = SOCKSHTTPSConnection


class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    """

    pool_classes_by_scheme = {
        "http": SOCKSHTTPConnectionPool,
        "https": SOCKSHTTPSConnectionPool,
    }

    def __init__(
        self,
        proxy_url: str,
        username: str | None = None,
        password: str | None = None,
        num_pools: int = 10,
        headers: typing.Mapping[str, str] | None = None,
        **connection_pool_kw: typing.Any,
    ):
        parsed = parse_url(proxy_url)

        if username is None and password is None and parsed.auth is not None:
            split = parsed.auth.split(":")
            if len(split) == 2:
                username, password = split
        if parsed.scheme == "socks5":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = False
        elif parsed.scheme == "socks5h":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = True
        elif parsed.scheme == "socks4":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = False
        elif parsed.scheme == "socks4a":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = True
        else:
            raise ValueError(f"Unable to determine SOCKS version from {proxy_url}")

        self.proxy_url = proxy_url

        socks_options = {
            "socks_version": socks_version,
            "proxy_host": parsed.host,
            "proxy_port": parsed.port,
            "username": username,
            "password": password,
            "rdns": rdns,
        }
        connection_pool_kw["_socks_options"] = socks_options

        super().__init__(num_pools, headers, **connection_pool_kw)

        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
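A minimal usage sketch (not part of the vendored sources), assuming PySocks is installed and a SOCKS5 proxy is reachable; the proxy address and target URL are placeholders:

# Illustrative only: route requests through a SOCKS5 proxy with remote DNS.
from urllib3.contrib.socks import SOCKSProxyManager

proxy = SOCKSProxyManager("socks5h://localhost:1080/")
resp = proxy.request("GET", "https://example.com/")  # same API as PoolManager
print(resp.status)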
.venv/lib/python3.11/site-packages/urllib3/http2/__init__.py
ADDED
@@ -0,0 +1,53 @@
from __future__ import annotations

from importlib.metadata import version

__all__ = [
    "inject_into_urllib3",
    "extract_from_urllib3",
]

import typing

orig_HTTPSConnection: typing.Any = None


def inject_into_urllib3() -> None:
    # First check if h2 version is valid
    h2_version = version("h2")
    if not h2_version.startswith("4."):
        raise ImportError(
            "urllib3 v2 supports h2 version 4.x.x, currently "
            f"the 'h2' module is compiled with {h2_version!r}. "
            "See: https://github.com/urllib3/urllib3/issues/3290"
        )

    # Import here to avoid circular dependencies.
    from .. import connection as urllib3_connection
    from .. import util as urllib3_util
    from ..connectionpool import HTTPSConnectionPool
    from ..util import ssl_ as urllib3_util_ssl
    from .connection import HTTP2Connection

    global orig_HTTPSConnection
    orig_HTTPSConnection = urllib3_connection.HTTPSConnection

    HTTPSConnectionPool.ConnectionCls = HTTP2Connection
    urllib3_connection.HTTPSConnection = HTTP2Connection  # type: ignore[misc]

    # TODO: Offer 'http/1.1' as well, but for testing purposes this is handy.
    urllib3_util.ALPN_PROTOCOLS = ["h2"]
    urllib3_util_ssl.ALPN_PROTOCOLS = ["h2"]


def extract_from_urllib3() -> None:
    from .. import connection as urllib3_connection
    from .. import util as urllib3_util
    from ..connectionpool import HTTPSConnectionPool
    from ..util import ssl_ as urllib3_util_ssl

    HTTPSConnectionPool.ConnectionCls = orig_HTTPSConnection
    urllib3_connection.HTTPSConnection = orig_HTTPSConnection  # type: ignore[misc]

    urllib3_util.ALPN_PROTOCOLS = ["http/1.1"]
    urllib3_util_ssl.ALPN_PROTOCOLS = ["http/1.1"]
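A minimal usage sketch (not part of the vendored sources), assuming h2 4.x is installed and the target server speaks HTTP/2; the URL is a placeholder:

# Illustrative only: switch HTTPS connections to the HTTP2Connection class
# injected by the module above, then switch back.
import urllib3
import urllib3.http2

urllib3.http2.inject_into_urllib3()  # ALPN now advertises "h2" only
try:
    resp = urllib3.request("GET", "https://example.com/")
    print(resp.status)
finally:
    urllib3.http2.extract_from_urllib3()  # restore the HTTP/1.1 connection class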
.venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (2.01 kB).
.venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/connection.cpython-311.pyc
ADDED
Binary file (19 kB).
.venv/lib/python3.11/site-packages/urllib3/http2/__pycache__/probe.cpython-311.pyc
ADDED
Binary file (4.33 kB).
.venv/lib/python3.11/site-packages/urllib3/http2/connection.py
ADDED
@@ -0,0 +1,356 @@
from __future__ import annotations

import logging
import re
import threading
import types
import typing

import h2.config  # type: ignore[import-untyped]
import h2.connection  # type: ignore[import-untyped]
import h2.events  # type: ignore[import-untyped]

from .._base_connection import _TYPE_BODY
from .._collections import HTTPHeaderDict
from ..connection import HTTPSConnection, _get_default_user_agent
from ..exceptions import ConnectionError
from ..response import BaseHTTPResponse

orig_HTTPSConnection = HTTPSConnection

T = typing.TypeVar("T")

log = logging.getLogger(__name__)

RE_IS_LEGAL_HEADER_NAME = re.compile(rb"^[!#$%&'*+\-.^_`|~0-9a-z]+$")
RE_IS_ILLEGAL_HEADER_VALUE = re.compile(rb"[\0\x00\x0a\x0d\r\n]|^[ \r\n\t]|[ \r\n\t]$")


def _is_legal_header_name(name: bytes) -> bool:
    """
    "An implementation that validates fields according to the definitions in Sections
    5.1 and 5.5 of [HTTP] only needs an additional check that field names do not
    include uppercase characters." (https://httpwg.org/specs/rfc9113.html#n-field-validity)

    `http.client._is_legal_header_name` does not validate the field name according to the
    HTTP 1.1 spec, so we do that here, in addition to checking for uppercase characters.

    This does not allow for the `:` character in the header name, so should not
    be used to validate pseudo-headers.
    """
    return bool(RE_IS_LEGAL_HEADER_NAME.match(name))


def _is_illegal_header_value(value: bytes) -> bool:
    """
    "A field value MUST NOT contain the zero value (ASCII NUL, 0x00), line feed
    (ASCII LF, 0x0a), or carriage return (ASCII CR, 0x0d) at any position. A field
    value MUST NOT start or end with an ASCII whitespace character (ASCII SP or HTAB,
    0x20 or 0x09)." (https://httpwg.org/specs/rfc9113.html#n-field-validity)
    """
    return bool(RE_IS_ILLEGAL_HEADER_VALUE.search(value))


class _LockedObject(typing.Generic[T]):
    """
    A wrapper class that hides a specific object behind a lock.
    The goal here is to provide a simple way to protect access to an object
    that cannot safely be simultaneously accessed from multiple threads. The
    intended use of this class is simple: take hold of it with a context
    manager, which returns the protected object.
    """

    __slots__ = (
        "lock",
        "_obj",
    )

    def __init__(self, obj: T):
        self.lock = threading.RLock()
        self._obj = obj

    def __enter__(self) -> T:
        self.lock.acquire()
        return self._obj

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: types.TracebackType | None,
    ) -> None:
        self.lock.release()


class HTTP2Connection(HTTPSConnection):
    def __init__(
        self, host: str, port: int | None = None, **kwargs: typing.Any
    ) -> None:
        self._h2_conn = self._new_h2_conn()
        self._h2_stream: int | None = None
        self._headers: list[tuple[bytes, bytes]] = []

        if "proxy" in kwargs or "proxy_config" in kwargs:  # Defensive:
            raise NotImplementedError("Proxies aren't supported with HTTP/2")

        super().__init__(host, port, **kwargs)

        if self._tunnel_host is not None:
            raise NotImplementedError("Tunneling isn't supported with HTTP/2")

    def _new_h2_conn(self) -> _LockedObject[h2.connection.H2Connection]:
        config = h2.config.H2Configuration(client_side=True)
        return _LockedObject(h2.connection.H2Connection(config=config))

    def connect(self) -> None:
        super().connect()
        with self._h2_conn as conn:
            conn.initiate_connection()
            if data_to_send := conn.data_to_send():
                self.sock.sendall(data_to_send)

    def putrequest(  # type: ignore[override]
        self,
        method: str,
        url: str,
        **kwargs: typing.Any,
    ) -> None:
        """putrequest
        This deviates from the HTTPConnection method signature since we never need to override
        sending accept-encoding headers or the host header.
        """
        if "skip_host" in kwargs:
            raise NotImplementedError("`skip_host` isn't supported")
        if "skip_accept_encoding" in kwargs:
            raise NotImplementedError("`skip_accept_encoding` isn't supported")

        self._request_url = url or "/"
        self._validate_path(url)  # type: ignore[attr-defined]

        if ":" in self.host:
            authority = f"[{self.host}]:{self.port or 443}"
        else:
            authority = f"{self.host}:{self.port or 443}"

        self._headers.append((b":scheme", b"https"))
        self._headers.append((b":method", method.encode()))
        self._headers.append((b":authority", authority.encode()))
        self._headers.append((b":path", url.encode()))

        with self._h2_conn as conn:
            self._h2_stream = conn.get_next_available_stream_id()

    def putheader(self, header: str | bytes, *values: str | bytes) -> None:
        # TODO SKIPPABLE_HEADERS from urllib3 are ignored.
        header = header.encode() if isinstance(header, str) else header
        header = header.lower()  # A lot of upstream code uses capitalized headers.
        if not _is_legal_header_name(header):
            raise ValueError(f"Illegal header name {str(header)}")

        for value in values:
            value = value.encode() if isinstance(value, str) else value
            if _is_illegal_header_value(value):
                raise ValueError(f"Illegal header value {str(value)}")
            self._headers.append((header, value))

    def endheaders(self, message_body: typing.Any = None) -> None:  # type: ignore[override]
        if self._h2_stream is None:
            raise ConnectionError("Must call `putrequest` first.")

        with self._h2_conn as conn:
            conn.send_headers(
                stream_id=self._h2_stream,
                headers=self._headers,
                end_stream=(message_body is None),
            )
            if data_to_send := conn.data_to_send():
                self.sock.sendall(data_to_send)
        self._headers = []  # Reset headers for the next request.

    def send(self, data: typing.Any) -> None:
        """Send data to the server.
        `data` can be: `str`, `bytes`, an iterable, or file-like objects
        that support a .read() method.
        """
        if self._h2_stream is None:
            raise ConnectionError("Must call `putrequest` first.")

        with self._h2_conn as conn:
            if data_to_send := conn.data_to_send():
                self.sock.sendall(data_to_send)

            if hasattr(data, "read"):  # file-like objects
                while True:
                    chunk = data.read(self.blocksize)
                    if not chunk:
                        break
                    if isinstance(chunk, str):
                        chunk = chunk.encode()  # pragma: no cover
                    conn.send_data(self._h2_stream, chunk, end_stream=False)
                    if data_to_send := conn.data_to_send():
                        self.sock.sendall(data_to_send)
                conn.end_stream(self._h2_stream)
                return

            if isinstance(data, str):  # str -> bytes
                data = data.encode()

            try:
                if isinstance(data, bytes):
                    conn.send_data(self._h2_stream, data, end_stream=True)
                    if data_to_send := conn.data_to_send():
                        self.sock.sendall(data_to_send)
                else:
                    for chunk in data:
                        conn.send_data(self._h2_stream, chunk, end_stream=False)
                        if data_to_send := conn.data_to_send():
                            self.sock.sendall(data_to_send)
                    conn.end_stream(self._h2_stream)
            except TypeError:
                raise TypeError(
                    "`data` should be str, bytes, iterable, or file. got %r"
                    % type(data)
                )

    def set_tunnel(
        self,
        host: str,
        port: int | None = None,
        headers: typing.Mapping[str, str] | None = None,
        scheme: str = "http",
    ) -> None:
        raise NotImplementedError(
            "HTTP/2 does not support setting up a tunnel through a proxy"
        )

    def getresponse(  # type: ignore[override]
        self,
    ) -> HTTP2Response:
        status = None
        data = bytearray()
        with self._h2_conn as conn:
            end_stream = False
            while not end_stream:
                # TODO: Arbitrary read value.
                if received_data := self.sock.recv(65535):
                    events = conn.receive_data(received_data)
                    for event in events:
                        if isinstance(event, h2.events.ResponseReceived):
                            headers = HTTPHeaderDict()
                            for header, value in event.headers:
                                if header == b":status":
                                    status = int(value.decode())
                                else:
                                    headers.add(
                                        header.decode("ascii"), value.decode("ascii")
                                    )

                        elif isinstance(event, h2.events.DataReceived):
                            data += event.data
                            conn.acknowledge_received_data(
                                event.flow_controlled_length, event.stream_id
                            )

                        elif isinstance(event, h2.events.StreamEnded):
                            end_stream = True

            if data_to_send := conn.data_to_send():
                self.sock.sendall(data_to_send)

        assert status is not None
        return HTTP2Response(
            status=status,
            headers=headers,
            request_url=self._request_url,
            data=bytes(data),
        )

    def request(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        *,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
        **kwargs: typing.Any,
    ) -> None:
        """Send an HTTP/2 request"""
        if "chunked" in kwargs:
            # TODO this is often present from upstream.
            # raise NotImplementedError("`chunked` isn't supported with HTTP/2")
            pass

        if self.sock is not None:
            self.sock.settimeout(self.timeout)

        self.putrequest(method, url)

        headers = headers or {}
        for k, v in headers.items():
            if k.lower() == "transfer-encoding" and v == "chunked":
                continue
            else:
                self.putheader(k, v)

        if b"user-agent" not in dict(self._headers):
            self.putheader(b"user-agent", _get_default_user_agent())

        if body:
            self.endheaders(message_body=body)
            self.send(body)
        else:
            self.endheaders()

    def close(self) -> None:
        with self._h2_conn as conn:
            try:
                conn.close_connection()
                if data := conn.data_to_send():
                    self.sock.sendall(data)
            except Exception:
                pass

        # Reset all our HTTP/2 connection state.
|
317 |
+
self._h2_conn = self._new_h2_conn()
|
318 |
+
self._h2_stream = None
|
319 |
+
self._headers = []
|
320 |
+
|
321 |
+
super().close()
|
322 |
+
|
323 |
+
|
324 |
+
class HTTP2Response(BaseHTTPResponse):
|
325 |
+
# TODO: This is a woefully incomplete response object, but works for non-streaming.
|
326 |
+
def __init__(
|
327 |
+
self,
|
328 |
+
status: int,
|
329 |
+
headers: HTTPHeaderDict,
|
330 |
+
request_url: str,
|
331 |
+
data: bytes,
|
332 |
+
decode_content: bool = False, # TODO: support decoding
|
333 |
+
) -> None:
|
334 |
+
super().__init__(
|
335 |
+
status=status,
|
336 |
+
headers=headers,
|
337 |
+
# Following CPython, we map HTTP versions to major * 10 + minor integers
|
338 |
+
version=20,
|
339 |
+
version_string="HTTP/2",
|
340 |
+
# No reason phrase in HTTP/2
|
341 |
+
reason=None,
|
342 |
+
decode_content=decode_content,
|
343 |
+
request_url=request_url,
|
344 |
+
)
|
345 |
+
self._data = data
|
346 |
+
self.length_remaining = 0
|
347 |
+
|
348 |
+
@property
|
349 |
+
def data(self) -> bytes:
|
350 |
+
return self._data
|
351 |
+
|
352 |
+
def get_redirect_location(self) -> None:
|
353 |
+
return None
|
354 |
+
|
355 |
+
def close(self) -> None:
|
356 |
+
pass
|
.venv/lib/python3.11/site-packages/urllib3/http2/probe.py
ADDED
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import threading
|
4 |
+
|
5 |
+
|
6 |
+
class _HTTP2ProbeCache:
|
7 |
+
__slots__ = (
|
8 |
+
"_lock",
|
9 |
+
"_cache_locks",
|
10 |
+
"_cache_values",
|
11 |
+
)
|
12 |
+
|
13 |
+
def __init__(self) -> None:
|
14 |
+
self._lock = threading.Lock()
|
15 |
+
self._cache_locks: dict[tuple[str, int], threading.RLock] = {}
|
16 |
+
self._cache_values: dict[tuple[str, int], bool | None] = {}
|
17 |
+
|
18 |
+
def acquire_and_get(self, host: str, port: int) -> bool | None:
|
19 |
+
# By the end of this block we know that
|
20 |
+
# _cache_[values,locks] is available.
|
21 |
+
value = None
|
22 |
+
with self._lock:
|
23 |
+
key = (host, port)
|
24 |
+
try:
|
25 |
+
value = self._cache_values[key]
|
26 |
+
# If it's a known value we return right away.
|
27 |
+
if value is not None:
|
28 |
+
return value
|
29 |
+
except KeyError:
|
30 |
+
self._cache_locks[key] = threading.RLock()
|
31 |
+
self._cache_values[key] = None
|
32 |
+
|
33 |
+
# If the value is unknown, we acquire the lock to signal
|
34 |
+
# to the requesting thread that the probe is in progress
|
35 |
+
# or that the current thread needs to return their findings.
|
36 |
+
key_lock = self._cache_locks[key]
|
37 |
+
key_lock.acquire()
|
38 |
+
try:
|
39 |
+
# If the by the time we get the lock the value has been
|
40 |
+
# updated we want to return the updated value.
|
41 |
+
value = self._cache_values[key]
|
42 |
+
|
43 |
+
# In case an exception like KeyboardInterrupt is raised here.
|
44 |
+
except BaseException as e: # Defensive:
|
45 |
+
assert not isinstance(e, KeyError) # KeyError shouldn't be possible.
|
46 |
+
key_lock.release()
|
47 |
+
raise
|
48 |
+
|
49 |
+
return value
|
50 |
+
|
51 |
+
def set_and_release(
|
52 |
+
self, host: str, port: int, supports_http2: bool | None
|
53 |
+
) -> None:
|
54 |
+
key = (host, port)
|
55 |
+
key_lock = self._cache_locks[key]
|
56 |
+
with key_lock: # Uses an RLock, so can be locked again from same thread.
|
57 |
+
if supports_http2 is None and self._cache_values[key] is not None:
|
58 |
+
raise ValueError(
|
59 |
+
"Cannot reset HTTP/2 support for origin after value has been set."
|
60 |
+
) # Defensive: not expected in normal usage
|
61 |
+
|
62 |
+
self._cache_values[key] = supports_http2
|
63 |
+
key_lock.release()
|
64 |
+
|
65 |
+
def _values(self) -> dict[tuple[str, int], bool | None]:
|
66 |
+
"""This function is for testing purposes only. Gets the current state of the probe cache"""
|
67 |
+
with self._lock:
|
68 |
+
return {k: v for k, v in self._cache_values.items()}
|
69 |
+
|
70 |
+
def _reset(self) -> None:
|
71 |
+
"""This function is for testing purposes only. Reset the cache values"""
|
72 |
+
with self._lock:
|
73 |
+
self._cache_locks = {}
|
74 |
+
self._cache_values = {}
|
75 |
+
|
76 |
+
|
77 |
+
_HTTP2_PROBE_CACHE = _HTTP2ProbeCache()
|
78 |
+
|
79 |
+
set_and_release = _HTTP2_PROBE_CACHE.set_and_release
|
80 |
+
acquire_and_get = _HTTP2_PROBE_CACHE.acquire_and_get
|
81 |
+
_values = _HTTP2_PROBE_CACHE._values
|
82 |
+
_reset = _HTTP2_PROBE_CACHE._reset
|
83 |
+
|
84 |
+
__all__ = [
|
85 |
+
"set_and_release",
|
86 |
+
"acquire_and_get",
|
87 |
+
]
|
.venv/lib/python3.11/site-packages/urllib3/poolmanager.py
ADDED
@@ -0,0 +1,637 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import functools
|
4 |
+
import logging
|
5 |
+
import typing
|
6 |
+
import warnings
|
7 |
+
from types import TracebackType
|
8 |
+
from urllib.parse import urljoin
|
9 |
+
|
10 |
+
from ._collections import HTTPHeaderDict, RecentlyUsedContainer
|
11 |
+
from ._request_methods import RequestMethods
|
12 |
+
from .connection import ProxyConfig
|
13 |
+
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
|
14 |
+
from .exceptions import (
|
15 |
+
LocationValueError,
|
16 |
+
MaxRetryError,
|
17 |
+
ProxySchemeUnknown,
|
18 |
+
URLSchemeUnknown,
|
19 |
+
)
|
20 |
+
from .response import BaseHTTPResponse
|
21 |
+
from .util.connection import _TYPE_SOCKET_OPTIONS
|
22 |
+
from .util.proxy import connection_requires_http_tunnel
|
23 |
+
from .util.retry import Retry
|
24 |
+
from .util.timeout import Timeout
|
25 |
+
from .util.url import Url, parse_url
|
26 |
+
|
27 |
+
if typing.TYPE_CHECKING:
|
28 |
+
import ssl
|
29 |
+
|
30 |
+
from typing_extensions import Self
|
31 |
+
|
32 |
+
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
|
33 |
+
|
34 |
+
|
35 |
+
log = logging.getLogger(__name__)
|
36 |
+
|
37 |
+
SSL_KEYWORDS = (
|
38 |
+
"key_file",
|
39 |
+
"cert_file",
|
40 |
+
"cert_reqs",
|
41 |
+
"ca_certs",
|
42 |
+
"ca_cert_data",
|
43 |
+
"ssl_version",
|
44 |
+
"ssl_minimum_version",
|
45 |
+
"ssl_maximum_version",
|
46 |
+
"ca_cert_dir",
|
47 |
+
"ssl_context",
|
48 |
+
"key_password",
|
49 |
+
"server_hostname",
|
50 |
+
)
|
51 |
+
# Default value for `blocksize` - a new parameter introduced to
|
52 |
+
# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7
|
53 |
+
_DEFAULT_BLOCKSIZE = 16384
|
54 |
+
|
55 |
+
|
56 |
+
class PoolKey(typing.NamedTuple):
|
57 |
+
"""
|
58 |
+
All known keyword arguments that could be provided to the pool manager, its
|
59 |
+
pools, or the underlying connections.
|
60 |
+
|
61 |
+
All custom key schemes should include the fields in this key at a minimum.
|
62 |
+
"""
|
63 |
+
|
64 |
+
key_scheme: str
|
65 |
+
key_host: str
|
66 |
+
key_port: int | None
|
67 |
+
key_timeout: Timeout | float | int | None
|
68 |
+
key_retries: Retry | bool | int | None
|
69 |
+
key_block: bool | None
|
70 |
+
key_source_address: tuple[str, int] | None
|
71 |
+
key_key_file: str | None
|
72 |
+
key_key_password: str | None
|
73 |
+
key_cert_file: str | None
|
74 |
+
key_cert_reqs: str | None
|
75 |
+
key_ca_certs: str | None
|
76 |
+
key_ca_cert_data: str | bytes | None
|
77 |
+
key_ssl_version: int | str | None
|
78 |
+
key_ssl_minimum_version: ssl.TLSVersion | None
|
79 |
+
key_ssl_maximum_version: ssl.TLSVersion | None
|
80 |
+
key_ca_cert_dir: str | None
|
81 |
+
key_ssl_context: ssl.SSLContext | None
|
82 |
+
key_maxsize: int | None
|
83 |
+
key_headers: frozenset[tuple[str, str]] | None
|
84 |
+
key__proxy: Url | None
|
85 |
+
key__proxy_headers: frozenset[tuple[str, str]] | None
|
86 |
+
key__proxy_config: ProxyConfig | None
|
87 |
+
key_socket_options: _TYPE_SOCKET_OPTIONS | None
|
88 |
+
key__socks_options: frozenset[tuple[str, str]] | None
|
89 |
+
key_assert_hostname: bool | str | None
|
90 |
+
key_assert_fingerprint: str | None
|
91 |
+
key_server_hostname: str | None
|
92 |
+
key_blocksize: int | None
|
93 |
+
|
94 |
+
|
95 |
+
def _default_key_normalizer(
|
96 |
+
key_class: type[PoolKey], request_context: dict[str, typing.Any]
|
97 |
+
) -> PoolKey:
|
98 |
+
"""
|
99 |
+
Create a pool key out of a request context dictionary.
|
100 |
+
|
101 |
+
According to RFC 3986, both the scheme and host are case-insensitive.
|
102 |
+
Therefore, this function normalizes both before constructing the pool
|
103 |
+
key for an HTTPS request. If you wish to change this behaviour, provide
|
104 |
+
alternate callables to ``key_fn_by_scheme``.
|
105 |
+
|
106 |
+
:param key_class:
|
107 |
+
The class to use when constructing the key. This should be a namedtuple
|
108 |
+
with the ``scheme`` and ``host`` keys at a minimum.
|
109 |
+
:type key_class: namedtuple
|
110 |
+
:param request_context:
|
111 |
+
A dictionary-like object that contain the context for a request.
|
112 |
+
:type request_context: dict
|
113 |
+
|
114 |
+
:return: A namedtuple that can be used as a connection pool key.
|
115 |
+
:rtype: PoolKey
|
116 |
+
"""
|
117 |
+
# Since we mutate the dictionary, make a copy first
|
118 |
+
context = request_context.copy()
|
119 |
+
context["scheme"] = context["scheme"].lower()
|
120 |
+
context["host"] = context["host"].lower()
|
121 |
+
|
122 |
+
# These are both dictionaries and need to be transformed into frozensets
|
123 |
+
for key in ("headers", "_proxy_headers", "_socks_options"):
|
124 |
+
if key in context and context[key] is not None:
|
125 |
+
context[key] = frozenset(context[key].items())
|
126 |
+
|
127 |
+
# The socket_options key may be a list and needs to be transformed into a
|
128 |
+
# tuple.
|
129 |
+
socket_opts = context.get("socket_options")
|
130 |
+
if socket_opts is not None:
|
131 |
+
context["socket_options"] = tuple(socket_opts)
|
132 |
+
|
133 |
+
# Map the kwargs to the names in the namedtuple - this is necessary since
|
134 |
+
# namedtuples can't have fields starting with '_'.
|
135 |
+
for key in list(context.keys()):
|
136 |
+
context["key_" + key] = context.pop(key)
|
137 |
+
|
138 |
+
# Default to ``None`` for keys missing from the context
|
139 |
+
for field in key_class._fields:
|
140 |
+
if field not in context:
|
141 |
+
context[field] = None
|
142 |
+
|
143 |
+
# Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context
|
144 |
+
if context.get("key_blocksize") is None:
|
145 |
+
context["key_blocksize"] = _DEFAULT_BLOCKSIZE
|
146 |
+
|
147 |
+
return key_class(**context)
|
148 |
+
|
149 |
+
|
150 |
+
#: A dictionary that maps a scheme to a callable that creates a pool key.
|
151 |
+
#: This can be used to alter the way pool keys are constructed, if desired.
|
152 |
+
#: Each PoolManager makes a copy of this dictionary so they can be configured
|
153 |
+
#: globally here, or individually on the instance.
|
154 |
+
key_fn_by_scheme = {
|
155 |
+
"http": functools.partial(_default_key_normalizer, PoolKey),
|
156 |
+
"https": functools.partial(_default_key_normalizer, PoolKey),
|
157 |
+
}
|
158 |
+
|
159 |
+
pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
|
160 |
+
|
161 |
+
|
162 |
+
class PoolManager(RequestMethods):
|
163 |
+
"""
|
164 |
+
Allows for arbitrary requests while transparently keeping track of
|
165 |
+
necessary connection pools for you.
|
166 |
+
|
167 |
+
:param num_pools:
|
168 |
+
Number of connection pools to cache before discarding the least
|
169 |
+
recently used pool.
|
170 |
+
|
171 |
+
:param headers:
|
172 |
+
Headers to include with all requests, unless other headers are given
|
173 |
+
explicitly.
|
174 |
+
|
175 |
+
:param \\**connection_pool_kw:
|
176 |
+
Additional parameters are used to create fresh
|
177 |
+
:class:`urllib3.connectionpool.ConnectionPool` instances.
|
178 |
+
|
179 |
+
Example:
|
180 |
+
|
181 |
+
.. code-block:: python
|
182 |
+
|
183 |
+
import urllib3
|
184 |
+
|
185 |
+
http = urllib3.PoolManager(num_pools=2)
|
186 |
+
|
187 |
+
resp1 = http.request("GET", "https://google.com/")
|
188 |
+
resp2 = http.request("GET", "https://google.com/mail")
|
189 |
+
resp3 = http.request("GET", "https://yahoo.com/")
|
190 |
+
|
191 |
+
print(len(http.pools))
|
192 |
+
# 2
|
193 |
+
|
194 |
+
"""
|
195 |
+
|
196 |
+
proxy: Url | None = None
|
197 |
+
proxy_config: ProxyConfig | None = None
|
198 |
+
|
199 |
+
def __init__(
|
200 |
+
self,
|
201 |
+
num_pools: int = 10,
|
202 |
+
headers: typing.Mapping[str, str] | None = None,
|
203 |
+
**connection_pool_kw: typing.Any,
|
204 |
+
) -> None:
|
205 |
+
super().__init__(headers)
|
206 |
+
self.connection_pool_kw = connection_pool_kw
|
207 |
+
|
208 |
+
self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
|
209 |
+
self.pools = RecentlyUsedContainer(num_pools)
|
210 |
+
|
211 |
+
# Locally set the pool classes and keys so other PoolManagers can
|
212 |
+
# override them.
|
213 |
+
self.pool_classes_by_scheme = pool_classes_by_scheme
|
214 |
+
self.key_fn_by_scheme = key_fn_by_scheme.copy()
|
215 |
+
|
216 |
+
def __enter__(self) -> Self:
|
217 |
+
return self
|
218 |
+
|
219 |
+
def __exit__(
|
220 |
+
self,
|
221 |
+
exc_type: type[BaseException] | None,
|
222 |
+
exc_val: BaseException | None,
|
223 |
+
exc_tb: TracebackType | None,
|
224 |
+
) -> typing.Literal[False]:
|
225 |
+
self.clear()
|
226 |
+
# Return False to re-raise any potential exceptions
|
227 |
+
return False
|
228 |
+
|
229 |
+
def _new_pool(
|
230 |
+
self,
|
231 |
+
scheme: str,
|
232 |
+
host: str,
|
233 |
+
port: int,
|
234 |
+
request_context: dict[str, typing.Any] | None = None,
|
235 |
+
) -> HTTPConnectionPool:
|
236 |
+
"""
|
237 |
+
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
|
238 |
+
any additional pool keyword arguments.
|
239 |
+
|
240 |
+
If ``request_context`` is provided, it is provided as keyword arguments
|
241 |
+
to the pool class used. This method is used to actually create the
|
242 |
+
connection pools handed out by :meth:`connection_from_url` and
|
243 |
+
companion methods. It is intended to be overridden for customization.
|
244 |
+
"""
|
245 |
+
pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme]
|
246 |
+
if request_context is None:
|
247 |
+
request_context = self.connection_pool_kw.copy()
|
248 |
+
|
249 |
+
# Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
|
250 |
+
# set to 'None' in the request_context.
|
251 |
+
if request_context.get("blocksize") is None:
|
252 |
+
request_context["blocksize"] = _DEFAULT_BLOCKSIZE
|
253 |
+
|
254 |
+
# Although the context has everything necessary to create the pool,
|
255 |
+
# this function has historically only used the scheme, host, and port
|
256 |
+
# in the positional args. When an API change is acceptable these can
|
257 |
+
# be removed.
|
258 |
+
for key in ("scheme", "host", "port"):
|
259 |
+
request_context.pop(key, None)
|
260 |
+
|
261 |
+
if scheme == "http":
|
262 |
+
for kw in SSL_KEYWORDS:
|
263 |
+
request_context.pop(kw, None)
|
264 |
+
|
265 |
+
return pool_cls(host, port, **request_context)
|
266 |
+
|
267 |
+
def clear(self) -> None:
|
268 |
+
"""
|
269 |
+
Empty our store of pools and direct them all to close.
|
270 |
+
|
271 |
+
This will not affect in-flight connections, but they will not be
|
272 |
+
re-used after completion.
|
273 |
+
"""
|
274 |
+
self.pools.clear()
|
275 |
+
|
276 |
+
def connection_from_host(
|
277 |
+
self,
|
278 |
+
host: str | None,
|
279 |
+
port: int | None = None,
|
280 |
+
scheme: str | None = "http",
|
281 |
+
pool_kwargs: dict[str, typing.Any] | None = None,
|
282 |
+
) -> HTTPConnectionPool:
|
283 |
+
"""
|
284 |
+
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
|
285 |
+
|
286 |
+
If ``port`` isn't given, it will be derived from the ``scheme`` using
|
287 |
+
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
|
288 |
+
provided, it is merged with the instance's ``connection_pool_kw``
|
289 |
+
variable and used to create the new connection pool, if one is
|
290 |
+
needed.
|
291 |
+
"""
|
292 |
+
|
293 |
+
if not host:
|
294 |
+
raise LocationValueError("No host specified.")
|
295 |
+
|
296 |
+
request_context = self._merge_pool_kwargs(pool_kwargs)
|
297 |
+
request_context["scheme"] = scheme or "http"
|
298 |
+
if not port:
|
299 |
+
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
|
300 |
+
request_context["port"] = port
|
301 |
+
request_context["host"] = host
|
302 |
+
|
303 |
+
return self.connection_from_context(request_context)
|
304 |
+
|
305 |
+
def connection_from_context(
|
306 |
+
self, request_context: dict[str, typing.Any]
|
307 |
+
) -> HTTPConnectionPool:
|
308 |
+
"""
|
309 |
+
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
|
310 |
+
|
311 |
+
``request_context`` must at least contain the ``scheme`` key and its
|
312 |
+
value must be a key in ``key_fn_by_scheme`` instance variable.
|
313 |
+
"""
|
314 |
+
if "strict" in request_context:
|
315 |
+
warnings.warn(
|
316 |
+
"The 'strict' parameter is no longer needed on Python 3+. "
|
317 |
+
"This will raise an error in urllib3 v2.1.0.",
|
318 |
+
DeprecationWarning,
|
319 |
+
)
|
320 |
+
request_context.pop("strict")
|
321 |
+
|
322 |
+
scheme = request_context["scheme"].lower()
|
323 |
+
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
|
324 |
+
if not pool_key_constructor:
|
325 |
+
raise URLSchemeUnknown(scheme)
|
326 |
+
pool_key = pool_key_constructor(request_context)
|
327 |
+
|
328 |
+
return self.connection_from_pool_key(pool_key, request_context=request_context)
|
329 |
+
|
330 |
+
def connection_from_pool_key(
|
331 |
+
self, pool_key: PoolKey, request_context: dict[str, typing.Any]
|
332 |
+
) -> HTTPConnectionPool:
|
333 |
+
"""
|
334 |
+
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
|
335 |
+
|
336 |
+
``pool_key`` should be a namedtuple that only contains immutable
|
337 |
+
objects. At a minimum it must have the ``scheme``, ``host``, and
|
338 |
+
``port`` fields.
|
339 |
+
"""
|
340 |
+
with self.pools.lock:
|
341 |
+
# If the scheme, host, or port doesn't match existing open
|
342 |
+
# connections, open a new ConnectionPool.
|
343 |
+
pool = self.pools.get(pool_key)
|
344 |
+
if pool:
|
345 |
+
return pool
|
346 |
+
|
347 |
+
# Make a fresh ConnectionPool of the desired type
|
348 |
+
scheme = request_context["scheme"]
|
349 |
+
host = request_context["host"]
|
350 |
+
port = request_context["port"]
|
351 |
+
pool = self._new_pool(scheme, host, port, request_context=request_context)
|
352 |
+
self.pools[pool_key] = pool
|
353 |
+
|
354 |
+
return pool
|
355 |
+
|
356 |
+
def connection_from_url(
|
357 |
+
self, url: str, pool_kwargs: dict[str, typing.Any] | None = None
|
358 |
+
) -> HTTPConnectionPool:
|
359 |
+
"""
|
360 |
+
Similar to :func:`urllib3.connectionpool.connection_from_url`.
|
361 |
+
|
362 |
+
If ``pool_kwargs`` is not provided and a new pool needs to be
|
363 |
+
constructed, ``self.connection_pool_kw`` is used to initialize
|
364 |
+
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
|
365 |
+
is provided, it is used instead. Note that if a new pool does not
|
366 |
+
need to be created for the request, the provided ``pool_kwargs`` are
|
367 |
+
not used.
|
368 |
+
"""
|
369 |
+
u = parse_url(url)
|
370 |
+
return self.connection_from_host(
|
371 |
+
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
|
372 |
+
)
|
373 |
+
|
374 |
+
def _merge_pool_kwargs(
|
375 |
+
self, override: dict[str, typing.Any] | None
|
376 |
+
) -> dict[str, typing.Any]:
|
377 |
+
"""
|
378 |
+
Merge a dictionary of override values for self.connection_pool_kw.
|
379 |
+
|
380 |
+
This does not modify self.connection_pool_kw and returns a new dict.
|
381 |
+
Any keys in the override dictionary with a value of ``None`` are
|
382 |
+
removed from the merged dictionary.
|
383 |
+
"""
|
384 |
+
base_pool_kwargs = self.connection_pool_kw.copy()
|
385 |
+
if override:
|
386 |
+
for key, value in override.items():
|
387 |
+
if value is None:
|
388 |
+
try:
|
389 |
+
del base_pool_kwargs[key]
|
390 |
+
except KeyError:
|
391 |
+
pass
|
392 |
+
else:
|
393 |
+
base_pool_kwargs[key] = value
|
394 |
+
return base_pool_kwargs
|
395 |
+
|
396 |
+
def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool:
|
397 |
+
"""
|
398 |
+
Indicates if the proxy requires the complete destination URL in the
|
399 |
+
request. Normally this is only needed when not using an HTTP CONNECT
|
400 |
+
tunnel.
|
401 |
+
"""
|
402 |
+
if self.proxy is None:
|
403 |
+
return False
|
404 |
+
|
405 |
+
return not connection_requires_http_tunnel(
|
406 |
+
self.proxy, self.proxy_config, parsed_url.scheme
|
407 |
+
)
|
408 |
+
|
409 |
+
def urlopen( # type: ignore[override]
|
410 |
+
self, method: str, url: str, redirect: bool = True, **kw: typing.Any
|
411 |
+
) -> BaseHTTPResponse:
|
412 |
+
"""
|
413 |
+
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
|
414 |
+
with custom cross-host redirect logic and only sends the request-uri
|
415 |
+
portion of the ``url``.
|
416 |
+
|
417 |
+
The given ``url`` parameter must be absolute, such that an appropriate
|
418 |
+
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
|
419 |
+
"""
|
420 |
+
u = parse_url(url)
|
421 |
+
|
422 |
+
if u.scheme is None:
|
423 |
+
warnings.warn(
|
424 |
+
"URLs without a scheme (ie 'https://') are deprecated and will raise an error "
|
425 |
+
"in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs "
|
426 |
+
"start with 'https://' or 'http://'. Read more in this issue: "
|
427 |
+
"https://github.com/urllib3/urllib3/issues/2920",
|
428 |
+
category=DeprecationWarning,
|
429 |
+
stacklevel=2,
|
430 |
+
)
|
431 |
+
|
432 |
+
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
|
433 |
+
|
434 |
+
kw["assert_same_host"] = False
|
435 |
+
kw["redirect"] = False
|
436 |
+
|
437 |
+
if "headers" not in kw:
|
438 |
+
kw["headers"] = self.headers
|
439 |
+
|
440 |
+
if self._proxy_requires_url_absolute_form(u):
|
441 |
+
response = conn.urlopen(method, url, **kw)
|
442 |
+
else:
|
443 |
+
response = conn.urlopen(method, u.request_uri, **kw)
|
444 |
+
|
445 |
+
redirect_location = redirect and response.get_redirect_location()
|
446 |
+
if not redirect_location:
|
447 |
+
return response
|
448 |
+
|
449 |
+
# Support relative URLs for redirecting.
|
450 |
+
redirect_location = urljoin(url, redirect_location)
|
451 |
+
|
452 |
+
if response.status == 303:
|
453 |
+
# Change the method according to RFC 9110, Section 15.4.4.
|
454 |
+
method = "GET"
|
455 |
+
# And lose the body not to transfer anything sensitive.
|
456 |
+
kw["body"] = None
|
457 |
+
kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
|
458 |
+
|
459 |
+
retries = kw.get("retries")
|
460 |
+
if not isinstance(retries, Retry):
|
461 |
+
retries = Retry.from_int(retries, redirect=redirect)
|
462 |
+
|
463 |
+
# Strip headers marked as unsafe to forward to the redirected location.
|
464 |
+
# Check remove_headers_on_redirect to avoid a potential network call within
|
465 |
+
# conn.is_same_host() which may use socket.gethostbyname() in the future.
|
466 |
+
if retries.remove_headers_on_redirect and not conn.is_same_host(
|
467 |
+
redirect_location
|
468 |
+
):
|
469 |
+
new_headers = kw["headers"].copy()
|
470 |
+
for header in kw["headers"]:
|
471 |
+
if header.lower() in retries.remove_headers_on_redirect:
|
472 |
+
new_headers.pop(header, None)
|
473 |
+
kw["headers"] = new_headers
|
474 |
+
|
475 |
+
try:
|
476 |
+
retries = retries.increment(method, url, response=response, _pool=conn)
|
477 |
+
except MaxRetryError:
|
478 |
+
if retries.raise_on_redirect:
|
479 |
+
response.drain_conn()
|
480 |
+
raise
|
481 |
+
return response
|
482 |
+
|
483 |
+
kw["retries"] = retries
|
484 |
+
kw["redirect"] = redirect
|
485 |
+
|
486 |
+
log.info("Redirecting %s -> %s", url, redirect_location)
|
487 |
+
|
488 |
+
response.drain_conn()
|
489 |
+
return self.urlopen(method, redirect_location, **kw)
|
490 |
+
|
491 |
+
|
492 |
+
class ProxyManager(PoolManager):
|
493 |
+
"""
|
494 |
+
Behaves just like :class:`PoolManager`, but sends all requests through
|
495 |
+
the defined proxy, using the CONNECT method for HTTPS URLs.
|
496 |
+
|
497 |
+
:param proxy_url:
|
498 |
+
The URL of the proxy to be used.
|
499 |
+
|
500 |
+
:param proxy_headers:
|
501 |
+
A dictionary containing headers that will be sent to the proxy. In case
|
502 |
+
of HTTP they are being sent with each request, while in the
|
503 |
+
HTTPS/CONNECT case they are sent only once. Could be used for proxy
|
504 |
+
authentication.
|
505 |
+
|
506 |
+
:param proxy_ssl_context:
|
507 |
+
The proxy SSL context is used to establish the TLS connection to the
|
508 |
+
proxy when using HTTPS proxies.
|
509 |
+
|
510 |
+
:param use_forwarding_for_https:
|
511 |
+
(Defaults to False) If set to True will forward requests to the HTTPS
|
512 |
+
proxy to be made on behalf of the client instead of creating a TLS
|
513 |
+
tunnel via the CONNECT method. **Enabling this flag means that request
|
514 |
+
and response headers and content will be visible from the HTTPS proxy**
|
515 |
+
whereas tunneling keeps request and response headers and content
|
516 |
+
private. IP address, target hostname, SNI, and port are always visible
|
517 |
+
to an HTTPS proxy even when this flag is disabled.
|
518 |
+
|
519 |
+
:param proxy_assert_hostname:
|
520 |
+
The hostname of the certificate to verify against.
|
521 |
+
|
522 |
+
:param proxy_assert_fingerprint:
|
523 |
+
The fingerprint of the certificate to verify against.
|
524 |
+
|
525 |
+
Example:
|
526 |
+
|
527 |
+
.. code-block:: python
|
528 |
+
|
529 |
+
import urllib3
|
530 |
+
|
531 |
+
proxy = urllib3.ProxyManager("https://localhost:3128/")
|
532 |
+
|
533 |
+
resp1 = proxy.request("GET", "https://google.com/")
|
534 |
+
resp2 = proxy.request("GET", "https://httpbin.org/")
|
535 |
+
|
536 |
+
print(len(proxy.pools))
|
537 |
+
# 1
|
538 |
+
|
539 |
+
resp3 = proxy.request("GET", "https://httpbin.org/")
|
540 |
+
resp4 = proxy.request("GET", "https://twitter.com/")
|
541 |
+
|
542 |
+
print(len(proxy.pools))
|
543 |
+
# 3
|
544 |
+
|
545 |
+
"""
|
546 |
+
|
547 |
+
def __init__(
|
548 |
+
self,
|
549 |
+
proxy_url: str,
|
550 |
+
num_pools: int = 10,
|
551 |
+
headers: typing.Mapping[str, str] | None = None,
|
552 |
+
proxy_headers: typing.Mapping[str, str] | None = None,
|
553 |
+
proxy_ssl_context: ssl.SSLContext | None = None,
|
554 |
+
use_forwarding_for_https: bool = False,
|
555 |
+
proxy_assert_hostname: None | str | typing.Literal[False] = None,
|
556 |
+
proxy_assert_fingerprint: str | None = None,
|
557 |
+
**connection_pool_kw: typing.Any,
|
558 |
+
) -> None:
|
559 |
+
if isinstance(proxy_url, HTTPConnectionPool):
|
560 |
+
str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}"
|
561 |
+
else:
|
562 |
+
str_proxy_url = proxy_url
|
563 |
+
proxy = parse_url(str_proxy_url)
|
564 |
+
|
565 |
+
if proxy.scheme not in ("http", "https"):
|
566 |
+
raise ProxySchemeUnknown(proxy.scheme)
|
567 |
+
|
568 |
+
if not proxy.port:
|
569 |
+
port = port_by_scheme.get(proxy.scheme, 80)
|
570 |
+
proxy = proxy._replace(port=port)
|
571 |
+
|
572 |
+
self.proxy = proxy
|
573 |
+
self.proxy_headers = proxy_headers or {}
|
574 |
+
self.proxy_ssl_context = proxy_ssl_context
|
575 |
+
self.proxy_config = ProxyConfig(
|
576 |
+
proxy_ssl_context,
|
577 |
+
use_forwarding_for_https,
|
578 |
+
proxy_assert_hostname,
|
579 |
+
proxy_assert_fingerprint,
|
580 |
+
)
|
581 |
+
|
582 |
+
connection_pool_kw["_proxy"] = self.proxy
|
583 |
+
connection_pool_kw["_proxy_headers"] = self.proxy_headers
|
584 |
+
connection_pool_kw["_proxy_config"] = self.proxy_config
|
585 |
+
|
586 |
+
super().__init__(num_pools, headers, **connection_pool_kw)
|
587 |
+
|
588 |
+
def connection_from_host(
|
589 |
+
self,
|
590 |
+
host: str | None,
|
591 |
+
port: int | None = None,
|
592 |
+
scheme: str | None = "http",
|
593 |
+
pool_kwargs: dict[str, typing.Any] | None = None,
|
594 |
+
) -> HTTPConnectionPool:
|
595 |
+
if scheme == "https":
|
596 |
+
return super().connection_from_host(
|
597 |
+
host, port, scheme, pool_kwargs=pool_kwargs
|
598 |
+
)
|
599 |
+
|
600 |
+
return super().connection_from_host(
|
601 |
+
self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr]
|
602 |
+
)
|
603 |
+
|
604 |
+
def _set_proxy_headers(
|
605 |
+
self, url: str, headers: typing.Mapping[str, str] | None = None
|
606 |
+
) -> typing.Mapping[str, str]:
|
607 |
+
"""
|
608 |
+
Sets headers needed by proxies: specifically, the Accept and Host
|
609 |
+
headers. Only sets headers not provided by the user.
|
610 |
+
"""
|
611 |
+
headers_ = {"Accept": "*/*"}
|
612 |
+
|
613 |
+
netloc = parse_url(url).netloc
|
614 |
+
if netloc:
|
615 |
+
headers_["Host"] = netloc
|
616 |
+
|
617 |
+
if headers:
|
618 |
+
headers_.update(headers)
|
619 |
+
return headers_
|
620 |
+
|
621 |
+
def urlopen( # type: ignore[override]
|
622 |
+
self, method: str, url: str, redirect: bool = True, **kw: typing.Any
|
623 |
+
) -> BaseHTTPResponse:
|
624 |
+
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
|
625 |
+
u = parse_url(url)
|
626 |
+
if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
|
627 |
+
# For connections using HTTP CONNECT, httplib sets the necessary
|
628 |
+
# headers on the CONNECT to the proxy. If we're not using CONNECT,
|
629 |
+
# we'll definitely need to set 'Host' at the very least.
|
630 |
+
headers = kw.get("headers", self.headers)
|
631 |
+
kw["headers"] = self._set_proxy_headers(url, headers)
|
632 |
+
|
633 |
+
return super().urlopen(method, url, redirect=redirect, **kw)
|
634 |
+
|
635 |
+
|
636 |
+
def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager:
|
637 |
+
return ProxyManager(proxy_url=url, **kw)
|
.venv/lib/python3.11/site-packages/urllib3/util/__init__.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# For backwards compatibility, provide imports that used to be here.
|
2 |
+
from __future__ import annotations
|
3 |
+
|
4 |
+
from .connection import is_connection_dropped
|
5 |
+
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
|
6 |
+
from .response import is_fp_closed
|
7 |
+
from .retry import Retry
|
8 |
+
from .ssl_ import (
|
9 |
+
ALPN_PROTOCOLS,
|
10 |
+
IS_PYOPENSSL,
|
11 |
+
SSLContext,
|
12 |
+
assert_fingerprint,
|
13 |
+
create_urllib3_context,
|
14 |
+
resolve_cert_reqs,
|
15 |
+
resolve_ssl_version,
|
16 |
+
ssl_wrap_socket,
|
17 |
+
)
|
18 |
+
from .timeout import Timeout
|
19 |
+
from .url import Url, parse_url
|
20 |
+
from .wait import wait_for_read, wait_for_write
|
21 |
+
|
22 |
+
__all__ = (
|
23 |
+
"IS_PYOPENSSL",
|
24 |
+
"SSLContext",
|
25 |
+
"ALPN_PROTOCOLS",
|
26 |
+
"Retry",
|
27 |
+
"Timeout",
|
28 |
+
"Url",
|
29 |
+
"assert_fingerprint",
|
30 |
+
"create_urllib3_context",
|
31 |
+
"is_connection_dropped",
|
32 |
+
"is_fp_closed",
|
33 |
+
"parse_url",
|
34 |
+
"make_headers",
|
35 |
+
"resolve_cert_reqs",
|
36 |
+
"resolve_ssl_version",
|
37 |
+
"ssl_wrap_socket",
|
38 |
+
"wait_for_read",
|
39 |
+
"wait_for_write",
|
40 |
+
"SKIP_HEADER",
|
41 |
+
"SKIPPABLE_HEADERS",
|
42 |
+
)
|
.venv/lib/python3.11/site-packages/urllib3/util/__pycache__/ssl_.cpython-311.pyc
ADDED
Binary file (17.5 kB). View file
|
|
.venv/lib/python3.11/site-packages/urllib3/util/proxy.py
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import typing
|
4 |
+
|
5 |
+
from .url import Url
|
6 |
+
|
7 |
+
if typing.TYPE_CHECKING:
|
8 |
+
from ..connection import ProxyConfig
|
9 |
+
|
10 |
+
|
11 |
+
def connection_requires_http_tunnel(
|
12 |
+
proxy_url: Url | None = None,
|
13 |
+
proxy_config: ProxyConfig | None = None,
|
14 |
+
destination_scheme: str | None = None,
|
15 |
+
) -> bool:
|
16 |
+
"""
|
17 |
+
Returns True if the connection requires an HTTP CONNECT through the proxy.
|
18 |
+
|
19 |
+
:param URL proxy_url:
|
20 |
+
URL of the proxy.
|
21 |
+
:param ProxyConfig proxy_config:
|
22 |
+
Proxy configuration from poolmanager.py
|
23 |
+
:param str destination_scheme:
|
24 |
+
The scheme of the destination. (i.e https, http, etc)
|
25 |
+
"""
|
26 |
+
# If we're not using a proxy, no way to use a tunnel.
|
27 |
+
if proxy_url is None:
|
28 |
+
return False
|
29 |
+
|
30 |
+
# HTTP destinations never require tunneling, we always forward.
|
31 |
+
if destination_scheme == "http":
|
32 |
+
return False
|
33 |
+
|
34 |
+
# Support for forwarding with HTTPS proxies and HTTPS destinations.
|
35 |
+
if (
|
36 |
+
proxy_url.scheme == "https"
|
37 |
+
and proxy_config
|
38 |
+
and proxy_config.use_forwarding_for_https
|
39 |
+
):
|
40 |
+
return False
|
41 |
+
|
42 |
+
# Otherwise always use a tunnel.
|
43 |
+
return True
|
.venv/lib/python3.11/site-packages/urllib3/util/request.py
ADDED
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import io
|
4 |
+
import typing
|
5 |
+
from base64 import b64encode
|
6 |
+
from enum import Enum
|
7 |
+
|
8 |
+
from ..exceptions import UnrewindableBodyError
|
9 |
+
from .util import to_bytes
|
10 |
+
|
11 |
+
if typing.TYPE_CHECKING:
|
12 |
+
from typing import Final
|
13 |
+
|
14 |
+
# Pass as a value within ``headers`` to skip
|
15 |
+
# emitting some HTTP headers that are added automatically.
|
16 |
+
# The only headers that are supported are ``Accept-Encoding``,
|
17 |
+
# ``Host``, and ``User-Agent``.
|
18 |
+
SKIP_HEADER = "@@@SKIP_HEADER@@@"
|
19 |
+
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
|
20 |
+
|
21 |
+
ACCEPT_ENCODING = "gzip,deflate"
|
22 |
+
try:
|
23 |
+
try:
|
24 |
+
import brotlicffi as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401
|
25 |
+
except ImportError:
|
26 |
+
import brotli as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401
|
27 |
+
except ImportError:
|
28 |
+
pass
|
29 |
+
else:
|
30 |
+
ACCEPT_ENCODING += ",br"
|
31 |
+
try:
|
32 |
+
import zstandard as _unused_module_zstd # noqa: F401
|
33 |
+
except ImportError:
|
34 |
+
pass
|
35 |
+
else:
|
36 |
+
ACCEPT_ENCODING += ",zstd"
|
37 |
+
|
38 |
+
|
39 |
+
class _TYPE_FAILEDTELL(Enum):
|
40 |
+
token = 0
|
41 |
+
|
42 |
+
|
43 |
+
_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token
|
44 |
+
|
45 |
+
_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL]
|
46 |
+
|
47 |
+
# When sending a request with these methods we aren't expecting
|
48 |
+
# a body so don't need to set an explicit 'Content-Length: 0'
|
49 |
+
# The reason we do this in the negative instead of tracking methods
|
50 |
+
# which 'should' have a body is because unknown methods should be
|
51 |
+
# treated as if they were 'POST' which *does* expect a body.
|
52 |
+
_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"}
|
53 |
+
|
54 |
+
|
55 |
+
def make_headers(
|
56 |
+
keep_alive: bool | None = None,
|
57 |
+
accept_encoding: bool | list[str] | str | None = None,
|
58 |
+
user_agent: str | None = None,
|
59 |
+
basic_auth: str | None = None,
|
60 |
+
proxy_basic_auth: str | None = None,
|
61 |
+
disable_cache: bool | None = None,
|
62 |
+
) -> dict[str, str]:
|
63 |
+
"""
|
64 |
+
Shortcuts for generating request headers.
|
65 |
+
|
66 |
+
:param keep_alive:
|
67 |
+
If ``True``, adds 'connection: keep-alive' header.
|
68 |
+
|
69 |
+
:param accept_encoding:
|
70 |
+
Can be a boolean, list, or string.
|
71 |
+
``True`` translates to 'gzip,deflate'. If the dependencies for
|
72 |
+
Brotli (either the ``brotli`` or ``brotlicffi`` package) and/or Zstandard
|
73 |
+
(the ``zstandard`` package) algorithms are installed, then their encodings are
|
74 |
+
included in the string ('br' and 'zstd', respectively).
|
75 |
+
List will get joined by comma.
|
76 |
+
String will be used as provided.
|
77 |
+
|
78 |
+
:param user_agent:
|
79 |
+
String representing the user-agent you want, such as
|
80 |
+
"python-urllib3/0.6"
|
81 |
+
|
82 |
+
:param basic_auth:
|
83 |
+
Colon-separated username:password string for 'authorization: basic ...'
|
84 |
+
auth header.
|
85 |
+
|
86 |
+
:param proxy_basic_auth:
|
87 |
+
Colon-separated username:password string for 'proxy-authorization: basic ...'
|
88 |
+
auth header.
|
89 |
+
|
90 |
+
:param disable_cache:
|
91 |
+
If ``True``, adds 'cache-control: no-cache' header.
|
92 |
+
|
93 |
+
Example:
|
94 |
+
|
95 |
+
.. code-block:: python
|
96 |
+
|
97 |
+
import urllib3
|
98 |
+
|
99 |
+
print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0"))
|
100 |
+
# {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
|
101 |
+
print(urllib3.util.make_headers(accept_encoding=True))
|
102 |
+
# {'accept-encoding': 'gzip,deflate'}
|
103 |
+
"""
|
104 |
+
headers: dict[str, str] = {}
|
105 |
+
if accept_encoding:
|
106 |
+
if isinstance(accept_encoding, str):
|
107 |
+
pass
|
108 |
+
elif isinstance(accept_encoding, list):
|
109 |
+
accept_encoding = ",".join(accept_encoding)
|
110 |
+
else:
|
111 |
+
accept_encoding = ACCEPT_ENCODING
|
112 |
+
headers["accept-encoding"] = accept_encoding
|
113 |
+
|
114 |
+
if user_agent:
|
115 |
+
headers["user-agent"] = user_agent
|
116 |
+
|
117 |
+
if keep_alive:
|
118 |
+
headers["connection"] = "keep-alive"
|
119 |
+
|
120 |
+
if basic_auth:
|
121 |
+
headers["authorization"] = (
|
122 |
+
f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}"
|
123 |
+
)
|
124 |
+
|
125 |
+
if proxy_basic_auth:
|
126 |
+
headers["proxy-authorization"] = (
|
127 |
+
f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}"
|
128 |
+
)
|
129 |
+
|
130 |
+
if disable_cache:
|
131 |
+
headers["cache-control"] = "no-cache"
|
132 |
+
|
133 |
+
return headers
|
134 |
+
|
135 |
+
|
136 |
+
def set_file_position(
|
137 |
+
body: typing.Any, pos: _TYPE_BODY_POSITION | None
|
138 |
+
) -> _TYPE_BODY_POSITION | None:
|
139 |
+
"""
|
140 |
+
If a position is provided, move file to that point.
|
141 |
+
Otherwise, we'll attempt to record a position for future use.
|
142 |
+
"""
|
143 |
+
if pos is not None:
|
144 |
+
rewind_body(body, pos)
|
145 |
+
elif getattr(body, "tell", None) is not None:
|
146 |
+
try:
|
147 |
+
pos = body.tell()
|
148 |
+
except OSError:
|
149 |
+
# This differentiates from None, allowing us to catch
|
150 |
+
# a failed `tell()` later when trying to rewind the body.
|
151 |
+
pos = _FAILEDTELL
|
152 |
+
|
153 |
+
return pos
|
154 |
+
|
155 |
+
|
156 |
+
def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None:
|
157 |
+
"""
|
158 |
+
Attempt to rewind body to a certain position.
|
159 |
+
Primarily used for request redirects and retries.
|
160 |
+
|
161 |
+
:param body:
|
162 |
+
File-like object that supports seek.
|
163 |
+
|
164 |
+
:param int pos:
|
165 |
+
Position to seek to in file.
|
166 |
+
"""
|
167 |
+
body_seek = getattr(body, "seek", None)
|
168 |
+
if body_seek is not None and isinstance(body_pos, int):
|
169 |
+
try:
|
170 |
+
body_seek(body_pos)
|
171 |
+
except OSError as e:
|
172 |
+
raise UnrewindableBodyError(
|
173 |
+
"An error occurred when rewinding request body for redirect/retry."
|
174 |
+
) from e
|
175 |
+
elif body_pos is _FAILEDTELL:
|
176 |
+
raise UnrewindableBodyError(
|
177 |
+
"Unable to record file position for rewinding "
|
178 |
+
"request body during a redirect/retry."
|
179 |
+
)
|
180 |
+
else:
|
181 |
+
raise ValueError(
|
182 |
+
f"body_pos must be of type integer, instead it was {type(body_pos)}."
|
183 |
+
)
|
184 |
+
|
185 |
+
|
186 |
+
class ChunksAndContentLength(typing.NamedTuple):
|
187 |
+
chunks: typing.Iterable[bytes] | None
|
188 |
+
content_length: int | None
|
189 |
+
|
190 |
+
|
191 |
+
def body_to_chunks(
|
192 |
+
body: typing.Any | None, method: str, blocksize: int
|
193 |
+
) -> ChunksAndContentLength:
|
194 |
+
"""Takes the HTTP request method, body, and blocksize and
|
195 |
+
transforms them into an iterable of chunks to pass to
|
196 |
+
socket.sendall() and an optional 'Content-Length' header.
|
197 |
+
|
198 |
+
A 'Content-Length' of 'None' indicates the length of the body
|
199 |
+
can't be determined so should use 'Transfer-Encoding: chunked'
|
200 |
+
for framing instead.
|
201 |
+
"""
|
202 |
+
|
203 |
+
chunks: typing.Iterable[bytes] | None
|
204 |
+
content_length: int | None
|
205 |
+
|
206 |
+
# No body, we need to make a recommendation on 'Content-Length'
|
207 |
+
# based on whether that request method is expected to have
|
208 |
+
# a body or not.
|
209 |
+
if body is None:
|
210 |
+
chunks = None
|
211 |
+
if method.upper() not in _METHODS_NOT_EXPECTING_BODY:
|
212 |
+
content_length = 0
|
213 |
+
else:
|
214 |
+
content_length = None
|
215 |
+
|
216 |
+
# Bytes or strings become bytes
|
217 |
+
elif isinstance(body, (str, bytes)):
|
218 |
+
chunks = (to_bytes(body),)
|
219 |
+
content_length = len(chunks[0])
|
220 |
+
|
221 |
+
# File-like object, TODO: use seek() and tell() for length?
|
222 |
+
elif hasattr(body, "read"):
|
223 |
+
|
224 |
+
def chunk_readable() -> typing.Iterable[bytes]:
|
225 |
+
nonlocal body, blocksize
|
226 |
+
encode = isinstance(body, io.TextIOBase)
|
227 |
+
while True:
|
228 |
+
datablock = body.read(blocksize)
|
229 |
+
if not datablock:
|
230 |
+
break
|
231 |
+
if encode:
|
232 |
+
datablock = datablock.encode("utf-8")
|
233 |
+
yield datablock
|
234 |
+
|
235 |
+
chunks = chunk_readable()
|
236 |
+
content_length = None
|
237 |
+
|
238 |
+
# Otherwise we need to start checking via duck-typing.
|
239 |
+
else:
|
240 |
+
try:
|
241 |
+
# Check if the body implements the buffer API.
|
242 |
+
mv = memoryview(body)
|
243 |
+
except TypeError:
|
244 |
+
try:
|
245 |
+
# Check if the body is an iterable
|
246 |
+
chunks = iter(body)
|
247 |
+
content_length = None
|
248 |
+
except TypeError:
|
249 |
+
raise TypeError(
|
250 |
+
f"'body' must be a bytes-like object, file-like "
|
251 |
+
f"object, or iterable. Instead was {body!r}"
|
252 |
+
) from None
|
253 |
+
else:
|
254 |
+
# Since it implements the buffer API can be passed directly to socket.sendall()
|
255 |
+
chunks = (body,)
|
256 |
+
content_length = mv.nbytes
|
257 |
+
|
258 |
+
return ChunksAndContentLength(chunks=chunks, content_length=content_length)
|
.venv/lib/python3.11/site-packages/urllib3/util/retry.py
ADDED
@@ -0,0 +1,533 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import email
|
4 |
+
import logging
|
5 |
+
import random
|
6 |
+
import re
|
7 |
+
import time
|
8 |
+
import typing
|
9 |
+
from itertools import takewhile
|
10 |
+
from types import TracebackType
|
11 |
+
|
12 |
+
from ..exceptions import (
|
13 |
+
ConnectTimeoutError,
|
14 |
+
InvalidHeader,
|
15 |
+
MaxRetryError,
|
16 |
+
ProtocolError,
|
17 |
+
ProxyError,
|
18 |
+
ReadTimeoutError,
|
19 |
+
ResponseError,
|
20 |
+
)
|
21 |
+
from .util import reraise
|
22 |
+
|
23 |
+
if typing.TYPE_CHECKING:
|
24 |
+
from typing_extensions import Self
|
25 |
+
|
26 |
+
from ..connectionpool import ConnectionPool
|
27 |
+
from ..response import BaseHTTPResponse
|
28 |
+
|
29 |
+
log = logging.getLogger(__name__)
|
30 |
+
|
31 |
+
|
32 |
+
# Data structure for representing the metadata of requests that result in a retry.
|
33 |
+
class RequestHistory(typing.NamedTuple):
|
34 |
+
method: str | None
|
35 |
+
url: str | None
|
36 |
+
error: Exception | None
|
37 |
+
status: int | None
|
38 |
+
redirect_location: str | None
|
39 |
+
+
+class Retry:
+    """Retry configuration.
+
+    Each retry attempt will create a new Retry object with updated values, so
+    they can be safely reused.
+
+    Retries can be defined as a default for a pool:
+
+    .. code-block:: python
+
+        retries = Retry(connect=5, read=2, redirect=5)
+        http = PoolManager(retries=retries)
+        response = http.request("GET", "https://example.com/")
+
+    Or per-request (which overrides the default for the pool):
+
+    .. code-block:: python
+
+        response = http.request("GET", "https://example.com/", retries=Retry(10))
+
+    Retries can be disabled by passing ``False``:
+
+    .. code-block:: python
+
+        response = http.request("GET", "https://example.com/", retries=False)
+
+    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+    retries are disabled, in which case the causing exception will be raised.
+
+    :param int total:
+        Total number of retries to allow. Takes precedence over other counts.
+
+        Set to ``None`` to remove this constraint and fall back on other
+        counts.
+
+        Set to ``0`` to fail on the first retry.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param int connect:
+        How many connection-related errors to retry on.
+
+        These are errors raised before the request is sent to the remote server,
+        which we assume has not triggered the server to process the request.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int read:
+        How many times to retry on read errors.
+
+        These errors are raised after the request was sent to the server, so the
+        request may have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int redirect:
+        How many redirects to perform. Limit this to avoid infinite redirect
+        loops.
+
+        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+        308.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param int status:
+        How many times to retry on bad status codes.
+
+        These are retries made on responses, where the status code matches
+        ``status_forcelist``.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int other:
+        How many times to retry on other errors.
+
+        Other errors are errors that are not connect, read, redirect or status errors.
+        These errors might be raised after the request was sent to the server, so the
+        request might have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        If ``total`` is not set, it's a good idea to set this to 0 to account
+        for unexpected edge cases and avoid infinite retry loops.
+
+    :param Collection allowed_methods:
+        Set of uppercased HTTP method verbs that we should retry on.
+
+        By default, we only retry on methods which are considered to be
+        idempotent (multiple requests with the same parameters end with the
+        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+
+        Set to a ``None`` value to retry on any verb.
+
+    :param Collection status_forcelist:
+        A set of integer HTTP status codes that we should force a retry on.
+        A retry is initiated if the request method is in ``allowed_methods``
+        and the response status code is in ``status_forcelist``.
+
+        By default, this is disabled with ``None``.
+
+    :param float backoff_factor:
+        A backoff factor to apply between attempts after the second try
+        (most errors are resolved immediately by a second try without a
+        delay). urllib3 will sleep for::
+
+            {backoff factor} * (2 ** ({number of previous retries}))
+
+        seconds. If ``backoff_jitter`` is non-zero, this sleep is extended by::
+
+            random.uniform(0, {backoff jitter})
+
+        seconds. For example, if the backoff_factor is 0.1, then :func:`Retry.sleep` will
+        sleep for [0.0s, 0.2s, 0.4s, 0.8s, ...] between retries. No backoff will ever
+        be longer than ``backoff_max``.
+
+        By default, backoff is disabled (factor set to 0).
+
+    :param bool raise_on_redirect: Whether, if the number of redirects is
+        exhausted, to raise a MaxRetryError, or to return a response with a
+        response code in the 3xx range.
+
+    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+        whether we should raise an exception, or return a response,
+        if status falls in ``status_forcelist`` range and retries have
+        been exhausted.
+
+    :param tuple history: The history of the request encountered during
+        each call to :meth:`~Retry.increment`. The list is in the order
+        the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+    :param bool respect_retry_after_header:
+        Whether to respect the Retry-After header on status codes defined as
+        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+    :param Collection remove_headers_on_redirect:
+        Sequence of headers to remove from the request when a response
+        indicating a redirect is returned before firing off the redirected
+        request.
+    """
+
+    #: Default methods to be used for ``allowed_methods``
+    DEFAULT_ALLOWED_METHODS = frozenset(
+        ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+    )
+
+    #: Status codes for which the ``Retry-After`` header is respected by default
+    RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+    #: Default headers to be used for ``remove_headers_on_redirect``
+    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(
+        ["Cookie", "Authorization", "Proxy-Authorization"]
+    )
+
+    #: Default maximum backoff time.
+    DEFAULT_BACKOFF_MAX = 120
+
+    # Backward compatibility; assigned outside of the class.
+    DEFAULT: typing.ClassVar[Retry]
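The class-level defaults above can all be overridden per instance. A minimal sketch, assuming urllib3 2.x is importable; the method and status values are arbitrary examples, not recommendations:

    from urllib3 import PoolManager
    from urllib3.util.retry import Retry

    # Also retry POST, force retries on 429/503, and back off between attempts.
    retries = Retry(
        total=5,
        allowed_methods=Retry.DEFAULT_ALLOWED_METHODS | {"POST"},
        status_forcelist={429, 503},
        backoff_factor=0.5,
    )
    http = PoolManager(retries=retries)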
+
+    def __init__(
+        self,
+        total: bool | int | None = 10,
+        connect: int | None = None,
+        read: int | None = None,
+        redirect: bool | int | None = None,
+        status: int | None = None,
+        other: int | None = None,
+        allowed_methods: typing.Collection[str] | None = DEFAULT_ALLOWED_METHODS,
+        status_forcelist: typing.Collection[int] | None = None,
+        backoff_factor: float = 0,
+        backoff_max: float = DEFAULT_BACKOFF_MAX,
+        raise_on_redirect: bool = True,
+        raise_on_status: bool = True,
+        history: tuple[RequestHistory, ...] | None = None,
+        respect_retry_after_header: bool = True,
+        remove_headers_on_redirect: typing.Collection[
+            str
+        ] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
+        backoff_jitter: float = 0.0,
+    ) -> None:
+        self.total = total
+        self.connect = connect
+        self.read = read
+        self.status = status
+        self.other = other
+
+        if redirect is False or total is False:
+            redirect = 0
+            raise_on_redirect = False
+
+        self.redirect = redirect
+        self.status_forcelist = status_forcelist or set()
+        self.allowed_methods = allowed_methods
+        self.backoff_factor = backoff_factor
+        self.backoff_max = backoff_max
+        self.raise_on_redirect = raise_on_redirect
+        self.raise_on_status = raise_on_status
+        self.history = history or ()
+        self.respect_retry_after_header = respect_retry_after_header
+        self.remove_headers_on_redirect = frozenset(
+            h.lower() for h in remove_headers_on_redirect
+        )
+        self.backoff_jitter = backoff_jitter
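As the constructor shows, passing False for total or redirect is normalized into "zero redirect retries, and do not raise on redirects". A quick illustrative check:

    from urllib3.util.retry import Retry

    r = Retry(total=False)                  # retries fully disabled
    assert r.redirect == 0                  # redirect counter zeroed by __init__
    assert r.raise_on_redirect is False
    assert Retry(redirect=False).raise_on_redirect is False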
+
+    def new(self, **kw: typing.Any) -> Self:
+        params = dict(
+            total=self.total,
+            connect=self.connect,
+            read=self.read,
+            redirect=self.redirect,
+            status=self.status,
+            other=self.other,
+            allowed_methods=self.allowed_methods,
+            status_forcelist=self.status_forcelist,
+            backoff_factor=self.backoff_factor,
+            backoff_max=self.backoff_max,
+            raise_on_redirect=self.raise_on_redirect,
+            raise_on_status=self.raise_on_status,
+            history=self.history,
+            remove_headers_on_redirect=self.remove_headers_on_redirect,
+            respect_retry_after_header=self.respect_retry_after_header,
+            backoff_jitter=self.backoff_jitter,
+        )
+
+        params.update(kw)
+        return type(self)(**params)  # type: ignore[arg-type]
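new() copies the configuration with selected fields overridden; increment() below relies on it to produce each successor object. An illustrative use:

    from urllib3.util.retry import Retry

    base = Retry(total=5, backoff_factor=0.2)
    stricter = base.new(read=0)     # same configuration, but fail on the first read error
    assert stricter.total == 5 and stricter.read == 0
    assert base.read is None        # the original object is unchanged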
+
+    @classmethod
+    def from_int(
+        cls,
+        retries: Retry | bool | int | None,
+        redirect: bool | int | None = True,
+        default: Retry | bool | int | None = None,
+    ) -> Retry:
+        """Backwards-compatibility for the old retries format."""
+        if retries is None:
+            retries = default if default is not None else cls.DEFAULT
+
+        if isinstance(retries, Retry):
+            return retries
+
+        redirect = bool(redirect) and None
+        new_retries = cls(retries, redirect=redirect)
+        log.debug("Converted retries value: %r -> %r", retries, new_retries)
+        return new_retries
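from_int() keeps the older retries=<int> / retries=None call styles working. Illustrative behaviour, with example values:

    from urllib3.util.retry import Retry

    assert Retry.from_int(None) is Retry.DEFAULT        # falls back to the class default
    assert Retry.from_int(7).total == 7                 # a bare int becomes a Retry
    existing = Retry(connect=2)
    assert Retry.from_int(existing) is existing         # Retry instances pass through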
+
+    def get_backoff_time(self) -> float:
+        """Formula for computing the current backoff
+
+        :rtype: float
+        """
+        # We want to consider only the last consecutive errors sequence (Ignore redirects).
+        consecutive_errors_len = len(
+            list(
+                takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+            )
+        )
+        if consecutive_errors_len <= 1:
+            return 0
+
+        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+        if self.backoff_jitter != 0.0:
+            backoff_value += random.random() * self.backoff_jitter
+        return float(max(0, min(self.backoff_max, backoff_value)))
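With backoff_factor=0.5, backoff_max=10 and no jitter, the formula above yields delays of 0, 1, 2, 4, 8, then 10 (capped) seconds as consecutive errors accumulate. A small sketch reproducing that schedule outside the class, for illustration only:

    # Reproduce get_backoff_time()'s schedule for backoff_factor=0.5, backoff_max=10.
    factor, cap = 0.5, 10
    for errors in range(1, 7):
        delay = 0.0 if errors <= 1 else min(cap, factor * 2 ** (errors - 1))
        print(errors, delay)    # 1 -> 0.0, 2 -> 1.0, 3 -> 2.0, 4 -> 4.0, 5 -> 8.0, 6 -> 10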
+
+    def parse_retry_after(self, retry_after: str) -> float:
+        seconds: float
+        # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+        if re.match(r"^\s*[0-9]+\s*$", retry_after):
+            seconds = int(retry_after)
+        else:
+            retry_date_tuple = email.utils.parsedate_tz(retry_after)
+            if retry_date_tuple is None:
+                raise InvalidHeader(f"Invalid Retry-After header: {retry_after}")
+
+            retry_date = email.utils.mktime_tz(retry_date_tuple)
+            seconds = retry_date - time.time()
+
+        seconds = max(seconds, 0)
+
+        return seconds
+
+    def get_retry_after(self, response: BaseHTTPResponse) -> float | None:
+        """Get the value of Retry-After in seconds."""
+
+        retry_after = response.headers.get("Retry-After")
+
+        if retry_after is None:
+            return None
+
+        return self.parse_retry_after(retry_after)
+
+    def sleep_for_retry(self, response: BaseHTTPResponse) -> bool:
+        retry_after = self.get_retry_after(response)
+        if retry_after:
+            time.sleep(retry_after)
+            return True
+
+        return False
+
+    def _sleep_backoff(self) -> None:
+        backoff = self.get_backoff_time()
+        if backoff <= 0:
+            return
+        time.sleep(backoff)
+
+    def sleep(self, response: BaseHTTPResponse | None = None) -> None:
+        """Sleep between retry attempts.
+
+        This method will respect a server's ``Retry-After`` response header
+        and sleep the duration of the time requested. If that is not present, it
+        will use an exponential backoff. By default, the backoff factor is 0 and
+        this method will return immediately.
+        """
+
+        if self.respect_retry_after_header and response:
+            slept = self.sleep_for_retry(response)
+            if slept:
+                return
+
+        self._sleep_backoff()
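parse_retry_after() accepts both forms the Retry-After header allows: delta-seconds and an HTTP-date. A brief illustration (the date result depends on the current clock):

    from urllib3.util.retry import Retry

    r = Retry()
    assert r.parse_retry_after("120") == 120                       # delta-seconds form
    wait = r.parse_retry_after("Wed, 21 Oct 2026 07:28:00 GMT")    # HTTP-date form
    assert wait >= 0                                               # clamped to zero once the date has passed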
+
+    def _is_connection_error(self, err: Exception) -> bool:
+        """Errors when we're fairly sure that the server did not receive the
+        request, so it should be safe to retry.
+        """
+        if isinstance(err, ProxyError):
+            err = err.original_error
+        return isinstance(err, ConnectTimeoutError)
+
+    def _is_read_error(self, err: Exception) -> bool:
+        """Errors that occur after the request has been started, so we should
+        assume that the server began processing it.
+        """
+        return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+    def _is_method_retryable(self, method: str) -> bool:
+        """Checks if a given HTTP method should be retried upon, depending on
+        whether it is included in ``allowed_methods``.
+        """
+        if self.allowed_methods and method.upper() not in self.allowed_methods:
+            return False
+        return True
+
+    def is_retry(
+        self, method: str, status_code: int, has_retry_after: bool = False
+    ) -> bool:
+        """Is this method/status code retryable? (Based on allowlists and control
+        variables such as the number of total retries to allow, whether to
+        respect the Retry-After header, whether this header is present, and
+        whether the returned status code is on the list of status codes to
+        be retried upon in the presence of the aforementioned header)
+        """
+        if not self._is_method_retryable(method):
+            return False
+
+        if self.status_forcelist and status_code in self.status_forcelist:
+            return True
+
+        return bool(
+            self.total
+            and self.respect_retry_after_header
+            and has_retry_after
+            and (status_code in self.RETRY_AFTER_STATUS_CODES)
+        )
+
+    def is_exhausted(self) -> bool:
+        """Are we out of retries?"""
+        retry_counts = [
+            x
+            for x in (
+                self.total,
+                self.connect,
+                self.read,
+                self.redirect,
+                self.status,
+                self.other,
+            )
+            if x
+        ]
+        if not retry_counts:
+            return False
+
+        return min(retry_counts) < 0
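is_retry() combines the method allow-list, the status forcelist and the Retry-After handling. For example, with illustrative status codes:

    from urllib3.util.retry import Retry

    r = Retry()
    assert r.is_retry("GET", 503, has_retry_after=True)       # 503 + Retry-After on an idempotent verb
    assert not r.is_retry("POST", 503, has_retry_after=True)  # POST is not in DEFAULT_ALLOWED_METHODS
    forced = Retry(status_forcelist={500})
    assert forced.is_retry("GET", 500)                        # forcelist hit, no Retry-After needed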
+
+    def increment(
+        self,
+        method: str | None = None,
+        url: str | None = None,
+        response: BaseHTTPResponse | None = None,
+        error: Exception | None = None,
+        _pool: ConnectionPool | None = None,
+        _stacktrace: TracebackType | None = None,
+    ) -> Self:
+        """Return a new Retry object with incremented retry counters.
+
+        :param response: A response object, or None, if the server did not
+            return a response.
+        :type response: :class:`~urllib3.response.BaseHTTPResponse`
+        :param Exception error: An error encountered during the request, or
+            None if the response was received successfully.
+
+        :return: A new ``Retry`` object.
+        """
+        if self.total is False and error:
+            # Disabled, indicate to re-raise the error.
+            raise reraise(type(error), error, _stacktrace)
+
+        total = self.total
+        if total is not None:
+            total -= 1
+
+        connect = self.connect
+        read = self.read
+        redirect = self.redirect
+        status_count = self.status
+        other = self.other
+        cause = "unknown"
+        status = None
+        redirect_location = None
+
+        if error and self._is_connection_error(error):
+            # Connect retry?
+            if connect is False:
+                raise reraise(type(error), error, _stacktrace)
+            elif connect is not None:
+                connect -= 1
+
+        elif error and self._is_read_error(error):
+            # Read retry?
+            if read is False or method is None or not self._is_method_retryable(method):
+                raise reraise(type(error), error, _stacktrace)
+            elif read is not None:
+                read -= 1
+
+        elif error:
+            # Other retry?
+            if other is not None:
+                other -= 1
+
+        elif response and response.get_redirect_location():
+            # Redirect retry?
+            if redirect is not None:
+                redirect -= 1
+            cause = "too many redirects"
+            response_redirect_location = response.get_redirect_location()
+            if response_redirect_location:
+                redirect_location = response_redirect_location
+            status = response.status
+
+        else:
+            # Incrementing because of a server error like a 500 in
+            # status_forcelist and the given method is in the allowed_methods
+            cause = ResponseError.GENERIC_ERROR
+            if response and response.status:
+                if status_count is not None:
+                    status_count -= 1
+                cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+                status = response.status
+
+        history = self.history + (
+            RequestHistory(method, url, error, status, redirect_location),
+        )
+
+        new_retry = self.new(
+            total=total,
+            connect=connect,
+            read=read,
+            redirect=redirect,
+            status=status_count,
+            other=other,
+            history=history,
+        )
+
+        if new_retry.is_exhausted():
+            reason = error or ResponseError(cause)
+            raise MaxRetryError(_pool, url, reason) from reason  # type: ignore[arg-type]
+
+        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+
+        return new_retry
+
+    def __repr__(self) -> str:
+        return (
+            f"{type(self).__name__}(total={self.total}, connect={self.connect}, "
+            f"read={self.read}, redirect={self.redirect}, status={self.status})"
+        )
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
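To watch the counters and the exhaustion behaviour of increment() without a live server, the object can be driven by hand. A sketch assuming a generic exception, which lands in the "other" branch above since it is neither a connect nor a read error:

    from urllib3.exceptions import MaxRetryError
    from urllib3.util.retry import Retry

    retry = Retry(total=1)
    boom = OSError("simulated failure")                           # stand-in error, for illustration
    retry = retry.increment(method="GET", url="/", error=boom)    # total: 1 -> 0
    assert not retry.is_exhausted()
    try:
        retry.increment(method="GET", url="/", error=boom)        # total: 0 -> -1, exhausted
    except MaxRetryError as exc:
        print("giving up:", exc.reason)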