koichi12 commited on
Commit
799d677
·
verified ·
1 Parent(s): 9395cf5

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. client_secrets.json +1 -0
  2. gemini-gen3.py +41 -0
  3. problmes.json +0 -0
  4. tuning-competition-baseline/.gitignore +126 -0
  5. tuning-competition-baseline/.pre-commit-config.yaml +43 -0
  6. tuning-competition-baseline/.venv/bin/Activate.ps1 +247 -0
  7. tuning-competition-baseline/.venv/bin/activate +63 -0
  8. tuning-competition-baseline/.venv/bin/activate.csh +26 -0
  9. tuning-competition-baseline/.venv/bin/activate.fish +69 -0
  10. tuning-competition-baseline/.venv/bin/convert-caffe2-to-onnx +8 -0
  11. tuning-competition-baseline/.venv/bin/convert-onnx-to-caffe2 +8 -0
  12. tuning-competition-baseline/.venv/bin/cygdb +8 -0
  13. tuning-competition-baseline/.venv/bin/cython +8 -0
  14. tuning-competition-baseline/.venv/bin/cythonize +8 -0
  15. tuning-competition-baseline/.venv/bin/pip +8 -0
  16. tuning-competition-baseline/.venv/bin/pip3.11 +8 -0
  17. tuning-competition-baseline/.venv/bin/pybind11-config +8 -0
  18. tuning-competition-baseline/.venv/bin/python +0 -0
  19. tuning-competition-baseline/.venv/bin/python3 +0 -0
  20. tuning-competition-baseline/.venv/bin/python3.11 +0 -0
  21. tuning-competition-baseline/.venv/bin/torchrun +8 -0
  22. tuning-competition-baseline/.venv/bin/wheel +8 -0
  23. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/INSTALLER +1 -0
  24. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst +28 -0
  25. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/RECORD +58 -0
  26. tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt +2 -0
  27. tuning-competition-baseline/.venv/lib/python3.11/site-packages/cython.py +24 -0
  28. tuning-competition-baseline/.venv/lib/python3.11/site-packages/isympy.py +342 -0
  29. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/lazy_imports.py +190 -0
  30. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc +0 -0
  31. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc +0 -0
  32. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc +0 -0
  33. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc +0 -0
  34. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/graphmatrix.py +166 -0
  35. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/modularitymatrix.py +166 -0
  36. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  37. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-311.pyc +0 -0
  38. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py +402 -0
  39. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_bethehessian.py +41 -0
  40. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_laplacian.py +242 -0
  41. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc +0 -0
  42. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc +0 -0
  43. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc +0 -0
  44. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/p2g.cpython-311.pyc +0 -0
  45. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc +0 -0
  46. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc +0 -0
  47. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc +0 -0
  48. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/gml.py +878 -0
  49. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/graphml.py +1051 -0
  50. tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/json_graph/__init__.py +18 -0
client_secrets.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"installed":{"client_id":"880310693118-2v0cddkb4occnc45bmji0v9pmv8nrebv.apps.googleusercontent.com","project_id":"rare-deployment-367809","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}}
gemini-gen3.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from dotenv import load_dotenv
3
+ import google.generativeai as genai
4
+ import json
5
+
6
+ data = None
7
+ with open('problmes.json', 'rt') as f:
8
+ data = json.load(f)
9
+
10
+ problems = [d['problem'].replace('\n\n', '\n') for d in data]
11
+
12
+ print(len(problems))
13
+ print(len(problems)//50)
14
+
15
+ # .envファイルの読み込み
16
+ load_dotenv()
17
+
18
+ # API-KEYの設定
19
+ GOOGLE_API_KEY=os.getenv('GOOGLE_API_KEY')
20
+ genai.configure(api_key=GOOGLE_API_KEY)
21
+ gemini_pro = genai.GenerativeModel("gemini-1.5-flash")
22
+
23
+
24
+ for i in range(len(problems)//50):
25
+ start = i*50
26
+ end = (i+1)*50
27
+ text_problems = '\n\n'.join(problems[start:end])
28
+ prompt = "以下のテキストを行ごとに日本語に翻訳して,1行ずつ空けて出力してください.ただし,LATEX文字列には変更を加えずそのまま出力してください.\n\n" + text_problems
29
+
30
+ print('prompt:\n'+ prompt + '\n\n')
31
+ print("total_tokens: ", gemini_pro.count_tokens(prompt))
32
+ response = gemini_pro.generate_content(prompt)
33
+ print(response.text)
34
+ print('----- meta data -----')
35
+ print(response.usage_metadata)
36
+ print('---------------------')
37
+
38
+ output_dir = 'gens'
39
+ file_name = f'gens{start}-{end}.txt'
40
+ with open(os.path.join(output_dir, file_name), 'wt') as f:
41
+ f.write(response.text)
problmes.json ADDED
The diff for this file is too large to render. See raw diff
 
tuning-competition-baseline/.gitignore ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ *.egg-info/
24
+ .installed.cfg
25
+ *.egg
26
+ MANIFEST
27
+
28
+ # PyInstaller
29
+ # Usually these files are written by a python script from a template
30
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
31
+ *.manifest
32
+ *.spec
33
+
34
+ # Installer logs
35
+ pip-log.txt
36
+ pip-delete-this-directory.txt
37
+
38
+ # Unit test / coverage reports
39
+ htmlcov/
40
+ .tox/
41
+ .coverage
42
+ .coverage.*
43
+ .cache
44
+ nosetests.xml
45
+ coverage.xml
46
+ *.cover
47
+ .hypothesis/
48
+
49
+ # Translations
50
+ *.mo
51
+ *.pot
52
+
53
+ # Django stuff:
54
+ *.log
55
+ .static_storage/
56
+ .media/
57
+ local_settings.py
58
+
59
+ # Flask stuff:
60
+ instance/
61
+ .webassets-cache
62
+
63
+ # Scrapy stuff:
64
+ .scrapy
65
+
66
+ # Sphinx documentation
67
+ docs/_build/
68
+
69
+ # PyBuilder
70
+ target/
71
+
72
+ # Jupyter Notebook
73
+ .ipynb_checkpoints
74
+
75
+ # pyenv
76
+ .python-version
77
+
78
+ # celery beat schedule file
79
+ celerybeat-schedule
80
+
81
+ # SageMath parsed files
82
+ *.sage.py
83
+
84
+ # Environments
85
+ .venv
86
+ env/
87
+ venv/
88
+ ENV/
89
+ env.bak/
90
+ venv.bak/
91
+
92
+ # Spyder project settings
93
+ .spyderproject
94
+ .spyproject
95
+
96
+ # Rope project settings
97
+ .ropeproject
98
+
99
+ # mkdocs documentation
100
+ /site
101
+
102
+ # mypy
103
+ .mypy_cache/
104
+
105
+ # weights and biases
106
+ wandb/
107
+ outputs/
108
+
109
+ # config
110
+ configs/base.yaml
111
+
112
+ # local debugging
113
+ pretrained_model
114
+ tokenizer
115
+ result
116
+ log
117
+
118
+ # data
119
+ datasets
120
+
121
+ # dependencies
122
+ apex/
123
+ flash-attention/
124
+
125
+ # tmp dir
126
+ tmp/
tuning-competition-baseline/.pre-commit-config.yaml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ default_language_version:
2
+ python: python3.11
3
+ repos:
4
+ - repo: https://github.com/pre-commit/pre-commit-hooks
5
+ rev: v4.4.0
6
+ hooks:
7
+ - id: end-of-file-fixer
8
+ - id: trailing-whitespace
9
+ - id: check-yaml
10
+ - id: check-toml
11
+ - id: check-added-large-files
12
+ - repo: https://github.com/psf/black-pre-commit-mirror
13
+ rev: 23.9.0
14
+ hooks:
15
+ - id: black
16
+ - repo: https://github.com/PyCQA/flake8
17
+ rev: 6.1.0
18
+ hooks:
19
+ - id: flake8
20
+ additional_dependencies: [Flake8-pyproject]
21
+ - repo: https://github.com/PyCQA/isort
22
+ rev: 5.12.0
23
+ hooks:
24
+ - id: isort
25
+ - repo: https://github.com/pre-commit/mirrors-mypy
26
+ rev: v1.5.1
27
+ hooks:
28
+ - id: mypy
29
+ additional_dependencies:
30
+ - hydra-core==1.3.2
31
+ - torch==2.2.2
32
+ - torchmetrics==1.1.0
33
+ - tokenizers==0.13.3
34
+ - wandb==0.16.6
35
+ - typer==0.9.0
36
+ - types-PyYAML==6.0.12.11
37
+ - git+https://github.com/nobu-g/[email protected]
38
+ - repo: https://github.com/asottile/pyupgrade
39
+ rev: v3.10.1
40
+ hooks:
41
+ - id: pyupgrade
42
+ args:
43
+ - --py38-plus
tuning-competition-baseline/.venv/bin/Activate.ps1 ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <#
2
+ .Synopsis
3
+ Activate a Python virtual environment for the current PowerShell session.
4
+
5
+ .Description
6
+ Pushes the python executable for a virtual environment to the front of the
7
+ $Env:PATH environment variable and sets the prompt to signify that you are
8
+ in a Python virtual environment. Makes use of the command line switches as
9
+ well as the `pyvenv.cfg` file values present in the virtual environment.
10
+
11
+ .Parameter VenvDir
12
+ Path to the directory that contains the virtual environment to activate. The
13
+ default value for this is the parent of the directory that the Activate.ps1
14
+ script is located within.
15
+
16
+ .Parameter Prompt
17
+ The prompt prefix to display when this virtual environment is activated. By
18
+ default, this prompt is the name of the virtual environment folder (VenvDir)
19
+ surrounded by parentheses and followed by a single space (ie. '(.venv) ').
20
+
21
+ .Example
22
+ Activate.ps1
23
+ Activates the Python virtual environment that contains the Activate.ps1 script.
24
+
25
+ .Example
26
+ Activate.ps1 -Verbose
27
+ Activates the Python virtual environment that contains the Activate.ps1 script,
28
+ and shows extra information about the activation as it executes.
29
+
30
+ .Example
31
+ Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
32
+ Activates the Python virtual environment located in the specified location.
33
+
34
+ .Example
35
+ Activate.ps1 -Prompt "MyPython"
36
+ Activates the Python virtual environment that contains the Activate.ps1 script,
37
+ and prefixes the current prompt with the specified string (surrounded in
38
+ parentheses) while the virtual environment is active.
39
+
40
+ .Notes
41
+ On Windows, it may be required to enable this Activate.ps1 script by setting the
42
+ execution policy for the user. You can do this by issuing the following PowerShell
43
+ command:
44
+
45
+ PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
46
+
47
+ For more information on Execution Policies:
48
+ https://go.microsoft.com/fwlink/?LinkID=135170
49
+
50
+ #>
51
+ Param(
52
+ [Parameter(Mandatory = $false)]
53
+ [String]
54
+ $VenvDir,
55
+ [Parameter(Mandatory = $false)]
56
+ [String]
57
+ $Prompt
58
+ )
59
+
60
+ <# Function declarations --------------------------------------------------- #>
61
+
62
+ <#
63
+ .Synopsis
64
+ Remove all shell session elements added by the Activate script, including the
65
+ addition of the virtual environment's Python executable from the beginning of
66
+ the PATH variable.
67
+
68
+ .Parameter NonDestructive
69
+ If present, do not remove this function from the global namespace for the
70
+ session.
71
+
72
+ #>
73
+ function global:deactivate ([switch]$NonDestructive) {
74
+ # Revert to original values
75
+
76
+ # The prior prompt:
77
+ if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
78
+ Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
79
+ Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
80
+ }
81
+
82
+ # The prior PYTHONHOME:
83
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
84
+ Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
85
+ Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
86
+ }
87
+
88
+ # The prior PATH:
89
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
90
+ Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
91
+ Remove-Item -Path Env:_OLD_VIRTUAL_PATH
92
+ }
93
+
94
+ # Just remove the VIRTUAL_ENV altogether:
95
+ if (Test-Path -Path Env:VIRTUAL_ENV) {
96
+ Remove-Item -Path env:VIRTUAL_ENV
97
+ }
98
+
99
+ # Just remove VIRTUAL_ENV_PROMPT altogether.
100
+ if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
101
+ Remove-Item -Path env:VIRTUAL_ENV_PROMPT
102
+ }
103
+
104
+ # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
105
+ if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
106
+ Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
107
+ }
108
+
109
+ # Leave deactivate function in the global namespace if requested:
110
+ if (-not $NonDestructive) {
111
+ Remove-Item -Path function:deactivate
112
+ }
113
+ }
114
+
115
+ <#
116
+ .Description
117
+ Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
118
+ given folder, and returns them in a map.
119
+
120
+ For each line in the pyvenv.cfg file, if that line can be parsed into exactly
121
+ two strings separated by `=` (with any amount of whitespace surrounding the =)
122
+ then it is considered a `key = value` line. The left hand string is the key,
123
+ the right hand is the value.
124
+
125
+ If the value starts with a `'` or a `"` then the first and last character is
126
+ stripped from the value before being captured.
127
+
128
+ .Parameter ConfigDir
129
+ Path to the directory that contains the `pyvenv.cfg` file.
130
+ #>
131
+ function Get-PyVenvConfig(
132
+ [String]
133
+ $ConfigDir
134
+ ) {
135
+ Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
136
+
137
+ # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
138
+ $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
139
+
140
+ # An empty map will be returned if no config file is found.
141
+ $pyvenvConfig = @{ }
142
+
143
+ if ($pyvenvConfigPath) {
144
+
145
+ Write-Verbose "File exists, parse `key = value` lines"
146
+ $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
147
+
148
+ $pyvenvConfigContent | ForEach-Object {
149
+ $keyval = $PSItem -split "\s*=\s*", 2
150
+ if ($keyval[0] -and $keyval[1]) {
151
+ $val = $keyval[1]
152
+
153
+ # Remove extraneous quotations around a string value.
154
+ if ("'""".Contains($val.Substring(0, 1))) {
155
+ $val = $val.Substring(1, $val.Length - 2)
156
+ }
157
+
158
+ $pyvenvConfig[$keyval[0]] = $val
159
+ Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
160
+ }
161
+ }
162
+ }
163
+ return $pyvenvConfig
164
+ }
165
+
166
+
167
+ <# Begin Activate script --------------------------------------------------- #>
168
+
169
+ # Determine the containing directory of this script
170
+ $VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
171
+ $VenvExecDir = Get-Item -Path $VenvExecPath
172
+
173
+ Write-Verbose "Activation script is located in path: '$VenvExecPath'"
174
+ Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
175
+ Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
176
+
177
+ # Set values required in priority: CmdLine, ConfigFile, Default
178
+ # First, get the location of the virtual environment, it might not be
179
+ # VenvExecDir if specified on the command line.
180
+ if ($VenvDir) {
181
+ Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
182
+ }
183
+ else {
184
+ Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
185
+ $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
186
+ Write-Verbose "VenvDir=$VenvDir"
187
+ }
188
+
189
+ # Next, read the `pyvenv.cfg` file to determine any required value such
190
+ # as `prompt`.
191
+ $pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
192
+
193
+ # Next, set the prompt from the command line, or the config file, or
194
+ # just use the name of the virtual environment folder.
195
+ if ($Prompt) {
196
+ Write-Verbose "Prompt specified as argument, using '$Prompt'"
197
+ }
198
+ else {
199
+ Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
200
+ if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
201
+ Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
202
+ $Prompt = $pyvenvCfg['prompt'];
203
+ }
204
+ else {
205
+ Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
206
+ Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
207
+ $Prompt = Split-Path -Path $venvDir -Leaf
208
+ }
209
+ }
210
+
211
+ Write-Verbose "Prompt = '$Prompt'"
212
+ Write-Verbose "VenvDir='$VenvDir'"
213
+
214
+ # Deactivate any currently active virtual environment, but leave the
215
+ # deactivate function in place.
216
+ deactivate -nondestructive
217
+
218
+ # Now set the environment variable VIRTUAL_ENV, used by many tools to determine
219
+ # that there is an activated venv.
220
+ $env:VIRTUAL_ENV = $VenvDir
221
+
222
+ if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
223
+
224
+ Write-Verbose "Setting prompt to '$Prompt'"
225
+
226
+ # Set the prompt to include the env name
227
+ # Make sure _OLD_VIRTUAL_PROMPT is global
228
+ function global:_OLD_VIRTUAL_PROMPT { "" }
229
+ Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
230
+ New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
231
+
232
+ function global:prompt {
233
+ Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
234
+ _OLD_VIRTUAL_PROMPT
235
+ }
236
+ $env:VIRTUAL_ENV_PROMPT = $Prompt
237
+ }
238
+
239
+ # Clear PYTHONHOME
240
+ if (Test-Path -Path Env:PYTHONHOME) {
241
+ Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
242
+ Remove-Item -Path Env:PYTHONHOME
243
+ }
244
+
245
+ # Add the venv to the PATH
246
+ Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
247
+ $Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
tuning-competition-baseline/.venv/bin/activate ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file must be used with "source bin/activate" *from bash*
2
+ # you cannot run it directly
3
+
4
+ deactivate () {
5
+ # reset old environment variables
6
+ if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
7
+ PATH="${_OLD_VIRTUAL_PATH:-}"
8
+ export PATH
9
+ unset _OLD_VIRTUAL_PATH
10
+ fi
11
+ if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
12
+ PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
13
+ export PYTHONHOME
14
+ unset _OLD_VIRTUAL_PYTHONHOME
15
+ fi
16
+
17
+ # Call hash to forget past commands. Without forgetting
18
+ # past commands the $PATH changes we made may not be respected
19
+ hash -r 2> /dev/null
20
+
21
+ if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
22
+ PS1="${_OLD_VIRTUAL_PS1:-}"
23
+ export PS1
24
+ unset _OLD_VIRTUAL_PS1
25
+ fi
26
+
27
+ unset VIRTUAL_ENV
28
+ unset VIRTUAL_ENV_PROMPT
29
+ if [ ! "${1:-}" = "nondestructive" ] ; then
30
+ # Self destruct!
31
+ unset -f deactivate
32
+ fi
33
+ }
34
+
35
+ # unset irrelevant variables
36
+ deactivate nondestructive
37
+
38
+ VIRTUAL_ENV="/home/koiwa/work/tuning-competition-baseline/.venv"
39
+ export VIRTUAL_ENV
40
+
41
+ _OLD_VIRTUAL_PATH="$PATH"
42
+ PATH="$VIRTUAL_ENV/bin:$PATH"
43
+ export PATH
44
+
45
+ # unset PYTHONHOME if set
46
+ # this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
47
+ # could use `if (set -u; : $PYTHONHOME) ;` in bash
48
+ if [ -n "${PYTHONHOME:-}" ] ; then
49
+ _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
50
+ unset PYTHONHOME
51
+ fi
52
+
53
+ if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
54
+ _OLD_VIRTUAL_PS1="${PS1:-}"
55
+ PS1="(.venv) ${PS1:-}"
56
+ export PS1
57
+ VIRTUAL_ENV_PROMPT="(.venv) "
58
+ export VIRTUAL_ENV_PROMPT
59
+ fi
60
+
61
+ # Call hash to forget past commands. Without forgetting
62
+ # past commands the $PATH changes we made may not be respected
63
+ hash -r 2> /dev/null
tuning-competition-baseline/.venv/bin/activate.csh ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file must be used with "source bin/activate.csh" *from csh*.
2
+ # You cannot run it directly.
3
+ # Created by Davide Di Blasi <[email protected]>.
4
+ # Ported to Python 3.3 venv by Andrew Svetlov <[email protected]>
5
+
6
+ alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
7
+
8
+ # Unset irrelevant variables.
9
+ deactivate nondestructive
10
+
11
+ setenv VIRTUAL_ENV "/home/koiwa/work/tuning-competition-baseline/.venv"
12
+
13
+ set _OLD_VIRTUAL_PATH="$PATH"
14
+ setenv PATH "$VIRTUAL_ENV/bin:$PATH"
15
+
16
+
17
+ set _OLD_VIRTUAL_PROMPT="$prompt"
18
+
19
+ if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
20
+ set prompt = "(.venv) $prompt"
21
+ setenv VIRTUAL_ENV_PROMPT "(.venv) "
22
+ endif
23
+
24
+ alias pydoc python -m pydoc
25
+
26
+ rehash
tuning-competition-baseline/.venv/bin/activate.fish ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file must be used with "source <venv>/bin/activate.fish" *from fish*
2
+ # (https://fishshell.com/); you cannot run it directly.
3
+
4
+ function deactivate -d "Exit virtual environment and return to normal shell environment"
5
+ # reset old environment variables
6
+ if test -n "$_OLD_VIRTUAL_PATH"
7
+ set -gx PATH $_OLD_VIRTUAL_PATH
8
+ set -e _OLD_VIRTUAL_PATH
9
+ end
10
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
11
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
12
+ set -e _OLD_VIRTUAL_PYTHONHOME
13
+ end
14
+
15
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
16
+ set -e _OLD_FISH_PROMPT_OVERRIDE
17
+ # prevents error when using nested fish instances (Issue #93858)
18
+ if functions -q _old_fish_prompt
19
+ functions -e fish_prompt
20
+ functions -c _old_fish_prompt fish_prompt
21
+ functions -e _old_fish_prompt
22
+ end
23
+ end
24
+
25
+ set -e VIRTUAL_ENV
26
+ set -e VIRTUAL_ENV_PROMPT
27
+ if test "$argv[1]" != "nondestructive"
28
+ # Self-destruct!
29
+ functions -e deactivate
30
+ end
31
+ end
32
+
33
+ # Unset irrelevant variables.
34
+ deactivate nondestructive
35
+
36
+ set -gx VIRTUAL_ENV "/home/koiwa/work/tuning-competition-baseline/.venv"
37
+
38
+ set -gx _OLD_VIRTUAL_PATH $PATH
39
+ set -gx PATH "$VIRTUAL_ENV/bin" $PATH
40
+
41
+ # Unset PYTHONHOME if set.
42
+ if set -q PYTHONHOME
43
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
44
+ set -e PYTHONHOME
45
+ end
46
+
47
+ if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
48
+ # fish uses a function instead of an env var to generate the prompt.
49
+
50
+ # Save the current fish_prompt function as the function _old_fish_prompt.
51
+ functions -c fish_prompt _old_fish_prompt
52
+
53
+ # With the original prompt function renamed, we can override with our own.
54
+ function fish_prompt
55
+ # Save the return status of the last command.
56
+ set -l old_status $status
57
+
58
+ # Output the venv prompt; color taken from the blue of the Python logo.
59
+ printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)
60
+
61
+ # Restore the return status of the previous command.
62
+ echo "exit $old_status" | .
63
+ # Output the original/"old" prompt.
64
+ _old_fish_prompt
65
+ end
66
+
67
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
68
+ set -gx VIRTUAL_ENV_PROMPT "(.venv) "
69
+ end
tuning-competition-baseline/.venv/bin/convert-caffe2-to-onnx ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from caffe2.python.onnx.bin.conversion import caffe2_to_onnx
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(caffe2_to_onnx())
tuning-competition-baseline/.venv/bin/convert-onnx-to-caffe2 ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from caffe2.python.onnx.bin.conversion import onnx_to_caffe2
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(onnx_to_caffe2())
tuning-competition-baseline/.venv/bin/cygdb ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from Cython.Debugger.Cygdb import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/cython ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from Cython.Compiler.Main import setuptools_main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(setuptools_main())
tuning-competition-baseline/.venv/bin/cythonize ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from Cython.Build.Cythonize import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/pip ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/pip3.11 ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/pybind11-config ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pybind11.__main__ import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/python ADDED
Binary file (17.7 kB). View file
 
tuning-competition-baseline/.venv/bin/python3 ADDED
Binary file (17.7 kB). View file
 
tuning-competition-baseline/.venv/bin/python3.11 ADDED
Binary file (17.7 kB). View file
 
tuning-competition-baseline/.venv/bin/torchrun ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from torch.distributed.run import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/bin/wheel ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ #!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from wheel.cli import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright 2007 Pallets
2
+
3
+ Redistribution and use in source and binary forms, with or without
4
+ modification, are permitted provided that the following conditions are
5
+ met:
6
+
7
+ 1. Redistributions of source code must retain the above copyright
8
+ notice, this list of conditions and the following disclaimer.
9
+
10
+ 2. Redistributions in binary form must reproduce the above copyright
11
+ notice, this list of conditions and the following disclaimer in the
12
+ documentation and/or other materials provided with the distribution.
13
+
14
+ 3. Neither the name of the copyright holder nor the names of its
15
+ contributors may be used to endorse or promote products derived from
16
+ this software without specific prior written permission.
17
+
18
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
21
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
24
+ TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
25
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
26
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
27
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
28
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/RECORD ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Jinja2-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ Jinja2-3.1.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
3
+ Jinja2-3.1.3.dist-info/METADATA,sha256=0cLNbRCI91jytc7Bzv3XAQfZzFDF2gxkJuH46eF5vew,3301
4
+ Jinja2-3.1.3.dist-info/RECORD,,
5
+ Jinja2-3.1.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
6
+ Jinja2-3.1.3.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
7
+ Jinja2-3.1.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
8
+ jinja2/__init__.py,sha256=NTBwMwsECrdHmxeXF7seusHLzrh6Ldn1A9qhS5cDuf0,1927
9
+ jinja2/__pycache__/__init__.cpython-311.pyc,,
10
+ jinja2/__pycache__/_identifier.cpython-311.pyc,,
11
+ jinja2/__pycache__/async_utils.cpython-311.pyc,,
12
+ jinja2/__pycache__/bccache.cpython-311.pyc,,
13
+ jinja2/__pycache__/compiler.cpython-311.pyc,,
14
+ jinja2/__pycache__/constants.cpython-311.pyc,,
15
+ jinja2/__pycache__/debug.cpython-311.pyc,,
16
+ jinja2/__pycache__/defaults.cpython-311.pyc,,
17
+ jinja2/__pycache__/environment.cpython-311.pyc,,
18
+ jinja2/__pycache__/exceptions.cpython-311.pyc,,
19
+ jinja2/__pycache__/ext.cpython-311.pyc,,
20
+ jinja2/__pycache__/filters.cpython-311.pyc,,
21
+ jinja2/__pycache__/idtracking.cpython-311.pyc,,
22
+ jinja2/__pycache__/lexer.cpython-311.pyc,,
23
+ jinja2/__pycache__/loaders.cpython-311.pyc,,
24
+ jinja2/__pycache__/meta.cpython-311.pyc,,
25
+ jinja2/__pycache__/nativetypes.cpython-311.pyc,,
26
+ jinja2/__pycache__/nodes.cpython-311.pyc,,
27
+ jinja2/__pycache__/optimizer.cpython-311.pyc,,
28
+ jinja2/__pycache__/parser.cpython-311.pyc,,
29
+ jinja2/__pycache__/runtime.cpython-311.pyc,,
30
+ jinja2/__pycache__/sandbox.cpython-311.pyc,,
31
+ jinja2/__pycache__/tests.cpython-311.pyc,,
32
+ jinja2/__pycache__/utils.cpython-311.pyc,,
33
+ jinja2/__pycache__/visitor.cpython-311.pyc,,
34
+ jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
35
+ jinja2/async_utils.py,sha256=dFcmh6lMNfbh7eLKrBio8JqAKLHdZbpCuurFN4OERtY,2447
36
+ jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
37
+ jinja2/compiler.py,sha256=PJzYdRLStlEOqmnQs1YxlizPrJoj3jTZuUleREn6AIQ,72199
38
+ jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
39
+ jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
40
+ jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
41
+ jinja2/environment.py,sha256=0qldX3VQKZcm6lgn7zHz94oRFow7YPYERiqkquomNjU,61253
42
+ jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
43
+ jinja2/ext.py,sha256=5fnMpllaXkfm2P_93RIvi-OnK7Tk8mCW8Du-GcD12Hc,31844
44
+ jinja2/filters.py,sha256=vYjKb2zaPShvYtn_LpSmqfS8SScbrA_KOanNibsMDIE,53862
45
+ jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
46
+ jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
47
+ jinja2/loaders.py,sha256=ayAwxfrA1SAffQta0nwSDm3TDT4KYiIGN_D9Z45B310,23085
48
+ jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
49
+ jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210
50
+ jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
51
+ jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
52
+ jinja2/parser.py,sha256=Y199wPL-G67gJoi5G_5sHuu9uEP1PJkjjLEW_xTH8-k,39736
53
+ jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
+ jinja2/runtime.py,sha256=_6LkKIWFJjQdqlrgA3K39zBFQ-7Orm3wGDm96RwxQoE,33406
55
+ jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
56
+ jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
57
+ jinja2/utils.py,sha256=IMwRIcN1SsTw2-jdQtlH2KzNABsXZBW_-tnFXafQBvY,23933
58
+ jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [babel.extractors]
2
+ jinja2 = jinja2.ext:babel_extract[i18n]
tuning-competition-baseline/.venv/lib/python3.11/site-packages/cython.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ #
4
+ # Cython -- Main Program, generic
5
+ #
6
+
7
+ if __name__ == '__main__':
8
+
9
+ import os
10
+ import sys
11
+
12
+ # Make sure we import the right Cython
13
+ cythonpath, _ = os.path.split(os.path.realpath(__file__))
14
+ sys.path.insert(0, cythonpath)
15
+
16
+ from Cython.Compiler.Main import main
17
+ main(command_line = 1)
18
+
19
+ else:
20
+ # Void cython.* directives.
21
+ from Cython.Shadow import *
22
+ ## and bring in the __version__
23
+ from Cython import __version__
24
+ from Cython import load_ipython_extension
tuning-competition-baseline/.venv/lib/python3.11/site-packages/isympy.py ADDED
@@ -0,0 +1,342 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Python shell for SymPy.
3
+
4
+ This is just a normal Python shell (IPython shell if you have the
5
+ IPython package installed), that executes the following commands for
6
+ the user:
7
+
8
+ >>> from __future__ import division
9
+ >>> from sympy import *
10
+ >>> x, y, z, t = symbols('x y z t')
11
+ >>> k, m, n = symbols('k m n', integer=True)
12
+ >>> f, g, h = symbols('f g h', cls=Function)
13
+ >>> init_printing()
14
+
15
+ So starting 'isympy' is equivalent to starting Python (or IPython) and
16
+ executing the above commands by hand. It is intended for easy and quick
17
+ experimentation with SymPy. isympy is a good way to use SymPy as an
18
+ interactive calculator. If you have IPython and Matplotlib installed, then
19
+ interactive plotting is enabled by default.
20
+
21
+ COMMAND LINE OPTIONS
22
+ --------------------
23
+
24
+ -c CONSOLE, --console=CONSOLE
25
+
26
+ Use the specified shell (Python or IPython) shell as the console
27
+ backend instead of the default one (IPython if present, Python
28
+ otherwise), e.g.:
29
+
30
+ $isympy -c python
31
+
32
+ CONSOLE must be one of 'ipython' or 'python'
33
+
34
+ -p PRETTY, --pretty PRETTY
35
+
36
+ Setup pretty-printing in SymPy. When pretty-printing is enabled,
37
+ expressions can be printed with Unicode or ASCII. The default is
38
+ to use pretty-printing (with Unicode if the terminal supports it).
39
+ When this option is 'no', expressions will not be pretty-printed
40
+ and ASCII will be used:
41
+
42
+ $isympy -p no
43
+
44
+ PRETTY must be one of 'unicode', 'ascii', or 'no'
45
+
46
+ -t TYPES, --types=TYPES
47
+
48
+ Setup the ground types for the polys. By default, gmpy ground types
49
+ are used if gmpy2 or gmpy is installed, otherwise it falls back to python
50
+ ground types, which are a little bit slower. You can manually
51
+ choose python ground types even if gmpy is installed (e.g., for
52
+ testing purposes):
53
+
54
+ $isympy -t python
55
+
56
+ TYPES must be one of 'gmpy', 'gmpy1' or 'python'
57
+
58
+ Note that the ground type gmpy1 is primarily intended for testing; it
59
+ forces the use of gmpy version 1 even if gmpy2 is available.
60
+
61
+ This is the same as setting the environment variable
62
+ SYMPY_GROUND_TYPES to the given ground type (e.g.,
63
+ SYMPY_GROUND_TYPES='gmpy')
64
+
65
+ The ground types can be determined interactively from the variable
66
+ sympy.polys.domains.GROUND_TYPES.
67
+
68
+ -o ORDER, --order ORDER
69
+
70
+ Setup the ordering of terms for printing. The default is lex, which
71
+ orders terms lexicographically (e.g., x**2 + x + 1). You can choose
72
+ other orderings, such as rev-lex, which will use reverse
73
+ lexicographic ordering (e.g., 1 + x + x**2):
74
+
75
+ $isympy -o rev-lex
76
+
77
+ ORDER must be one of 'lex', 'rev-lex', 'grlex', 'rev-grlex',
78
+ 'grevlex', 'rev-grevlex', 'old', or 'none'.
79
+
80
+ Note that for very large expressions, ORDER='none' may speed up
81
+ printing considerably but the terms will have no canonical order.
82
+
83
+ -q, --quiet
84
+
85
+ Print only Python's and SymPy's versions to stdout at startup.
86
+
87
+ -d, --doctest
88
+
89
+ Use the same format that should be used for doctests. This is
90
+ equivalent to -c python -p no.
91
+
92
+ -C, --no-cache
93
+
94
+ Disable the caching mechanism. Disabling the cache may slow certain
95
+ operations down considerably. This is useful for testing the cache,
96
+ or for benchmarking, as the cache can result in deceptive timings.
97
+
98
+ This is equivalent to setting the environment variable
99
+ SYMPY_USE_CACHE to 'no'.
100
+
101
+ -a, --auto-symbols (requires at least IPython 0.11)
102
+
103
+ Automatically create missing symbols. Normally, typing a name of a
104
+ Symbol that has not been instantiated first would raise NameError,
105
+ but with this option enabled, any undefined name will be
106
+ automatically created as a Symbol.
107
+
108
+ Note that this is intended only for interactive, calculator style
109
+ usage. In a script that uses SymPy, Symbols should be instantiated
110
+ at the top, so that it's clear what they are.
111
+
112
+ This will not override any names that are already defined, which
113
+ includes the single character letters represented by the mnemonic
114
+ QCOSINE (see the "Gotchas and Pitfalls" document in the
115
+ documentation). You can delete existing names by executing "del
116
+ name". If a name is defined, typing "'name' in dir()" will return True.
117
+
118
+ The Symbols that are created using this have default assumptions.
119
+ If you want to place assumptions on symbols, you should create them
120
+ using symbols() or var().
121
+
122
+ Finally, this only works in the top level namespace. So, for
123
+ example, if you define a function in isympy with an undefined
124
+ Symbol, it will not work.
125
+
126
+ See also the -i and -I options.
127
+
128
+ -i, --int-to-Integer (requires at least IPython 0.11)
129
+
130
+ Automatically wrap int literals with Integer. This makes it so that
131
+ things like 1/2 will come out as Rational(1, 2), rather than 0.5. This
132
+ works by preprocessing the source and wrapping all int literals with
133
+ Integer. Note that this will not change the behavior of int literals
134
+ assigned to variables, and it also won't change the behavior of functions
135
+ that return int literals.
136
+
137
+ If you want an int, you can wrap the literal in int(), e.g. int(3)/int(2)
138
+ gives 1.5 (with division imported from __future__).
139
+
140
+ -I, --interactive (requires at least IPython 0.11)
141
+
142
+ This is equivalent to --auto-symbols --int-to-Integer. Future options
143
+ designed for ease of interactive use may be added to this.
144
+
145
+ -D, --debug
146
+
147
+ Enable debugging output. This is the same as setting the
148
+ environment variable SYMPY_DEBUG to 'True'. The debug status is set
149
+ in the variable SYMPY_DEBUG within isympy.
150
+
151
+ -- IPython options
152
+
153
+ Additionally you can pass command line options directly to the IPython
154
+ interpreter (the standard Python shell is not supported). However you
155
+ need to add the '--' separator between two types of options, e.g the
156
+ startup banner option and the colors option. You need to enter the
157
+ options as required by the version of IPython that you are using, too:
158
+
159
+ in IPython 0.11,
160
+
161
+ $isympy -q -- --colors=NoColor
162
+
163
+ or older versions of IPython,
164
+
165
+ $isympy -q -- -colors NoColor
166
+
167
+ See also isympy --help.
168
+ """
169
+
170
+ import os
171
+ import sys
172
+
173
+ # DO NOT IMPORT SYMPY HERE! Or the setting of the sympy environment variables
174
+ # by the command line will break.
175
+
176
+ def main() -> None:
177
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter
178
+
179
+ VERSION = None
180
+ if '--version' in sys.argv:
181
+ # We cannot import sympy before this is run, because flags like -C and
182
+ # -t set environment variables that must be set before SymPy is
183
+ # imported. The only thing we need to import it for is to get the
184
+ # version, which only matters with the --version flag.
185
+ import sympy
186
+ VERSION = sympy.__version__
187
+
188
+ usage = 'isympy [options] -- [ipython options]'
189
+ parser = ArgumentParser(
190
+ usage=usage,
191
+ description=__doc__,
192
+ formatter_class=RawDescriptionHelpFormatter,
193
+ )
194
+
195
+ parser.add_argument('--version', action='version', version=VERSION)
196
+
197
+ parser.add_argument(
198
+ '-c', '--console',
199
+ dest='console',
200
+ action='store',
201
+ default=None,
202
+ choices=['ipython', 'python'],
203
+ metavar='CONSOLE',
204
+ help='select type of interactive session: ipython | python; defaults '
205
+ 'to ipython if IPython is installed, otherwise python')
206
+
207
+ parser.add_argument(
208
+ '-p', '--pretty',
209
+ dest='pretty',
210
+ action='store',
211
+ default=None,
212
+ metavar='PRETTY',
213
+ choices=['unicode', 'ascii', 'no'],
214
+ help='setup pretty printing: unicode | ascii | no; defaults to '
215
+ 'unicode printing if the terminal supports it, otherwise ascii')
216
+
217
+ parser.add_argument(
218
+ '-t', '--types',
219
+ dest='types',
220
+ action='store',
221
+ default=None,
222
+ metavar='TYPES',
223
+ choices=['gmpy', 'gmpy1', 'python'],
224
+ help='setup ground types: gmpy | gmpy1 | python; defaults to gmpy if gmpy2 '
225
+ 'or gmpy is installed, otherwise python')
226
+
227
+ parser.add_argument(
228
+ '-o', '--order',
229
+ dest='order',
230
+ action='store',
231
+ default=None,
232
+ metavar='ORDER',
233
+ choices=['lex', 'grlex', 'grevlex', 'rev-lex', 'rev-grlex', 'rev-grevlex', 'old', 'none'],
234
+ help='setup ordering of terms: [rev-]lex | [rev-]grlex | [rev-]grevlex | old | none; defaults to lex')
235
+
236
+ parser.add_argument(
237
+ '-q', '--quiet',
238
+ dest='quiet',
239
+ action='store_true',
240
+ default=False,
241
+ help='print only version information at startup')
242
+
243
+ parser.add_argument(
244
+ '-d', '--doctest',
245
+ dest='doctest',
246
+ action='store_true',
247
+ default=False,
248
+ help='use the doctest format for output (you can just copy and paste it)')
249
+
250
+ parser.add_argument(
251
+ '-C', '--no-cache',
252
+ dest='cache',
253
+ action='store_false',
254
+ default=True,
255
+ help='disable caching mechanism')
256
+
257
+ parser.add_argument(
258
+ '-a', '--auto-symbols',
259
+ dest='auto_symbols',
260
+ action='store_true',
261
+ default=False,
262
+ help='automatically construct missing symbols')
263
+
264
+ parser.add_argument(
265
+ '-i', '--int-to-Integer',
266
+ dest='auto_int_to_Integer',
267
+ action='store_true',
268
+ default=False,
269
+ help="automatically wrap int literals with Integer")
270
+
271
+ parser.add_argument(
272
+ '-I', '--interactive',
273
+ dest='interactive',
274
+ action='store_true',
275
+ default=False,
276
+ help="equivalent to -a -i")
277
+
278
+ parser.add_argument(
279
+ '-D', '--debug',
280
+ dest='debug',
281
+ action='store_true',
282
+ default=False,
283
+ help='enable debugging output')
284
+
285
+ (options, ipy_args) = parser.parse_known_args()
286
+ if '--' in ipy_args:
287
+ ipy_args.remove('--')
288
+
289
+ if not options.cache:
290
+ os.environ['SYMPY_USE_CACHE'] = 'no'
291
+
292
+ if options.types:
293
+ os.environ['SYMPY_GROUND_TYPES'] = options.types
294
+
295
+ if options.debug:
296
+ os.environ['SYMPY_DEBUG'] = str(options.debug)
297
+
298
+ if options.doctest:
299
+ options.pretty = 'no'
300
+ options.console = 'python'
301
+
302
+ session = options.console
303
+
304
+ if session is not None:
305
+ ipython = session == 'ipython'
306
+ else:
307
+ try:
308
+ import IPython
309
+ ipython = True
310
+ except ImportError:
311
+ if not options.quiet:
312
+ from sympy.interactive.session import no_ipython
313
+ print(no_ipython)
314
+ ipython = False
315
+
316
+ args = {
317
+ 'pretty_print': True,
318
+ 'use_unicode': None,
319
+ 'use_latex': None,
320
+ 'order': None,
321
+ 'argv': ipy_args,
322
+ }
323
+
324
+ if options.pretty == 'unicode':
325
+ args['use_unicode'] = True
326
+ elif options.pretty == 'ascii':
327
+ args['use_unicode'] = False
328
+ elif options.pretty == 'no':
329
+ args['pretty_print'] = False
330
+
331
+ if options.order is not None:
332
+ args['order'] = options.order
333
+
334
+ args['quiet'] = options.quiet
335
+ args['auto_symbols'] = options.auto_symbols or options.interactive
336
+ args['auto_int_to_Integer'] = options.auto_int_to_Integer or options.interactive
337
+
338
+ from sympy.interactive import init_session
339
+ init_session(ipython, **args)
340
+
341
+ if __name__ == "__main__":
342
+ main()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/lazy_imports.py ADDED
@@ -0,0 +1,190 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import importlib
2
+ import importlib.util
3
+ import inspect
4
+ import os
5
+ import sys
6
+ import types
7
+
8
+ __all__ = ["attach", "_lazy_import"]
9
+
10
+
11
+ def attach(module_name, submodules=None, submod_attrs=None):
12
+ """Attach lazily loaded submodules, and functions or other attributes.
13
+
14
+ Typically, modules import submodules and attributes as follows::
15
+
16
+ import mysubmodule
17
+ import anothersubmodule
18
+
19
+ from .foo import someattr
20
+
21
+ The idea of this function is to replace the `__init__.py`
22
+ module's `__getattr__`, `__dir__`, and `__all__` attributes such that
23
+ all imports work exactly the way they normally would, except that the
24
+ actual import is delayed until the resulting module object is first used.
25
+
26
+ The typical way to call this function, replacing the above imports, is::
27
+
28
+ __getattr__, __lazy_dir__, __all__ = lazy.attach(
29
+ __name__,
30
+ ['mysubmodule', 'anothersubmodule'],
31
+ {'foo': 'someattr'}
32
+ )
33
+
34
+ This functionality requires Python 3.7 or higher.
35
+
36
+ Parameters
37
+ ----------
38
+ module_name : str
39
+ Typically use __name__.
40
+ submodules : set
41
+ List of submodules to lazily import.
42
+ submod_attrs : dict
43
+ Dictionary of submodule -> list of attributes / functions.
44
+ These attributes are imported as they are used.
45
+
46
+ Returns
47
+ -------
48
+ __getattr__, __dir__, __all__
49
+
50
+ """
51
+ if submod_attrs is None:
52
+ submod_attrs = {}
53
+
54
+ if submodules is None:
55
+ submodules = set()
56
+ else:
57
+ submodules = set(submodules)
58
+
59
+ attr_to_modules = {
60
+ attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
61
+ }
62
+
63
+ __all__ = list(submodules | attr_to_modules.keys())
64
+
65
+ def __getattr__(name):
66
+ if name in submodules:
67
+ return importlib.import_module(f"{module_name}.{name}")
68
+ elif name in attr_to_modules:
69
+ submod = importlib.import_module(f"{module_name}.{attr_to_modules[name]}")
70
+ return getattr(submod, name)
71
+ else:
72
+ raise AttributeError(f"No {module_name} attribute {name}")
73
+
74
+ def __dir__():
75
+ return __all__
76
+
77
+ if os.environ.get("EAGER_IMPORT", ""):
78
+ for attr in set(attr_to_modules.keys()) | submodules:
79
+ __getattr__(attr)
80
+
81
+ return __getattr__, __dir__, list(__all__)
82
+
83
+
84
+ class DelayedImportErrorModule(types.ModuleType):
85
+ def __init__(self, frame_data, *args, **kwargs):
86
+ self.__frame_data = frame_data
87
+ super().__init__(*args, **kwargs)
88
+
89
+ def __getattr__(self, x):
90
+ if x in ("__class__", "__file__", "__frame_data"):
91
+ super().__getattr__(x)
92
+ else:
93
+ fd = self.__frame_data
94
+ raise ModuleNotFoundError(
95
+ f"No module named '{fd['spec']}'\n\n"
96
+ "This error is lazily reported, having originally occurred in\n"
97
+ f' File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n'
98
+ f'----> {"".join(fd["code_context"] or "").strip()}'
99
+ )
100
+
101
+
102
+ def _lazy_import(fullname):
103
+ """Return a lazily imported proxy for a module or library.
104
+
105
+ Warning
106
+ -------
107
+ Importing using this function can currently cause trouble
108
+ when the user tries to import from a subpackage of a module before
109
+ the package is fully imported. In particular, this idiom may not work:
110
+
111
+ np = lazy_import("numpy")
112
+ from numpy.lib import recfunctions
113
+
114
+ This is due to a difference in the way Python's LazyLoader handles
115
+ subpackage imports compared to the normal import process. Hopefully
116
+ we will get Python's LazyLoader to fix this, or find a workaround.
117
+ In the meantime, this is a potential problem.
118
+
119
+ The workaround is to import numpy before importing from the subpackage.
120
+
121
+ Notes
122
+ -----
123
+ We often see the following pattern::
124
+
125
+ def myfunc():
126
+ import scipy as sp
127
+ sp.argmin(...)
128
+ ....
129
+
130
+ This is to prevent a library, in this case `scipy`, from being
131
+ imported at function definition time, since that can be slow.
132
+
133
+ This function provides a proxy module that, upon access, imports
134
+ the actual module. So the idiom equivalent to the above example is::
135
+
136
+ sp = lazy.load("scipy")
137
+
138
+ def myfunc():
139
+ sp.argmin(...)
140
+ ....
141
+
142
+ The initial import time is fast because the actual import is delayed
143
+ until the first attribute is requested. The overall import time may
144
+ decrease as well for users that don't make use of large portions
145
+ of the library.
146
+
147
+ Parameters
148
+ ----------
149
+ fullname : str
150
+ The full name of the package or subpackage to import. For example::
151
+
152
+ sp = lazy.load('scipy') # import scipy as sp
153
+ spla = lazy.load('scipy.linalg') # import scipy.linalg as spla
154
+
155
+ Returns
156
+ -------
157
+ pm : importlib.util._LazyModule
158
+ Proxy module. Can be used like any regularly imported module.
159
+ Actual loading of the module occurs upon first attribute request.
160
+
161
+ """
162
+ try:
163
+ return sys.modules[fullname]
164
+ except:
165
+ pass
166
+
167
+ # Not previously loaded -- look it up
168
+ spec = importlib.util.find_spec(fullname)
169
+
170
+ if spec is None:
171
+ try:
172
+ parent = inspect.stack()[1]
173
+ frame_data = {
174
+ "spec": fullname,
175
+ "filename": parent.filename,
176
+ "lineno": parent.lineno,
177
+ "function": parent.function,
178
+ "code_context": parent.code_context,
179
+ }
180
+ return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule")
181
+ finally:
182
+ del parent
183
+
184
+ module = importlib.util.module_from_spec(spec)
185
+ sys.modules[fullname] = module
186
+
187
+ loader = importlib.util.LazyLoader(spec.loader)
188
+ loader.exec_module(module)
189
+
190
+ return module
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (913 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-311.pyc ADDED
Binary file (30.3 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-311.pyc ADDED
Binary file (16 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-311.pyc ADDED
Binary file (5.73 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/graphmatrix.py ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Adjacency matrix and incidence matrix of graphs.
3
+ """
4
+ import networkx as nx
5
+
6
+ __all__ = ["incidence_matrix", "adjacency_matrix"]
7
+
8
+
9
+ @nx._dispatch(edge_attrs="weight")
10
+ def incidence_matrix(
11
+ G, nodelist=None, edgelist=None, oriented=False, weight=None, *, dtype=None
12
+ ):
13
+ """Returns incidence matrix of G.
14
+
15
+ The incidence matrix assigns each row to a node and each column to an edge.
16
+ For a standard incidence matrix a 1 appears wherever a row's node is
17
+ incident on the column's edge. For an oriented incidence matrix each
18
+ edge is assigned an orientation (arbitrarily for undirected and aligning to
19
+ direction for directed). A -1 appears for the source (tail) of an edge and
20
+ 1 for the destination (head) of the edge. The elements are zero otherwise.
21
+
22
+ Parameters
23
+ ----------
24
+ G : graph
25
+ A NetworkX graph
26
+
27
+ nodelist : list, optional (default= all nodes in G)
28
+ The rows are ordered according to the nodes in nodelist.
29
+ If nodelist is None, then the ordering is produced by G.nodes().
30
+
31
+ edgelist : list, optional (default= all edges in G)
32
+ The columns are ordered according to the edges in edgelist.
33
+ If edgelist is None, then the ordering is produced by G.edges().
34
+
35
+ oriented: bool, optional (default=False)
36
+ If True, matrix elements are +1 or -1 for the head or tail node
37
+ respectively of each edge. If False, +1 occurs at both nodes.
38
+
39
+ weight : string or None, optional (default=None)
40
+ The edge data key used to provide each value in the matrix.
41
+ If None, then each edge has weight 1. Edge weights, if used,
42
+ should be positive so that the orientation can provide the sign.
43
+
44
+ dtype : a NumPy dtype or None (default=None)
45
+ The dtype of the output sparse array. This type should be a compatible
46
+ type of the weight argument, eg. if weight would return a float this
47
+ argument should also be a float.
48
+ If None, then the default for SciPy is used.
49
+
50
+ Returns
51
+ -------
52
+ A : SciPy sparse array
53
+ The incidence matrix of G.
54
+
55
+ Notes
56
+ -----
57
+ For MultiGraph/MultiDiGraph, the edges in edgelist should be
58
+ (u,v,key) 3-tuples.
59
+
60
+ "Networks are the best discrete model for so many problems in
61
+ applied mathematics" [1]_.
62
+
63
+ References
64
+ ----------
65
+ .. [1] Gil Strang, Network applications: A = incidence matrix,
66
+ http://videolectures.net/mit18085f07_strang_lec03/
67
+ """
68
+ import scipy as sp
69
+
70
+ if nodelist is None:
71
+ nodelist = list(G)
72
+ if edgelist is None:
73
+ if G.is_multigraph():
74
+ edgelist = list(G.edges(keys=True))
75
+ else:
76
+ edgelist = list(G.edges())
77
+ A = sp.sparse.lil_array((len(nodelist), len(edgelist)), dtype=dtype)
78
+ node_index = {node: i for i, node in enumerate(nodelist)}
79
+ for ei, e in enumerate(edgelist):
80
+ (u, v) = e[:2]
81
+ if u == v:
82
+ continue # self loops give zero column
83
+ try:
84
+ ui = node_index[u]
85
+ vi = node_index[v]
86
+ except KeyError as err:
87
+ raise nx.NetworkXError(
88
+ f"node {u} or {v} in edgelist but not in nodelist"
89
+ ) from err
90
+ if weight is None:
91
+ wt = 1
92
+ else:
93
+ if G.is_multigraph():
94
+ ekey = e[2]
95
+ wt = G[u][v][ekey].get(weight, 1)
96
+ else:
97
+ wt = G[u][v].get(weight, 1)
98
+ if oriented:
99
+ A[ui, ei] = -wt
100
+ A[vi, ei] = wt
101
+ else:
102
+ A[ui, ei] = wt
103
+ A[vi, ei] = wt
104
+ return A.asformat("csc")
105
+
106
+
107
+ @nx._dispatch(edge_attrs="weight")
108
+ def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"):
109
+ """Returns adjacency matrix of G.
110
+
111
+ Parameters
112
+ ----------
113
+ G : graph
114
+ A NetworkX graph
115
+
116
+ nodelist : list, optional
117
+ The rows and columns are ordered according to the nodes in nodelist.
118
+ If nodelist is None, then the ordering is produced by G.nodes().
119
+
120
+ dtype : NumPy data-type, optional
121
+ The desired data-type for the array.
122
+ If None, then the NumPy default is used.
123
+
124
+ weight : string or None, optional (default='weight')
125
+ The edge data key used to provide each value in the matrix.
126
+ If None, then each edge has weight 1.
127
+
128
+ Returns
129
+ -------
130
+ A : SciPy sparse array
131
+ Adjacency matrix representation of G.
132
+
133
+ Notes
134
+ -----
135
+ For directed graphs, entry i,j corresponds to an edge from i to j.
136
+
137
+ If you want a pure Python adjacency matrix representation try
138
+ networkx.convert.to_dict_of_dicts which will return a
139
+ dictionary-of-dictionaries format that can be addressed as a
140
+ sparse matrix.
141
+
142
+ For MultiGraph/MultiDiGraph with parallel edges the weights are summed.
143
+ See `to_numpy_array` for other options.
144
+
145
+ The convention used for self-loop edges in graphs is to assign the
146
+ diagonal matrix entry value to the edge weight attribute
147
+ (or the number 1 if the edge has no weight attribute). If the
148
+ alternate convention of doubling the edge weight is desired the
149
+ resulting SciPy sparse array can be modified as follows:
150
+
151
+ >>> G = nx.Graph([(1, 1)])
152
+ >>> A = nx.adjacency_matrix(G)
153
+ >>> print(A.todense())
154
+ [[1]]
155
+ >>> A.setdiag(A.diagonal() * 2)
156
+ >>> print(A.todense())
157
+ [[2]]
158
+
159
+ See Also
160
+ --------
161
+ to_numpy_array
162
+ to_scipy_sparse_array
163
+ to_dict_of_dicts
164
+ adjacency_spectrum
165
+ """
166
+ return nx.to_scipy_sparse_array(G, nodelist=nodelist, dtype=dtype, weight=weight)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/modularitymatrix.py ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Modularity matrix of graphs.
2
+ """
3
+ import networkx as nx
4
+ from networkx.utils import not_implemented_for
5
+
6
+ __all__ = ["modularity_matrix", "directed_modularity_matrix"]
7
+
8
+
9
+ @not_implemented_for("directed")
10
+ @not_implemented_for("multigraph")
11
+ @nx._dispatch(edge_attrs="weight")
12
+ def modularity_matrix(G, nodelist=None, weight=None):
13
+ r"""Returns the modularity matrix of G.
14
+
15
+ The modularity matrix is the matrix B = A - <A>, where A is the adjacency
16
+ matrix and <A> is the average adjacency matrix, assuming that the graph
17
+ is described by the configuration model.
18
+
19
+ More specifically, the element B_ij of B is defined as
20
+
21
+ .. math::
22
+ A_{ij} - {k_i k_j \over 2 m}
23
+
24
+ where k_i is the degree of node i, and where m is the number of edges
25
+ in the graph. When weight is set to a name of an attribute edge, Aij, k_i,
26
+ k_j and m are computed using its value.
27
+
28
+ Parameters
29
+ ----------
30
+ G : Graph
31
+ A NetworkX graph
32
+
33
+ nodelist : list, optional
34
+ The rows and columns are ordered according to the nodes in nodelist.
35
+ If nodelist is None, then the ordering is produced by G.nodes().
36
+
37
+ weight : string or None, optional (default=None)
38
+ The edge attribute that holds the numerical value used for
39
+ the edge weight. If None then all edge weights are 1.
40
+
41
+ Returns
42
+ -------
43
+ B : Numpy array
44
+ The modularity matrix of G.
45
+
46
+ Examples
47
+ --------
48
+ >>> k = [3, 2, 2, 1, 0]
49
+ >>> G = nx.havel_hakimi_graph(k)
50
+ >>> B = nx.modularity_matrix(G)
51
+
52
+
53
+ See Also
54
+ --------
55
+ to_numpy_array
56
+ modularity_spectrum
57
+ adjacency_matrix
58
+ directed_modularity_matrix
59
+
60
+ References
61
+ ----------
62
+ .. [1] M. E. J. Newman, "Modularity and community structure in networks",
63
+ Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006.
64
+ """
65
+ import numpy as np
66
+
67
+ if nodelist is None:
68
+ nodelist = list(G)
69
+ A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr")
70
+ k = A.sum(axis=1)
71
+ m = k.sum() * 0.5
72
+ # Expected adjacency matrix
73
+ X = np.outer(k, k) / (2 * m)
74
+
75
+ return A - X
76
+
77
+
78
+ @not_implemented_for("undirected")
79
+ @not_implemented_for("multigraph")
80
+ @nx._dispatch(edge_attrs="weight")
81
+ def directed_modularity_matrix(G, nodelist=None, weight=None):
82
+ """Returns the directed modularity matrix of G.
83
+
84
+ The modularity matrix is the matrix B = A - <A>, where A is the adjacency
85
+ matrix and <A> is the expected adjacency matrix, assuming that the graph
86
+ is described by the configuration model.
87
+
88
+ More specifically, the element B_ij of B is defined as
89
+
90
+ .. math::
91
+ B_{ij} = A_{ij} - k_i^{out} k_j^{in} / m
92
+
93
+ where :math:`k_i^{in}` is the in degree of node i, and :math:`k_j^{out}` is the out degree
94
+ of node j, with m the number of edges in the graph. When weight is set
95
+ to a name of an attribute edge, Aij, k_i, k_j and m are computed using
96
+ its value.
97
+
98
+ Parameters
99
+ ----------
100
+ G : DiGraph
101
+ A NetworkX DiGraph
102
+
103
+ nodelist : list, optional
104
+ The rows and columns are ordered according to the nodes in nodelist.
105
+ If nodelist is None, then the ordering is produced by G.nodes().
106
+
107
+ weight : string or None, optional (default=None)
108
+ The edge attribute that holds the numerical value used for
109
+ the edge weight. If None then all edge weights are 1.
110
+
111
+ Returns
112
+ -------
113
+ B : Numpy array
114
+ The modularity matrix of G.
115
+
116
+ Examples
117
+ --------
118
+ >>> G = nx.DiGraph()
119
+ >>> G.add_edges_from(
120
+ ... (
121
+ ... (1, 2),
122
+ ... (1, 3),
123
+ ... (3, 1),
124
+ ... (3, 2),
125
+ ... (3, 5),
126
+ ... (4, 5),
127
+ ... (4, 6),
128
+ ... (5, 4),
129
+ ... (5, 6),
130
+ ... (6, 4),
131
+ ... )
132
+ ... )
133
+ >>> B = nx.directed_modularity_matrix(G)
134
+
135
+
136
+ Notes
137
+ -----
138
+ NetworkX defines the element A_ij of the adjacency matrix as 1 if there
139
+ is a link going from node i to node j. Leicht and Newman use the opposite
140
+ definition. This explains the different expression for B_ij.
141
+
142
+ See Also
143
+ --------
144
+ to_numpy_array
145
+ modularity_spectrum
146
+ adjacency_matrix
147
+ modularity_matrix
148
+
149
+ References
150
+ ----------
151
+ .. [1] E. A. Leicht, M. E. J. Newman,
152
+ "Community structure in directed networks",
153
+ Phys. Rev Lett., vol. 100, no. 11, p. 118703, 2008.
154
+ """
155
+ import numpy as np
156
+
157
+ if nodelist is None:
158
+ nodelist = list(G)
159
+ A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr")
160
+ k_in = A.sum(axis=0)
161
+ k_out = A.sum(axis=1)
162
+ m = k_in.sum()
163
+ # Expected adjacency matrix
164
+ X = np.outer(k_out, k_in) / m
165
+
166
+ return A - X
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (222 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-311.pyc ADDED
Binary file (6.83 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py ADDED
@@ -0,0 +1,402 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from math import sqrt
2
+
3
+ import pytest
4
+
5
+ np = pytest.importorskip("numpy")
6
+
7
+
8
+ import networkx as nx
9
+
10
+ methods = ("tracemin_pcg", "tracemin_lu", "lanczos", "lobpcg")
11
+
12
+
13
+ def test_algebraic_connectivity_tracemin_chol():
14
+ """Test that "tracemin_chol" raises an exception."""
15
+ pytest.importorskip("scipy")
16
+ G = nx.barbell_graph(5, 4)
17
+ with pytest.raises(nx.NetworkXError):
18
+ nx.algebraic_connectivity(G, method="tracemin_chol")
19
+
20
+
21
+ def test_fiedler_vector_tracemin_chol():
22
+ """Test that "tracemin_chol" raises an exception."""
23
+ pytest.importorskip("scipy")
24
+ G = nx.barbell_graph(5, 4)
25
+ with pytest.raises(nx.NetworkXError):
26
+ nx.fiedler_vector(G, method="tracemin_chol")
27
+
28
+
29
+ def test_spectral_ordering_tracemin_chol():
30
+ """Test that "tracemin_chol" raises an exception."""
31
+ pytest.importorskip("scipy")
32
+ G = nx.barbell_graph(5, 4)
33
+ with pytest.raises(nx.NetworkXError):
34
+ nx.spectral_ordering(G, method="tracemin_chol")
35
+
36
+
37
+ def test_fiedler_vector_tracemin_unknown():
38
+ """Test that "tracemin_unknown" raises an exception."""
39
+ pytest.importorskip("scipy")
40
+ G = nx.barbell_graph(5, 4)
41
+ L = nx.laplacian_matrix(G)
42
+ X = np.asarray(np.random.normal(size=(1, L.shape[0]))).T
43
+ with pytest.raises(nx.NetworkXError, match="Unknown linear system solver"):
44
+ nx.linalg.algebraicconnectivity._tracemin_fiedler(
45
+ L, X, normalized=False, tol=1e-8, method="tracemin_unknown"
46
+ )
47
+
48
+
49
+ def test_spectral_bisection():
50
+ pytest.importorskip("scipy")
51
+ G = nx.barbell_graph(3, 0)
52
+ C = nx.spectral_bisection(G)
53
+ assert C == ({0, 1, 2}, {3, 4, 5})
54
+
55
+ mapping = dict(enumerate("badfec"))
56
+ G = nx.relabel_nodes(G, mapping)
57
+ C = nx.spectral_bisection(G)
58
+ assert C == (
59
+ {mapping[0], mapping[1], mapping[2]},
60
+ {mapping[3], mapping[4], mapping[5]},
61
+ )
62
+
63
+
64
+ def check_eigenvector(A, l, x):
65
+ nx = np.linalg.norm(x)
66
+ # Check zeroness.
67
+ assert nx != pytest.approx(0, abs=1e-07)
68
+ y = A @ x
69
+ ny = np.linalg.norm(y)
70
+ # Check collinearity.
71
+ assert x @ y == pytest.approx(nx * ny, abs=1e-7)
72
+ # Check eigenvalue.
73
+ assert ny == pytest.approx(l * nx, abs=1e-7)
74
+
75
+
76
+ class TestAlgebraicConnectivity:
77
+ @pytest.mark.parametrize("method", methods)
78
+ def test_directed(self, method):
79
+ G = nx.DiGraph()
80
+ pytest.raises(
81
+ nx.NetworkXNotImplemented, nx.algebraic_connectivity, G, method=method
82
+ )
83
+ pytest.raises(nx.NetworkXNotImplemented, nx.fiedler_vector, G, method=method)
84
+
85
+ @pytest.mark.parametrize("method", methods)
86
+ def test_null_and_singleton(self, method):
87
+ G = nx.Graph()
88
+ pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method)
89
+ pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method)
90
+ G.add_edge(0, 0)
91
+ pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method)
92
+ pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method)
93
+
94
+ @pytest.mark.parametrize("method", methods)
95
+ def test_disconnected(self, method):
96
+ G = nx.Graph()
97
+ G.add_nodes_from(range(2))
98
+ assert nx.algebraic_connectivity(G) == 0
99
+ pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method)
100
+ G.add_edge(0, 1, weight=0)
101
+ assert nx.algebraic_connectivity(G) == 0
102
+ pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method)
103
+
104
+ def test_unrecognized_method(self):
105
+ pytest.importorskip("scipy")
106
+ G = nx.path_graph(4)
107
+ pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method="unknown")
108
+ pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method="unknown")
109
+
110
+ @pytest.mark.parametrize("method", methods)
111
+ def test_two_nodes(self, method):
112
+ pytest.importorskip("scipy")
113
+ G = nx.Graph()
114
+ G.add_edge(0, 1, weight=1)
115
+ A = nx.laplacian_matrix(G)
116
+ assert nx.algebraic_connectivity(G, tol=1e-12, method=method) == pytest.approx(
117
+ 2, abs=1e-7
118
+ )
119
+ x = nx.fiedler_vector(G, tol=1e-12, method=method)
120
+ check_eigenvector(A, 2, x)
121
+
122
+ @pytest.mark.parametrize("method", methods)
123
+ def test_two_nodes_multigraph(self, method):
124
+ pytest.importorskip("scipy")
125
+ G = nx.MultiGraph()
126
+ G.add_edge(0, 0, spam=1e8)
127
+ G.add_edge(0, 1, spam=1)
128
+ G.add_edge(0, 1, spam=-2)
129
+ A = -3 * nx.laplacian_matrix(G, weight="spam")
130
+ assert nx.algebraic_connectivity(
131
+ G, weight="spam", tol=1e-12, method=method
132
+ ) == pytest.approx(6, abs=1e-7)
133
+ x = nx.fiedler_vector(G, weight="spam", tol=1e-12, method=method)
134
+ check_eigenvector(A, 6, x)
135
+
136
+ def test_abbreviation_of_method(self):
137
+ pytest.importorskip("scipy")
138
+ G = nx.path_graph(8)
139
+ A = nx.laplacian_matrix(G)
140
+ sigma = 2 - sqrt(2 + sqrt(2))
141
+ ac = nx.algebraic_connectivity(G, tol=1e-12, method="tracemin")
142
+ assert ac == pytest.approx(sigma, abs=1e-7)
143
+ x = nx.fiedler_vector(G, tol=1e-12, method="tracemin")
144
+ check_eigenvector(A, sigma, x)
145
+
146
+ @pytest.mark.parametrize("method", methods)
147
+ def test_path(self, method):
148
+ pytest.importorskip("scipy")
149
+ G = nx.path_graph(8)
150
+ A = nx.laplacian_matrix(G)
151
+ sigma = 2 - sqrt(2 + sqrt(2))
152
+ ac = nx.algebraic_connectivity(G, tol=1e-12, method=method)
153
+ assert ac == pytest.approx(sigma, abs=1e-7)
154
+ x = nx.fiedler_vector(G, tol=1e-12, method=method)
155
+ check_eigenvector(A, sigma, x)
156
+
157
+ @pytest.mark.parametrize("method", methods)
158
+ def test_problematic_graph_issue_2381(self, method):
159
+ pytest.importorskip("scipy")
160
+ G = nx.path_graph(4)
161
+ G.add_edges_from([(4, 2), (5, 1)])
162
+ A = nx.laplacian_matrix(G)
163
+ sigma = 0.438447187191
164
+ ac = nx.algebraic_connectivity(G, tol=1e-12, method=method)
165
+ assert ac == pytest.approx(sigma, abs=1e-7)
166
+ x = nx.fiedler_vector(G, tol=1e-12, method=method)
167
+ check_eigenvector(A, sigma, x)
168
+
169
+ @pytest.mark.parametrize("method", methods)
170
+ def test_cycle(self, method):
171
+ pytest.importorskip("scipy")
172
+ G = nx.cycle_graph(8)
173
+ A = nx.laplacian_matrix(G)
174
+ sigma = 2 - sqrt(2)
175
+ ac = nx.algebraic_connectivity(G, tol=1e-12, method=method)
176
+ assert ac == pytest.approx(sigma, abs=1e-7)
177
+ x = nx.fiedler_vector(G, tol=1e-12, method=method)
178
+ check_eigenvector(A, sigma, x)
179
+
180
+ @pytest.mark.parametrize("method", methods)
181
+ def test_seed_argument(self, method):
182
+ pytest.importorskip("scipy")
183
+ G = nx.cycle_graph(8)
184
+ A = nx.laplacian_matrix(G)
185
+ sigma = 2 - sqrt(2)
186
+ ac = nx.algebraic_connectivity(G, tol=1e-12, method=method, seed=1)
187
+ assert ac == pytest.approx(sigma, abs=1e-7)
188
+ x = nx.fiedler_vector(G, tol=1e-12, method=method, seed=1)
189
+ check_eigenvector(A, sigma, x)
190
+
191
+ @pytest.mark.parametrize(
192
+ ("normalized", "sigma", "laplacian_fn"),
193
+ (
194
+ (False, 0.2434017461399311, nx.laplacian_matrix),
195
+ (True, 0.08113391537997749, nx.normalized_laplacian_matrix),
196
+ ),
197
+ )
198
+ @pytest.mark.parametrize("method", methods)
199
+ def test_buckminsterfullerene(self, normalized, sigma, laplacian_fn, method):
200
+ pytest.importorskip("scipy")
201
+ G = nx.Graph(
202
+ [
203
+ (1, 10),
204
+ (1, 41),
205
+ (1, 59),
206
+ (2, 12),
207
+ (2, 42),
208
+ (2, 60),
209
+ (3, 6),
210
+ (3, 43),
211
+ (3, 57),
212
+ (4, 8),
213
+ (4, 44),
214
+ (4, 58),
215
+ (5, 13),
216
+ (5, 56),
217
+ (5, 57),
218
+ (6, 10),
219
+ (6, 31),
220
+ (7, 14),
221
+ (7, 56),
222
+ (7, 58),
223
+ (8, 12),
224
+ (8, 32),
225
+ (9, 23),
226
+ (9, 53),
227
+ (9, 59),
228
+ (10, 15),
229
+ (11, 24),
230
+ (11, 53),
231
+ (11, 60),
232
+ (12, 16),
233
+ (13, 14),
234
+ (13, 25),
235
+ (14, 26),
236
+ (15, 27),
237
+ (15, 49),
238
+ (16, 28),
239
+ (16, 50),
240
+ (17, 18),
241
+ (17, 19),
242
+ (17, 54),
243
+ (18, 20),
244
+ (18, 55),
245
+ (19, 23),
246
+ (19, 41),
247
+ (20, 24),
248
+ (20, 42),
249
+ (21, 31),
250
+ (21, 33),
251
+ (21, 57),
252
+ (22, 32),
253
+ (22, 34),
254
+ (22, 58),
255
+ (23, 24),
256
+ (25, 35),
257
+ (25, 43),
258
+ (26, 36),
259
+ (26, 44),
260
+ (27, 51),
261
+ (27, 59),
262
+ (28, 52),
263
+ (28, 60),
264
+ (29, 33),
265
+ (29, 34),
266
+ (29, 56),
267
+ (30, 51),
268
+ (30, 52),
269
+ (30, 53),
270
+ (31, 47),
271
+ (32, 48),
272
+ (33, 45),
273
+ (34, 46),
274
+ (35, 36),
275
+ (35, 37),
276
+ (36, 38),
277
+ (37, 39),
278
+ (37, 49),
279
+ (38, 40),
280
+ (38, 50),
281
+ (39, 40),
282
+ (39, 51),
283
+ (40, 52),
284
+ (41, 47),
285
+ (42, 48),
286
+ (43, 49),
287
+ (44, 50),
288
+ (45, 46),
289
+ (45, 54),
290
+ (46, 55),
291
+ (47, 54),
292
+ (48, 55),
293
+ ]
294
+ )
295
+ A = laplacian_fn(G)
296
+ try:
297
+ assert nx.algebraic_connectivity(
298
+ G, normalized=normalized, tol=1e-12, method=method
299
+ ) == pytest.approx(sigma, abs=1e-7)
300
+ x = nx.fiedler_vector(G, normalized=normalized, tol=1e-12, method=method)
301
+ check_eigenvector(A, sigma, x)
302
+ except nx.NetworkXError as err:
303
+ if err.args not in (
304
+ ("Cholesky solver unavailable.",),
305
+ ("LU solver unavailable.",),
306
+ ):
307
+ raise
308
+
309
+
310
+ class TestSpectralOrdering:
311
+ _graphs = (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
312
+
313
+ @pytest.mark.parametrize("graph", _graphs)
314
+ def test_nullgraph(self, graph):
315
+ G = graph()
316
+ pytest.raises(nx.NetworkXError, nx.spectral_ordering, G)
317
+
318
+ @pytest.mark.parametrize("graph", _graphs)
319
+ def test_singleton(self, graph):
320
+ G = graph()
321
+ G.add_node("x")
322
+ assert nx.spectral_ordering(G) == ["x"]
323
+ G.add_edge("x", "x", weight=33)
324
+ G.add_edge("x", "x", weight=33)
325
+ assert nx.spectral_ordering(G) == ["x"]
326
+
327
+ def test_unrecognized_method(self):
328
+ G = nx.path_graph(4)
329
+ pytest.raises(nx.NetworkXError, nx.spectral_ordering, G, method="unknown")
330
+
331
+ @pytest.mark.parametrize("method", methods)
332
+ def test_three_nodes(self, method):
333
+ pytest.importorskip("scipy")
334
+ G = nx.Graph()
335
+ G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight="spam")
336
+ order = nx.spectral_ordering(G, weight="spam", method=method)
337
+ assert set(order) == set(G)
338
+ assert {1, 3} in (set(order[:-1]), set(order[1:]))
339
+
340
+ @pytest.mark.parametrize("method", methods)
341
+ def test_three_nodes_multigraph(self, method):
342
+ pytest.importorskip("scipy")
343
+ G = nx.MultiDiGraph()
344
+ G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)])
345
+ order = nx.spectral_ordering(G, method=method)
346
+ assert set(order) == set(G)
347
+ assert {2, 3} in (set(order[:-1]), set(order[1:]))
348
+
349
+ @pytest.mark.parametrize("method", methods)
350
+ def test_path(self, method):
351
+ pytest.importorskip("scipy")
352
+ path = list(range(10))
353
+ np.random.shuffle(path)
354
+ G = nx.Graph()
355
+ nx.add_path(G, path)
356
+ order = nx.spectral_ordering(G, method=method)
357
+ assert order in [path, list(reversed(path))]
358
+
359
+ @pytest.mark.parametrize("method", methods)
360
+ def test_seed_argument(self, method):
361
+ pytest.importorskip("scipy")
362
+ path = list(range(10))
363
+ np.random.shuffle(path)
364
+ G = nx.Graph()
365
+ nx.add_path(G, path)
366
+ order = nx.spectral_ordering(G, method=method, seed=1)
367
+ assert order in [path, list(reversed(path))]
368
+
369
+ @pytest.mark.parametrize("method", methods)
370
+ def test_disconnected(self, method):
371
+ pytest.importorskip("scipy")
372
+ G = nx.Graph()
373
+ nx.add_path(G, range(0, 10, 2))
374
+ nx.add_path(G, range(1, 10, 2))
375
+ order = nx.spectral_ordering(G, method=method)
376
+ assert set(order) == set(G)
377
+ seqs = [
378
+ list(range(0, 10, 2)),
379
+ list(range(8, -1, -2)),
380
+ list(range(1, 10, 2)),
381
+ list(range(9, -1, -2)),
382
+ ]
383
+ assert order[:5] in seqs
384
+ assert order[5:] in seqs
385
+
386
+ @pytest.mark.parametrize(
387
+ ("normalized", "expected_order"),
388
+ (
389
+ (False, [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]),
390
+ (True, [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]),
391
+ ),
392
+ )
393
+ @pytest.mark.parametrize("method", methods)
394
+ def test_cycle(self, normalized, expected_order, method):
395
+ pytest.importorskip("scipy")
396
+ path = list(range(10))
397
+ G = nx.Graph()
398
+ nx.add_path(G, path, weight=5)
399
+ G.add_edge(path[-1], path[0], weight=1)
400
+ A = nx.laplacian_matrix(G).todense()
401
+ order = nx.spectral_ordering(G, normalized=normalized, method=method)
402
+ assert order in expected_order
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_bethehessian.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ np = pytest.importorskip("numpy")
4
+ pytest.importorskip("scipy")
5
+
6
+ import networkx as nx
7
+ from networkx.generators.degree_seq import havel_hakimi_graph
8
+
9
+
10
+ class TestBetheHessian:
11
+ @classmethod
12
+ def setup_class(cls):
13
+ deg = [3, 2, 2, 1, 0]
14
+ cls.G = havel_hakimi_graph(deg)
15
+ cls.P = nx.path_graph(3)
16
+
17
+ def test_bethe_hessian(self):
18
+ "Bethe Hessian matrix"
19
+ # fmt: off
20
+ H = np.array([[4, -2, 0],
21
+ [-2, 5, -2],
22
+ [0, -2, 4]])
23
+ # fmt: on
24
+ permutation = [2, 0, 1]
25
+ # Bethe Hessian gives expected form
26
+ np.testing.assert_equal(nx.bethe_hessian_matrix(self.P, r=2).todense(), H)
27
+ # nodelist is correctly implemented
28
+ np.testing.assert_equal(
29
+ nx.bethe_hessian_matrix(self.P, r=2, nodelist=permutation).todense(),
30
+ H[np.ix_(permutation, permutation)],
31
+ )
32
+ # Equal to Laplacian matrix when r=1
33
+ np.testing.assert_equal(
34
+ nx.bethe_hessian_matrix(self.G, r=1).todense(),
35
+ nx.laplacian_matrix(self.G).todense(),
36
+ )
37
+ # Correct default for the regularizer r
38
+ np.testing.assert_equal(
39
+ nx.bethe_hessian_matrix(self.G).todense(),
40
+ nx.bethe_hessian_matrix(self.G, r=1.25).todense(),
41
+ )
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/tests/test_laplacian.py ADDED
@@ -0,0 +1,242 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ np = pytest.importorskip("numpy")
4
+ pytest.importorskip("scipy")
5
+
6
+ import networkx as nx
7
+ from networkx.generators.degree_seq import havel_hakimi_graph
8
+ from networkx.generators.expanders import margulis_gabber_galil_graph
9
+
10
+
11
+ class TestLaplacian:
12
+ @classmethod
13
+ def setup_class(cls):
14
+ deg = [3, 2, 2, 1, 0]
15
+ cls.G = havel_hakimi_graph(deg)
16
+ cls.WG = nx.Graph(
17
+ (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges()
18
+ )
19
+ cls.WG.add_node(4)
20
+ cls.MG = nx.MultiGraph(cls.G)
21
+
22
+ # Graph with clsloops
23
+ cls.Gsl = cls.G.copy()
24
+ for node in cls.Gsl.nodes():
25
+ cls.Gsl.add_edge(node, node)
26
+
27
+ def test_laplacian(self):
28
+ "Graph Laplacian"
29
+ # fmt: off
30
+ NL = np.array([[ 3, -1, -1, -1, 0],
31
+ [-1, 2, -1, 0, 0],
32
+ [-1, -1, 2, 0, 0],
33
+ [-1, 0, 0, 1, 0],
34
+ [ 0, 0, 0, 0, 0]])
35
+ # fmt: on
36
+ WL = 0.5 * NL
37
+ OL = 0.3 * NL
38
+ np.testing.assert_equal(nx.laplacian_matrix(self.G).todense(), NL)
39
+ np.testing.assert_equal(nx.laplacian_matrix(self.MG).todense(), NL)
40
+ np.testing.assert_equal(
41
+ nx.laplacian_matrix(self.G, nodelist=[0, 1]).todense(),
42
+ np.array([[1, -1], [-1, 1]]),
43
+ )
44
+ np.testing.assert_equal(nx.laplacian_matrix(self.WG).todense(), WL)
45
+ np.testing.assert_equal(nx.laplacian_matrix(self.WG, weight=None).todense(), NL)
46
+ np.testing.assert_equal(
47
+ nx.laplacian_matrix(self.WG, weight="other").todense(), OL
48
+ )
49
+
50
+ def test_normalized_laplacian(self):
51
+ "Generalized Graph Laplacian"
52
+ # fmt: off
53
+ G = np.array([[ 1. , -0.408, -0.408, -0.577, 0.],
54
+ [-0.408, 1. , -0.5 , 0. , 0.],
55
+ [-0.408, -0.5 , 1. , 0. , 0.],
56
+ [-0.577, 0. , 0. , 1. , 0.],
57
+ [ 0. , 0. , 0. , 0. , 0.]])
58
+ GL = np.array([[ 1. , -0.408, -0.408, -0.577, 0. ],
59
+ [-0.408, 1. , -0.5 , 0. , 0. ],
60
+ [-0.408, -0.5 , 1. , 0. , 0. ],
61
+ [-0.577, 0. , 0. , 1. , 0. ],
62
+ [ 0. , 0. , 0. , 0. , 0. ]])
63
+ Lsl = np.array([[ 0.75 , -0.2887, -0.2887, -0.3536, 0. ],
64
+ [-0.2887, 0.6667, -0.3333, 0. , 0. ],
65
+ [-0.2887, -0.3333, 0.6667, 0. , 0. ],
66
+ [-0.3536, 0. , 0. , 0.5 , 0. ],
67
+ [ 0. , 0. , 0. , 0. , 0. ]])
68
+ # fmt: on
69
+
70
+ np.testing.assert_almost_equal(
71
+ nx.normalized_laplacian_matrix(self.G, nodelist=range(5)).todense(),
72
+ G,
73
+ decimal=3,
74
+ )
75
+ np.testing.assert_almost_equal(
76
+ nx.normalized_laplacian_matrix(self.G).todense(), GL, decimal=3
77
+ )
78
+ np.testing.assert_almost_equal(
79
+ nx.normalized_laplacian_matrix(self.MG).todense(), GL, decimal=3
80
+ )
81
+ np.testing.assert_almost_equal(
82
+ nx.normalized_laplacian_matrix(self.WG).todense(), GL, decimal=3
83
+ )
84
+ np.testing.assert_almost_equal(
85
+ nx.normalized_laplacian_matrix(self.WG, weight="other").todense(),
86
+ GL,
87
+ decimal=3,
88
+ )
89
+ np.testing.assert_almost_equal(
90
+ nx.normalized_laplacian_matrix(self.Gsl).todense(), Lsl, decimal=3
91
+ )
92
+
93
+
94
+ def test_directed_laplacian():
95
+ "Directed Laplacian"
96
+ # Graph used as an example in Sec. 4.1 of Langville and Meyer,
97
+ # "Google's PageRank and Beyond". The graph contains dangling nodes, so
98
+ # the pagerank random walk is selected by directed_laplacian
99
+ G = nx.DiGraph()
100
+ G.add_edges_from(
101
+ (
102
+ (1, 2),
103
+ (1, 3),
104
+ (3, 1),
105
+ (3, 2),
106
+ (3, 5),
107
+ (4, 5),
108
+ (4, 6),
109
+ (5, 4),
110
+ (5, 6),
111
+ (6, 4),
112
+ )
113
+ )
114
+ # fmt: off
115
+ GL = np.array([[ 0.9833, -0.2941, -0.3882, -0.0291, -0.0231, -0.0261],
116
+ [-0.2941, 0.8333, -0.2339, -0.0536, -0.0589, -0.0554],
117
+ [-0.3882, -0.2339, 0.9833, -0.0278, -0.0896, -0.0251],
118
+ [-0.0291, -0.0536, -0.0278, 0.9833, -0.4878, -0.6675],
119
+ [-0.0231, -0.0589, -0.0896, -0.4878, 0.9833, -0.2078],
120
+ [-0.0261, -0.0554, -0.0251, -0.6675, -0.2078, 0.9833]])
121
+ # fmt: on
122
+ L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G))
123
+ np.testing.assert_almost_equal(L, GL, decimal=3)
124
+
125
+ # Make the graph strongly connected, so we can use a random and lazy walk
126
+ G.add_edges_from(((2, 5), (6, 1)))
127
+ # fmt: off
128
+ GL = np.array([[ 1. , -0.3062, -0.4714, 0. , 0. , -0.3227],
129
+ [-0.3062, 1. , -0.1443, 0. , -0.3162, 0. ],
130
+ [-0.4714, -0.1443, 1. , 0. , -0.0913, 0. ],
131
+ [ 0. , 0. , 0. , 1. , -0.5 , -0.5 ],
132
+ [ 0. , -0.3162, -0.0913, -0.5 , 1. , -0.25 ],
133
+ [-0.3227, 0. , 0. , -0.5 , -0.25 , 1. ]])
134
+ # fmt: on
135
+ L = nx.directed_laplacian_matrix(
136
+ G, alpha=0.9, nodelist=sorted(G), walk_type="random"
137
+ )
138
+ np.testing.assert_almost_equal(L, GL, decimal=3)
139
+
140
+ # fmt: off
141
+ GL = np.array([[ 0.5 , -0.1531, -0.2357, 0. , 0. , -0.1614],
142
+ [-0.1531, 0.5 , -0.0722, 0. , -0.1581, 0. ],
143
+ [-0.2357, -0.0722, 0.5 , 0. , -0.0456, 0. ],
144
+ [ 0. , 0. , 0. , 0.5 , -0.25 , -0.25 ],
145
+ [ 0. , -0.1581, -0.0456, -0.25 , 0.5 , -0.125 ],
146
+ [-0.1614, 0. , 0. , -0.25 , -0.125 , 0.5 ]])
147
+ # fmt: on
148
+ L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type="lazy")
149
+ np.testing.assert_almost_equal(L, GL, decimal=3)
150
+
151
+ # Make a strongly connected periodic graph
152
+ G = nx.DiGraph()
153
+ G.add_edges_from(((1, 2), (2, 4), (4, 1), (1, 3), (3, 4)))
154
+ # fmt: off
155
+ GL = np.array([[ 0.5 , -0.176, -0.176, -0.25 ],
156
+ [-0.176, 0.5 , 0. , -0.176],
157
+ [-0.176, 0. , 0.5 , -0.176],
158
+ [-0.25 , -0.176, -0.176, 0.5 ]])
159
+ # fmt: on
160
+ L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G))
161
+ np.testing.assert_almost_equal(L, GL, decimal=3)
162
+
163
+
164
+ def test_directed_combinatorial_laplacian():
165
+ "Directed combinatorial Laplacian"
166
+ # Graph used as an example in Sec. 4.1 of Langville and Meyer,
167
+ # "Google's PageRank and Beyond". The graph contains dangling nodes, so
168
+ # the pagerank random walk is selected by directed_laplacian
169
+ G = nx.DiGraph()
170
+ G.add_edges_from(
171
+ (
172
+ (1, 2),
173
+ (1, 3),
174
+ (3, 1),
175
+ (3, 2),
176
+ (3, 5),
177
+ (4, 5),
178
+ (4, 6),
179
+ (5, 4),
180
+ (5, 6),
181
+ (6, 4),
182
+ )
183
+ )
184
+ # fmt: off
185
+ GL = np.array([[ 0.0366, -0.0132, -0.0153, -0.0034, -0.0020, -0.0027],
186
+ [-0.0132, 0.0450, -0.0111, -0.0076, -0.0062, -0.0069],
187
+ [-0.0153, -0.0111, 0.0408, -0.0035, -0.0083, -0.0027],
188
+ [-0.0034, -0.0076, -0.0035, 0.3688, -0.1356, -0.2187],
189
+ [-0.0020, -0.0062, -0.0083, -0.1356, 0.2026, -0.0505],
190
+ [-0.0027, -0.0069, -0.0027, -0.2187, -0.0505, 0.2815]])
191
+ # fmt: on
192
+
193
+ L = nx.directed_combinatorial_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G))
194
+ np.testing.assert_almost_equal(L, GL, decimal=3)
195
+
196
+ # Make the graph strongly connected, so we can use a random and lazy walk
197
+ G.add_edges_from(((2, 5), (6, 1)))
198
+
199
+ # fmt: off
200
+ GL = np.array([[ 0.1395, -0.0349, -0.0465, 0. , 0. , -0.0581],
201
+ [-0.0349, 0.093 , -0.0116, 0. , -0.0465, 0. ],
202
+ [-0.0465, -0.0116, 0.0698, 0. , -0.0116, 0. ],
203
+ [ 0. , 0. , 0. , 0.2326, -0.1163, -0.1163],
204
+ [ 0. , -0.0465, -0.0116, -0.1163, 0.2326, -0.0581],
205
+ [-0.0581, 0. , 0. , -0.1163, -0.0581, 0.2326]])
206
+ # fmt: on
207
+
208
+ L = nx.directed_combinatorial_laplacian_matrix(
209
+ G, alpha=0.9, nodelist=sorted(G), walk_type="random"
210
+ )
211
+ np.testing.assert_almost_equal(L, GL, decimal=3)
212
+
213
+ # fmt: off
214
+ GL = np.array([[ 0.0698, -0.0174, -0.0233, 0. , 0. , -0.0291],
215
+ [-0.0174, 0.0465, -0.0058, 0. , -0.0233, 0. ],
216
+ [-0.0233, -0.0058, 0.0349, 0. , -0.0058, 0. ],
217
+ [ 0. , 0. , 0. , 0.1163, -0.0581, -0.0581],
218
+ [ 0. , -0.0233, -0.0058, -0.0581, 0.1163, -0.0291],
219
+ [-0.0291, 0. , 0. , -0.0581, -0.0291, 0.1163]])
220
+ # fmt: on
221
+
222
+ L = nx.directed_combinatorial_laplacian_matrix(
223
+ G, alpha=0.9, nodelist=sorted(G), walk_type="lazy"
224
+ )
225
+ np.testing.assert_almost_equal(L, GL, decimal=3)
226
+
227
+ E = nx.DiGraph(margulis_gabber_galil_graph(2))
228
+ L = nx.directed_combinatorial_laplacian_matrix(E)
229
+ # fmt: off
230
+ expected = np.array(
231
+ [[ 0.16666667, -0.08333333, -0.08333333, 0. ],
232
+ [-0.08333333, 0.16666667, 0. , -0.08333333],
233
+ [-0.08333333, 0. , 0.16666667, -0.08333333],
234
+ [ 0. , -0.08333333, -0.08333333, 0.16666667]]
235
+ )
236
+ # fmt: on
237
+ np.testing.assert_almost_equal(L, expected, decimal=6)
238
+
239
+ with pytest.raises(nx.NetworkXError):
240
+ nx.directed_combinatorial_laplacian_matrix(G, walk_type="pagerank", alpha=100)
241
+ with pytest.raises(nx.NetworkXError):
242
+ nx.directed_combinatorial_laplacian_matrix(G, walk_type="silly")
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (886 Bytes). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-311.pyc ADDED
Binary file (10.8 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/leda.cpython-311.pyc ADDED
Binary file (4.84 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/p2g.cpython-311.pyc ADDED
Binary file (5.09 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/pajek.cpython-311.pyc ADDED
Binary file (13.3 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-311.pyc ADDED
Binary file (14.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/__pycache__/text.cpython-311.pyc ADDED
Binary file (33.1 kB). View file
 
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/gml.py ADDED
@@ -0,0 +1,878 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Read graphs in GML format.
3
+
4
+ "GML, the Graph Modelling Language, is our proposal for a portable
5
+ file format for graphs. GML's key features are portability, simple
6
+ syntax, extensibility and flexibility. A GML file consists of a
7
+ hierarchical key-value lists. Graphs can be annotated with arbitrary
8
+ data structures. The idea for a common file format was born at the
9
+ GD'95; this proposal is the outcome of many discussions. GML is the
10
+ standard file format in the Graphlet graph editor system. It has been
11
+ overtaken and adapted by several other systems for drawing graphs."
12
+
13
+ GML files are stored using a 7-bit ASCII encoding with any extended
14
+ ASCII characters (iso8859-1) appearing as HTML character entities.
15
+ You will need to give some thought into how the exported data should
16
+ interact with different languages and even different Python versions.
17
+ Re-importing from gml is also a concern.
18
+
19
+ Without specifying a `stringizer`/`destringizer`, the code is capable of
20
+ writing `int`/`float`/`str`/`dict`/`list` data as required by the GML
21
+ specification. For writing other data types, and for reading data other
22
+ than `str` you need to explicitly supply a `stringizer`/`destringizer`.
23
+
24
+ For additional documentation on the GML file format, please see the
25
+ `GML website <https://web.archive.org/web/20190207140002/http://www.fim.uni-passau.de/index.php?id=17297&L=1>`_.
26
+
27
+ Several example graphs in GML format may be found on Mark Newman's
28
+ `Network data page <http://www-personal.umich.edu/~mejn/netdata/>`_.
29
+ """
30
+ import html.entities as htmlentitydefs
31
+ import re
32
+ import warnings
33
+ from ast import literal_eval
34
+ from collections import defaultdict
35
+ from enum import Enum
36
+ from io import StringIO
37
+ from typing import Any, NamedTuple
38
+
39
+ import networkx as nx
40
+ from networkx.exception import NetworkXError
41
+ from networkx.utils import open_file
42
+
43
+ __all__ = ["read_gml", "parse_gml", "generate_gml", "write_gml"]
44
+
45
+
46
def escape(text):
    """Escape a string using XML character references.

    Unprintable or non-ASCII characters, double quotes and ampersands
    are replaced with numeric XML character references so the result is
    plain 7-bit ASCII suitable for a GML file.
    """

    def to_charref(match):
        # Replace the offending character with its decimal reference.
        return f"&#{ord(match.group(0))};"

    escaped = re.sub('[^ -~]|[&"]', to_charref, text)
    return escaped if isinstance(escaped, str) else str(escaped)
59
+
60
+
61
def unescape(text):
    """Replace XML character references with the referenced characters."""

    def resolve(match):
        ref = match.group(0)
        if ref[1] == "#":
            # Numeric character reference, decimal or hexadecimal.
            code = int(ref[3:-1], 16) if ref[2] == "x" else int(ref[2:-1])
        else:
            # Named entity; unknown names pass through untouched.
            try:
                code = htmlentitydefs.name2codepoint[ref[1:-1]]
            except KeyError:
                return ref
        try:
            return chr(code)
        except (ValueError, OverflowError):
            # Out-of-range code point: leave the reference unchanged.
            return ref

    return re.sub("&(?:[0-9A-Za-z]+|#(?:[0-9]+|x[0-9A-Fa-f]+));", resolve, text)
84
+
85
+
86
def literal_destringizer(rep):
    """Convert a Python literal to the value it represents.

    Parameters
    ----------
    rep : string
        A Python literal.

    Returns
    -------
    value : object
        The value of the Python literal.

    Raises
    ------
    ValueError
        If `rep` is not a string or not a valid Python literal.
    """
    if not isinstance(rep, str):
        raise ValueError(f"{rep!r} is not a string")
    try:
        return literal_eval(rep)
    except SyntaxError as err:
        raise ValueError(f"{rep!r} is not a valid Python literal") from err
112
+
113
+
114
@open_file(0, mode="rb")
@nx._dispatch(graphs=None)
def read_gml(path, label="label", destringizer=None):
    """Read graph in GML format from `path`.

    Parameters
    ----------
    path : filename or filehandle
        The filename or filehandle to read from.

    label : string, optional
        If not None, the parsed nodes will be renamed according to node
        attributes indicated by `label`. Default value: 'label'.

    destringizer : callable, optional
        A `destringizer` that recovers values stored as strings in GML. If it
        cannot convert a string to a value, a `ValueError` is raised. Default
        value : None.

    Returns
    -------
    G : NetworkX graph
        The parsed graph.

    Raises
    ------
    NetworkXError
        If the input cannot be parsed.

    See Also
    --------
    write_gml, parse_gml
    literal_destringizer

    Notes
    -----
    GML files are stored using a 7-bit ASCII encoding with any extended
    ASCII characters (iso8859-1) appearing as HTML character entities.
    Without specifying a `stringizer`/`destringizer`, the code is capable of
    writing `int`/`float`/`str`/`dict`/`list` data as required by the GML
    specification. For writing other data types, and for reading data other
    than `str` you need to explicitly supply a `stringizer`/`destringizer`.

    For additional documentation on the GML file format, please see the
    `GML url <https://web.archive.org/web/20190207140002/http://www.fim.uni-passau.de/index.php?id=17297&L=1>`_.

    See the module docstring :mod:`networkx.readwrite.gml` for more details.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.write_gml(G, "test.gml")

    GML values are interpreted as strings by default:

    >>> H = nx.read_gml("test.gml")
    >>> H.nodes
    NodeView(('0', '1', '2', '3'))

    When a `destringizer` is provided, GML values are converted to the provided type.
    For example, integer nodes can be recovered as shown below:

    >>> J = nx.read_gml("test.gml", destringizer=int)
    >>> J.nodes
    NodeView((0, 1, 2, 3))

    """

    def filter_lines(lines):
        # Decode each raw line (the file is opened in binary mode), reject
        # non-ASCII input, and strip the trailing newline before handing
        # the line to the tokenizer.
        for line in lines:
            try:
                line = line.decode("ascii")
            except UnicodeDecodeError as err:
                raise NetworkXError("input is not ASCII-encoded") from err
            if not isinstance(line, str):
                # BUGFIX: previously rebound the outer iterable
                # (`lines = str(lines)`) instead of coercing the current
                # line, matching `decode_line` in `parse_gml`.
                line = str(line)
            if line and line[-1] == "\n":
                line = line[:-1]
            yield line

    G = parse_gml_lines(filter_lines(path), label, destringizer)
    return G
196
+
197
+
198
@nx._dispatch(graphs=None)
def parse_gml(lines, label="label", destringizer=None):
    """Parse GML graph from a string or iterable.

    Parameters
    ----------
    lines : string or iterable of strings
        Data in GML format.

    label : string, optional
        If not None, the parsed nodes will be renamed according to node
        attributes indicated by `label`. Default value: 'label'.

    destringizer : callable, optional
        A `destringizer` that recovers values stored as strings in GML. If it
        cannot convert a string to a value, a `ValueError` is raised. Default
        value : None.

    Returns
    -------
    G : NetworkX graph
        The parsed graph.

    Raises
    ------
    NetworkXError
        If the input cannot be parsed.

    See Also
    --------
    write_gml, read_gml

    Notes
    -----
    This stores nested GML attributes as dictionaries in the NetworkX graph,
    node, and edge attribute structures.

    GML input must use a 7-bit ASCII encoding; extended ASCII characters
    (iso8859-1) appear as HTML character entities.  Without a
    `stringizer`/`destringizer` only `int`/`float`/`str`/`dict`/`list`
    data are handled, as required by the GML specification.

    For additional documentation on the GML file format, please see the
    `GML url <https://web.archive.org/web/20190207140002/http://www.fim.uni-passau.de/index.php?id=17297&L=1>`_.

    See the module docstring :mod:`networkx.readwrite.gml` for more details.
    """

    def ensure_text(line):
        # Bytes input must decode as ASCII; anything non-str is coerced.
        if isinstance(line, bytes):
            try:
                line.decode("ascii")
            except UnicodeDecodeError as err:
                raise NetworkXError("input is not ASCII-encoded") from err
        return line if isinstance(line, str) else str(line)

    def iter_clean_lines(source):
        if isinstance(source, str):
            # A single string is split into its constituent lines.
            yield from ensure_text(source).splitlines()
            return
        for raw in source:
            line = ensure_text(raw)
            if line.endswith("\n"):
                line = line[:-1]
            if "\n" in line:
                raise NetworkXError("input line contains newline")
            yield line

    return parse_gml_lines(iter_clean_lines(lines), label, destringizer)
274
+
275
+
276
class Pattern(Enum):
    """Index of each token-matching pattern used by ``tokenize``.

    Each member's value is the position of its group in the combined
    regular expression, so ``Pattern(i)`` recovers the token category
    from a match-group index.
    """

    KEYS = 0
    REALS = 1
    INTS = 2
    STRINGS = 3
    DICT_START = 4
    DICT_END = 5
    COMMENT_WHITESPACE = 6
286
+
287
+
288
class Token(NamedTuple):
    # One lexeme produced by the GML tokenizer, tagged with its category
    # and 1-based source location for error reporting.  The EOF sentinel
    # uses category None.
    category: Pattern
    value: Any
    line: int
    position: int
293
+
294
+
295
LIST_START_VALUE = "_networkx_list_start"


def parse_gml_lines(lines, label, destringizer):
    """Parse GML `lines` into a graph."""

    def tokenize():
        # Combined regex; match group i+1 corresponds to Pattern(i).
        patterns = [
            r"[A-Za-z][0-9A-Za-z_]*\b",  # keys
            # reals (also bare INF, with optional exponent)
            r"[+-]?(?:[0-9]*\.[0-9]+|[0-9]+\.[0-9]*|INF)(?:[Ee][+-]?[0-9]+)?",
            r"[+-]?[0-9]+",  # ints
            r'".*?"',  # strings
            r"\[",  # dict start
            r"\]",  # dict end
            r"#.*$|\s+",  # comments and whitespace
        ]
        tokens = re.compile("|".join(f"({pattern})" for pattern in patterns))
        lineno = 0
        multilines = []  # accumulates a quoted entry spread over several lines
        for line in lines:
            pos = 0

            # Handle quoted values that continue across line breaks.
            # (Escaped quotes, if ever supported, would be handled here.)
            if multilines:
                multilines.append(line.strip())
                if line[-1] == '"':  # closing line of the multiline entry
                    # Join with spaces: reintroducing newlines would break
                    # the tokenizer.
                    line = " ".join(multilines)
                    multilines = []
                else:  # entry continues on the next line
                    lineno += 1
                    continue
            elif line.count('"') == 1:  # possible opening of a multiline entry
                if line.strip()[0] != '"' and line.strip()[-1] != '"':
                    # We expect something like `key "value`; a quote at either
                    # end is a formatting mistake left for the tokenizer to
                    # report.
                    multilines = [line.rstrip()]
                    lineno += 1
                    continue

            length = len(line)

            while pos < length:
                match = tokens.match(line, pos)
                if match is None:
                    raise NetworkXError(
                        f"cannot tokenize {line[pos:]} at ({lineno + 1}, {pos + 1})"
                    )
                for i in range(len(patterns)):
                    group = match.group(i + 1)
                    if group is None:
                        continue
                    if i == 0:  # keys
                        value = group.rstrip()
                    elif i == 1:  # reals
                        value = float(group)
                    elif i == 2:  # ints
                        value = int(group)
                    else:
                        value = group
                    if i != 6:  # comments/whitespace produce no token
                        yield Token(Pattern(i), value, lineno + 1, pos + 1)
                    pos += len(group)
                    break
            lineno += 1
        yield Token(None, None, lineno + 1, 1)  # EOF sentinel

    def unexpected(curr_token, expected):
        category, value, lineno, pos = curr_token
        value = repr(value) if value is not None else "EOF"
        raise NetworkXError(f"expected {expected}, found {value} at ({lineno}, {pos})")

    def consume(curr_token, category, expected):
        # Advance past a token of the given category or fail loudly.
        if curr_token.category == category:
            return next(tokens)
        unexpected(curr_token, expected)

    def parse_kv(curr_token):
        dct = defaultdict(list)
        while curr_token.category == Pattern.KEYS:
            key = curr_token.value
            curr_token = next(tokens)
            category = curr_token.category
            if category in (Pattern.REALS, Pattern.INTS):
                value = curr_token.value
                curr_token = next(tokens)
            elif category == Pattern.STRINGS:
                value = unescape(curr_token.value[1:-1])
                if destringizer:
                    try:
                        value = destringizer(value)
                    except ValueError:
                        pass
                # Empty lists and tuples round-trip as their repr strings.
                if value == "()":
                    value = ()
                if value == "[]":
                    value = []
                curr_token = next(tokens)
            elif category == Pattern.DICT_START:
                curr_token, value = parse_dict(curr_token)
            elif key in ("id", "label", "source", "target"):
                # Allow string-convertible id/label/source/target values.
                try:
                    value = unescape(str(curr_token.value))
                    if destringizer:
                        try:
                            value = destringizer(value)
                        except ValueError:
                            pass
                    curr_token = next(tokens)
                except Exception:
                    unexpected(
                        curr_token,
                        "an int, float, string, '[' or string"
                        + " convertible ASCII value for node id or label",
                    )
            elif curr_token.value in {"NAN", "INF"}:
                # GML treats unquoted words as keys, so bare NAN and plain
                # INF land in this branch; convert them to floats so labels
                # and values (but not keys) may be nan or infinity.  +/-INF
                # are already handled by the 'reals' pattern in tokenize().
                value = float(curr_token.value)
                curr_token = next(tokens)
            else:
                unexpected(curr_token, "an int, float, string or '['")
            dct[key].append(value)

        def flatten(values):
            # Repeated keys accumulate in a list; single occurrences are
            # unwrapped, and a leading LIST_START_VALUE marker forces an
            # explicit list.
            if not isinstance(values, list):
                return values
            if len(values) == 1:
                return values[0]
            if values[0] == LIST_START_VALUE:
                return values[1:]
            return values

        return curr_token, {key: flatten(values) for key, values in dct.items()}

    def parse_dict(curr_token):
        curr_token = consume(curr_token, Pattern.DICT_START, "'['")  # '['
        curr_token, dct = parse_kv(curr_token)  # contents
        curr_token = consume(curr_token, Pattern.DICT_END, "']'")  # ']'
        return curr_token, dct

    def parse_graph():
        curr_token, dct = parse_kv(next(tokens))
        if curr_token.category is not None:  # EOF sentinel has category None
            unexpected(curr_token, "EOF")
        if "graph" not in dct:
            raise NetworkXError("input contains no graph")
        graph = dct["graph"]
        if isinstance(graph, list):
            raise NetworkXError("input contains more than one graph")
        return graph

    tokens = tokenize()
    graph = parse_graph()

    directed = graph.pop("directed", False)
    multigraph = graph.pop("multigraph", False)
    if multigraph:
        G = nx.MultiDiGraph() if directed else nx.MultiGraph()
    else:
        G = nx.DiGraph() if directed else nx.Graph()
    G.graph.update({k: v for k, v in graph.items() if k not in ("node", "edge")})

    def pop_attr(dct, category, attr, i):
        try:
            return dct.pop(attr)
        except KeyError as err:
            raise NetworkXError(f"{category} #{i} has no {attr!r} attribute") from err

    nodes = graph.get("node", [])
    mapping = {}
    node_labels = set()
    for i, node in enumerate(nodes if isinstance(nodes, list) else [nodes]):
        node_id = pop_attr(node, "node", "id", i)
        if node_id in G:
            raise NetworkXError(f"node id {node_id!r} is duplicated")
        if label is not None and label != "id":
            node_label = pop_attr(node, "node", label, i)
            if node_label in node_labels:
                raise NetworkXError(f"node label {node_label!r} is duplicated")
            node_labels.add(node_label)
            mapping[node_id] = node_label
        G.add_node(node_id, **node)

    edges = graph.get("edge", [])
    for i, edge in enumerate(edges if isinstance(edges, list) else [edges]):
        source = pop_attr(edge, "edge", "source", i)
        target = pop_attr(edge, "edge", "target", i)
        if source not in G:
            raise NetworkXError(f"edge #{i} has undefined source {source!r}")
        if target not in G:
            raise NetworkXError(f"edge #{i} has undefined target {target!r}")
        if multigraph:
            key = edge.pop("key", None)
            if key is not None and G.has_edge(source, target, key):
                arrow = "->" if directed else "--"
                msg = f"edge #{i} ({source!r}{arrow}{target!r}, {key!r})"
                msg2 = 'Hint: If multigraph add "multigraph 1" to file header.'
                raise nx.NetworkXError(msg + " is duplicated\n" + msg2)
            G.add_edge(source, target, key, **edge)
        elif not G.has_edge(source, target):
            G.add_edge(source, target, **edge)
        else:
            arrow = "->" if directed else "--"
            raise nx.NetworkXError(
                f"edge #{i} ({source!r}{arrow}{target!r}) is duplicated"
            )

    if label is not None and label != "id":
        G = nx.relabel_nodes(G, mapping)
    return G
522
+
523
+
524
def literal_stringizer(value):
    """Convert a `value` to a Python literal in GML representation.

    Parameters
    ----------
    value : object
        The `value` to be converted to GML representation.

    Returns
    -------
    rep : string
        A double-quoted Python literal representing value. Unprintable
        characters are replaced by XML character references.

    Raises
    ------
    ValueError
        If `value` cannot be converted to GML.

    Notes
    -----
    The original value can be recovered using the
    :func:`networkx.readwrite.gml.literal_destringizer` function.
    """

    def emit(obj):
        if isinstance(obj, (int, bool)) or obj is None:
            # GML encodes booleans as the integers 1 and 0.
            if obj is True:
                buf.write("1")
            elif obj is False:
                buf.write("0")
            else:
                buf.write(str(obj))
        elif isinstance(obj, str):
            text = repr(obj)
            if text[0] != "u":
                try:
                    obj.encode("latin1")
                except UnicodeEncodeError:
                    # Mark strings outside latin-1 so they round-trip.
                    text = "u" + text
            buf.write(text)
        elif isinstance(obj, (float, complex, str, bytes)):
            buf.write(repr(obj))
        elif isinstance(obj, list):
            buf.write("[")
            emit_joined(obj)
            buf.write("]")
        elif isinstance(obj, tuple):
            if len(obj) > 1:
                buf.write("(")
                emit_joined(obj)
                buf.write(")")
            elif obj:
                # A single-element tuple needs its trailing comma.
                buf.write("(")
                emit(obj[0])
                buf.write(",)")
            else:
                buf.write("()")
        elif isinstance(obj, dict):
            buf.write("{")
            first = True
            for k, v in obj.items():
                if first:
                    first = False
                else:
                    buf.write(",")
                emit(k)
                buf.write(":")
                emit(v)
            buf.write("}")
        elif isinstance(obj, set):
            buf.write("{")
            emit_joined(obj)
            buf.write("}")
        else:
            raise ValueError(f"{obj!r} cannot be converted into a Python literal")

    def emit_joined(items):
        # Write comma-separated literals for each item of a container.
        first = True
        for item in items:
            if first:
                first = False
            else:
                buf.write(",")
            emit(item)

    buf = StringIO()
    emit(value)
    return buf.getvalue()
623
+
624
+
625
def generate_gml(G, stringizer=None):
    r"""Generate a single entry of the graph `G` in GML format.

    Parameters
    ----------
    G : NetworkX graph
        The graph to be converted to GML.

    stringizer : callable, optional
        A `stringizer` which converts non-int/non-float/non-dict values into
        strings. If it cannot convert a value into a string, it should raise a
        `ValueError` to indicate that. Default value: None.

    Returns
    -------
    lines: generator of strings
        Lines of GML data. Newlines are not appended.

    Raises
    ------
    NetworkXError
        If `stringizer` cannot convert a value into a string, or the value to
        convert is not a string while `stringizer` is None.

    See Also
    --------
    literal_stringizer

    Notes
    -----
    Graph attributes named 'directed', 'multigraph', 'node' or
    'edge', node attributes named 'id' or 'label', edge attributes
    named 'source' or 'target' (or 'key' if `G` is a multigraph)
    are ignored because these attribute names are used to encode the graph
    structure.

    GML files are stored using a 7-bit ASCII encoding with any extended
    ASCII characters (iso8859-1) appearing as HTML character entities.
    Without specifying a `stringizer`/`destringizer`, the code is capable of
    writing `int`/`float`/`str`/`dict`/`list` data as required by the GML
    specification. For writing other data types, and for reading data other
    than `str` you need to explicitly supply a `stringizer`/`destringizer`.

    For additional documentation on the GML file format, please see the
    `GML url <https://web.archive.org/web/20190207140002/http://www.fim.uni-passau.de/index.php?id=17297&L=1>`_.

    See the module docstring :mod:`networkx.readwrite.gml` for more details.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_node("1")
    >>> print("\n".join(nx.generate_gml(G)))
    graph [
      node [
        id 0
        label "1"
      ]
    ]
    """
    valid_keys = re.compile("^[A-Za-z][0-9A-Za-z_]*$")

    def stringize(key, value, ignored_keys, indent, in_list=False):
        if not isinstance(key, str):
            raise NetworkXError(f"{key!r} is not a string")
        if not valid_keys.match(key):
            raise NetworkXError(f"{key!r} is not a valid key")
        if key in ignored_keys:
            return
        if isinstance(value, (int, bool)):
            if key == "label":
                # Labels are always written quoted.
                yield f'{indent}{key} "{value}"'
            elif value is True:
                # python bool is an instance of int
                yield f"{indent}{key} 1"
            elif value is False:
                yield f"{indent}{key} 0"
            elif value < -(2**31) or value >= 2**31:
                # GML only supports signed 32-bit integers; quote the rest.
                yield f'{indent}{key} "{value}"'
            else:
                yield f"{indent}{key} {value}"
        elif isinstance(value, float):
            text = repr(value).upper()
            # GML matches INF to keys, so prepend + to INF. Use repr(float(*))
            # instead of a string literal to future-proof against repr changes.
            if text == repr(float("inf")).upper():
                text = "+" + text
            else:
                # GML requires a real literal to contain a decimal point,
                # which repr may omit for an integral mantissa.
                epos = text.rfind("E")
                if epos != -1 and text.find(".", 0, epos) == -1:
                    text = text[:epos] + "." + text[epos:]
            if key == "label":
                yield f'{indent}{key} "{text}"'
            else:
                yield f"{indent}{key} {text}"
        elif isinstance(value, dict):
            yield indent + key + " ["
            inner = indent + "  "
            for sub_key, sub_value in value.items():
                yield from stringize(sub_key, sub_value, (), inner)
            yield indent + "]"
        elif isinstance(value, tuple) and key == "label":
            yield indent + key + f" \"({','.join(repr(v) for v in value)})\""
        elif isinstance(value, (list, tuple)) and key != "label" and not in_list:
            if len(value) == 0:
                yield indent + key + " " + f'"{value!r}"'
            if len(value) == 1:
                # Marker so a one-element list survives the round trip.
                yield indent + key + " " + f'"{LIST_START_VALUE}"'
            for item in value:
                yield from stringize(key, item, (), indent, True)
        else:
            if stringizer:
                try:
                    value = stringizer(value)
                except ValueError as err:
                    raise NetworkXError(
                        f"{value!r} cannot be converted into a string"
                    ) from err
            if not isinstance(value, str):
                raise NetworkXError(f"{value!r} is not a string")
            yield indent + key + ' "' + escape(value) + '"'

    multigraph = G.is_multigraph()
    yield "graph ["

    # Graph attributes.
    if G.is_directed():
        yield "  directed 1"
    if multigraph:
        yield "  multigraph 1"
    ignored_keys = {"directed", "multigraph", "node", "edge"}
    for attr, value in G.graph.items():
        yield from stringize(attr, value, ignored_keys, "  ")

    # Node data; node ids are consecutive integers in iteration order.
    node_id = dict(zip(G, range(len(G))))
    ignored_keys = {"id", "label"}
    for node, attrs in G.nodes.items():
        yield "  node ["
        yield "    id " + str(node_id[node])
        yield from stringize("label", node, (), "    ")
        for attr, value in attrs.items():
            yield from stringize(attr, value, ignored_keys, "    ")
        yield "  ]"

    # Edge data (with keys for multigraphs).
    ignored_keys = {"source", "target"}
    kwargs = {"data": True}
    if multigraph:
        ignored_keys.add("key")
        kwargs["keys"] = True
    for e in G.edges(**kwargs):
        yield "  edge ["
        yield "    source " + str(node_id[e[0]])
        yield "    target " + str(node_id[e[1]])
        if multigraph:
            yield from stringize("key", e[2], (), "    ")
        for attr, value in e[-1].items():
            yield from stringize(attr, value, ignored_keys, "    ")
        yield "  ]"
    yield "]"
814
+
815
+
816
@open_file(1, mode="wb")
def write_gml(G, path, stringizer=None):
    """Write a graph `G` in GML format to the file or file handle `path`.

    Parameters
    ----------
    G : NetworkX graph
        The graph to be converted to GML.

    path : filename or filehandle
        The filename or filehandle to write. Files whose names end with .gz or
        .bz2 will be compressed.

    stringizer : callable, optional
        A `stringizer` which converts non-int/non-float/non-dict values into
        strings. If it cannot convert a value into a string, it should raise a
        `ValueError` to indicate that. Default value: None.

    Raises
    ------
    NetworkXError
        If `stringizer` cannot convert a value into a string, or the value to
        convert is not a string while `stringizer` is None.

    See Also
    --------
    read_gml, generate_gml
    literal_stringizer

    Notes
    -----
    Graph attributes named 'directed', 'multigraph', 'node' or
    'edge', node attributes named 'id' or 'label', edge attributes
    named 'source' or 'target' (or 'key' if `G` is a multigraph)
    are ignored because these attribute names are used to encode the graph
    structure.

    GML files are stored using a 7-bit ASCII encoding with any extended
    ASCII characters (iso8859-1) appearing as HTML character entities.
    Without specifying a `stringizer`/`destringizer`, the code is capable of
    writing `int`/`float`/`str`/`dict`/`list` data as required by the GML
    specification. For writing other data types, and for reading data other
    than `str` you need to explicitly supply a `stringizer`/`destringizer`.

    Note that while we allow non-standard GML to be read from a file, we make
    sure to write GML format. In particular, underscores are not allowed in
    attribute names.
    For additional documentation on the GML file format, please see the
    `GML url <https://web.archive.org/web/20190207140002/http://www.fim.uni-passau.de/index.php?id=17297&L=1>`_.

    See the module docstring :mod:`networkx.readwrite.gml` for more details.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.write_gml(G, "test.gml")

    Filenames ending in .gz or .bz2 will be compressed.

    >>> nx.write_gml(G, "test.gml.gz")
    """
    # The file is opened in binary mode by @open_file, so each generated
    # line is ASCII-encoded with its newline restored.
    path.writelines(
        (line + "\n").encode("ascii") for line in generate_gml(G, stringizer)
    )
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/graphml.py ADDED
@@ -0,0 +1,1051 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *******
3
+ GraphML
4
+ *******
5
+ Read and write graphs in GraphML format.
6
+
7
+ .. warning::
8
+
9
+ This parser uses the standard xml library present in Python, which is
10
+ insecure - see :external+python:mod:`xml` for additional information.
11
+ Only parse GraphML files you trust.
12
+
13
+ This implementation does not support mixed graphs (directed and unidirected
14
+ edges together), hyperedges, nested graphs, or ports.
15
+
16
+ "GraphML is a comprehensive and easy-to-use file format for graphs. It
17
+ consists of a language core to describe the structural properties of a
18
+ graph and a flexible extension mechanism to add application-specific
19
+ data. Its main features include support of
20
+
21
+ * directed, undirected, and mixed graphs,
22
+ * hypergraphs,
23
+ * hierarchical graphs,
24
+ * graphical representations,
25
+ * references to external data,
26
+ * application-specific attribute data, and
27
+ * light-weight parsers.
28
+
29
+ Unlike many other file formats for graphs, GraphML does not use a
30
+ custom syntax. Instead, it is based on XML and hence ideally suited as
31
+ a common denominator for all kinds of services generating, archiving,
32
+ or processing graphs."
33
+
34
+ http://graphml.graphdrawing.org/
35
+
36
+ Format
37
+ ------
38
+ GraphML is an XML format. See
39
+ http://graphml.graphdrawing.org/specification.html for the specification and
40
+ http://graphml.graphdrawing.org/primer/graphml-primer.html
41
+ for examples.
42
+ """
43
+ import warnings
44
+ from collections import defaultdict
45
+
46
+ import networkx as nx
47
+ from networkx.utils import open_file
48
+
49
+ __all__ = [
50
+ "write_graphml",
51
+ "read_graphml",
52
+ "generate_graphml",
53
+ "write_graphml_xml",
54
+ "write_graphml_lxml",
55
+ "parse_graphml",
56
+ "GraphMLWriter",
57
+ "GraphMLReader",
58
+ ]
59
+
60
+
61
@open_file(1, mode="wb")
def write_graphml_xml(
    G,
    path,
    encoding="utf-8",
    prettyprint=True,
    infer_numeric_types=False,
    named_key_ids=False,
    edge_id_from_attribute=None,
):
    """Write graph G to ``path`` in GraphML XML format.

    Parameters
    ----------
    G : graph
        A networkx graph to serialize.
    path : file or string
        File or filename to write to.
        Filenames ending in .gz or .bz2 will be compressed.
    encoding : string (optional)
        Encoding used for text data.
    prettyprint : bool (optional)
        If True, indent the output XML and insert line breaks.
    infer_numeric_types : boolean
        Whether numeric types should be generalized.
        For example, if edges hold both int and float 'weight' attributes,
        both are declared as floats in the GraphML output.
    named_key_ids : bool (optional)
        If True, use attr.name as the value of each key element's id attribute.
    edge_id_from_attribute : dict key (optional)
        When given, the graphml edge id is looked up in the edge data under
        this key. If `None` or the key is absent from the edge data, the edge
        key is used as the id when `G` is a MultiGraph; otherwise the edge id
        is left unset.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.write_graphml(G, "test.graphml")

    Notes
    -----
    Mixed graphs (directed and undirected edges together), hyperedges,
    nested graphs, and ports are not supported by this implementation.
    """
    # Build the ElementTree-backed writer, feed it the graph, then serialize
    # the whole document to the (already opened, binary-mode) stream.
    gml_writer = GraphMLWriter(
        encoding=encoding,
        prettyprint=prettyprint,
        infer_numeric_types=infer_numeric_types,
        named_key_ids=named_key_ids,
        edge_id_from_attribute=edge_id_from_attribute,
    )
    gml_writer.add_graph_element(G)
    gml_writer.dump(path)
114
+
115
+
116
@open_file(1, mode="wb")
def write_graphml_lxml(
    G,
    path,
    encoding="utf-8",
    prettyprint=True,
    infer_numeric_types=False,
    named_key_ids=False,
    edge_id_from_attribute=None,
):
    """Write graph G to ``path`` in GraphML XML format using lxml.

    This function uses the LXML framework and should be faster than the
    version based on the stdlib ``xml`` library. If lxml is not installed,
    it silently falls back to :func:`write_graphml_xml`.

    Parameters
    ----------
    G : graph
        A networkx graph to serialize.
    path : file or string
        File or filename to write to.
        Filenames ending in .gz or .bz2 will be compressed.
    encoding : string (optional)
        Encoding used for text data.
    prettyprint : bool (optional)
        If True, indent the output XML and insert line breaks.
    infer_numeric_types : boolean
        Whether numeric types should be generalized.
        For example, if edges hold both int and float 'weight' attributes,
        both are declared as floats in the GraphML output.
    named_key_ids : bool (optional)
        If True, use attr.name as the value of each key element's id attribute.
    edge_id_from_attribute : dict key (optional)
        When given, the graphml edge id is looked up in the edge data under
        this key. If `None` or the key is absent from the edge data, the edge
        key is used as the id when `G` is a MultiGraph; otherwise the edge id
        is left unset.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.write_graphml_lxml(G, "fourpath.graphml")

    Notes
    -----
    Mixed graphs (directed and undirected edges together), hyperedges,
    nested graphs, and ports are not supported by this implementation.
    """
    try:
        import lxml.etree as lxmletree
    except ImportError:
        # lxml unavailable: delegate to the pure-stdlib writer instead.
        return write_graphml_xml(
            G,
            path,
            encoding,
            prettyprint,
            infer_numeric_types,
            named_key_ids,
            edge_id_from_attribute,
        )

    # The lxml writer streams directly to the already-open path.
    GraphMLWriterLxml(
        path,
        graph=G,
        encoding=encoding,
        prettyprint=prettyprint,
        infer_numeric_types=infer_numeric_types,
        named_key_ids=named_key_ids,
        edge_id_from_attribute=edge_id_from_attribute,
    ).dump()
186
+
187
+
188
def generate_graphml(
    G,
    encoding="utf-8",
    prettyprint=True,
    named_key_ids=False,
    edge_id_from_attribute=None,
):
    """Generate GraphML lines for G, one line at a time.

    Parameters
    ----------
    G : graph
        A networkx graph to serialize.
    encoding : string (optional)
        Encoding used for text data.
    prettyprint : bool (optional)
        If True, indent the output XML and insert line breaks.
    named_key_ids : bool (optional)
        If True, use attr.name as the value of each key element's id attribute.
    edge_id_from_attribute : dict key (optional)
        When given, the graphml edge id is looked up in the edge data under
        this key. If `None` or the key is absent from the edge data, the edge
        key is used as the id when `G` is a MultiGraph; otherwise the edge id
        is left unset.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> linefeed = chr(10)  # linefeed = \n
    >>> s = linefeed.join(nx.generate_graphml(G))
    >>> for line in nx.generate_graphml(G):  # doctest: +SKIP
    ...     print(line)

    Notes
    -----
    Mixed graphs (directed and undirected edges together), hyperedges,
    nested graphs, and ports are not supported by this implementation.
    """
    # Serialize the whole document in memory, then hand it out line by line.
    gml_writer = GraphMLWriter(
        encoding=encoding,
        prettyprint=prettyprint,
        named_key_ids=named_key_ids,
        edge_id_from_attribute=edge_id_from_attribute,
    )
    gml_writer.add_graph_element(G)
    for line in str(gml_writer).splitlines():
        yield line
233
+
234
+
235
@open_file(0, mode="rb")
@nx._dispatch(graphs=None)
def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False):
    """Read a graph in GraphML format from ``path``.

    Parameters
    ----------
    path : file or string
        File or filename to read from.
        Filenames ending in .gz or .bz2 will be decompressed.

    node_type: Python type (default: str)
        Convert node ids to this type

    edge_key_type: Python type (default: int)
        Convert graphml edge ids to this type. Multigraphs use id as edge key.
        Non-multigraphs add to edge attribute dict with name "id".

    force_multigraph : bool (default: False)
        If True, return a multigraph with edge keys. If False (the default)
        return a multigraph when multiedges are in the graph.

    Returns
    -------
    graph: NetworkX graph
        If parallel edges are present or `force_multigraph=True` then
        a MultiGraph or MultiDiGraph is returned. Otherwise a Graph/DiGraph.
        The returned graph is directed if the file indicates it should be.

    Raises
    ------
    NetworkXError
        If the file cannot be parsed as GraphML.

    Notes
    -----
    Default node and edge attributes are not propagated to each node and edge.
    They can be obtained from `G.graph` and applied to node and edge attributes
    if desired using something like this:

    >>> default_color = G.graph["node_default"]["color"]  # doctest: +SKIP
    >>> for node, data in G.nodes(data=True):  # doctest: +SKIP
    ...     if "color" not in data:
    ...         data["color"] = default_color
    >>> default_color = G.graph["edge_default"]["color"]  # doctest: +SKIP
    >>> for u, v, data in G.edges(data=True):  # doctest: +SKIP
    ...     if "color" not in data:
    ...         data["color"] = default_color

    Mixed graphs (directed and undirected edges together), hypergraphs,
    nested graphs, and ports are not supported by this implementation.

    For multigraphs the GraphML edge "id" will be used as the edge
    key. If not specified then they "key" attribute will be used. If
    there is no "key" attribute a default NetworkX multigraph edge key
    will be provided.

    Files with the yEd "yfiles" extension can be read. The type of the node's
    shape is preserved in the `shape_type` node attribute.

    yEd compressed files ("file.graphmlz" extension) can be read by renaming
    the file to "file.graphml.gz".
    """
    reader = GraphMLReader(node_type, edge_key_type, force_multigraph)
    # A document may contain several <graph> elements; collect them all.
    glist = list(reader(path=path))
    if not glist:
        # Nothing parsed: some producers omit the xmlns on <graphml>.
        # Re-read the raw bytes with a namespaced header patched in and retry.
        header = b'<graphml xmlns="http://graphml.graphdrawing.org/xmlns">'
        path.seek(0)
        patched = path.read().replace(b"<graphml>", header)
        glist = list(reader(string=patched))
        if not glist:
            raise nx.NetworkXError("file not successfully read as graphml")
    return glist[0]
307
+
308
+
309
@nx._dispatch(graphs=None)
def parse_graphml(
    graphml_string, node_type=str, edge_key_type=int, force_multigraph=False
):
    """Read a graph in GraphML format from a string.

    Parameters
    ----------
    graphml_string : string
        String containing graphml information
        (e.g., contents of a graphml file).

    node_type: Python type (default: str)
        Convert node ids to this type

    edge_key_type: Python type (default: int)
        Convert graphml edge ids to this type. Multigraphs use id as edge key.
        Non-multigraphs add to edge attribute dict with name "id".

    force_multigraph : bool (default: False)
        If True, return a multigraph with edge keys. If False (the default)
        return a multigraph when multiedges are in the graph.

    Returns
    -------
    graph: NetworkX graph
        If no parallel edges are found a Graph or DiGraph is returned.
        Otherwise a MultiGraph or MultiDiGraph is returned.

    Raises
    ------
    NetworkXError
        If the string cannot be parsed as GraphML.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> linefeed = chr(10)  # linefeed = \n
    >>> s = linefeed.join(nx.generate_graphml(G))
    >>> H = nx.parse_graphml(s)

    Notes
    -----
    Default node and edge attributes are not propagated to each node and edge.
    They can be obtained from `G.graph` and applied to node and edge attributes
    if desired using something like this:

    >>> default_color = G.graph["node_default"]["color"]  # doctest: +SKIP
    >>> for node, data in G.nodes(data=True):  # doctest: +SKIP
    ...     if "color" not in data:
    ...         data["color"] = default_color
    >>> default_color = G.graph["edge_default"]["color"]  # doctest: +SKIP
    >>> for u, v, data in G.edges(data=True):  # doctest: +SKIP
    ...     if "color" not in data:
    ...         data["color"] = default_color

    Mixed graphs (directed and undirected edges together), hypergraphs,
    nested graphs, and ports are not supported by this implementation.

    For multigraphs the GraphML edge "id" will be used as the edge
    key. If not specified then they "key" attribute will be used. If
    there is no "key" attribute a default NetworkX multigraph edge key
    will be provided.
    """
    reader = GraphMLReader(node_type, edge_key_type, force_multigraph)
    # A document may contain several <graph> elements; collect them all.
    graphs = list(reader(string=graphml_string))
    if not graphs:
        # Nothing parsed: some producers omit the xmlns on <graphml>.
        # Patch a namespaced header into the string and retry.
        header = '<graphml xmlns="http://graphml.graphdrawing.org/xmlns">'
        graphs = list(reader(string=graphml_string.replace("<graphml>", header)))
        if not graphs:
            raise nx.NetworkXError("file not successfully read as graphml")
    return graphs[0]
381
+
382
+
383
class GraphML:
    """Shared namespace constants and type tables for GraphML readers/writers.

    Subclasses must call :meth:`construct_types` before using
    ``self.xml_type`` / ``self.python_type``.
    """

    NS_GRAPHML = "http://graphml.graphdrawing.org/xmlns"
    NS_XSI = "http://www.w3.org/2001/XMLSchema-instance"
    # xmlns:y="http://www.yworks.com/xml/graphml"
    NS_Y = "http://www.yworks.com/xml/graphml"
    SCHEMALOCATION = " ".join(
        [
            "http://graphml.graphdrawing.org/xmlns",
            "http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd",
        ]
    )

    def construct_types(self):
        """Build the Python-type <-> GraphML ``attr.type`` mapping tables.

        Sets ``self.xml_type`` (Python type -> GraphML type name; last entry
        for a key wins) and ``self.python_type`` (GraphML type name -> Python
        type used when reading).
        """
        types = [
            (int, "integer"),  # for Gephi GraphML bug
            (str, "yfiles"),
            (str, "string"),
            (int, "int"),
            (int, "long"),
            (float, "float"),
            (float, "double"),
            (bool, "boolean"),
        ]

        # These additions to types allow writing numpy types
        try:
            import numpy as np
        except ImportError:
            # numpy is optional; without it only builtin types are mapped.
            pass
        else:
            # prepend so that python types are created upon read (last entry wins)
            types = [
                (np.float64, "float"),
                (np.float32, "float"),
                (np.float16, "float"),
                (np.int_, "int"),
                (np.int8, "int"),
                (np.int16, "int"),
                (np.int32, "int"),
                (np.int64, "int"),
                (np.uint8, "int"),
                (np.uint16, "int"),
                (np.uint32, "int"),
                (np.uint64, "int"),
                (np.intc, "int"),
                (np.intp, "int"),
            ] + types

        self.xml_type = dict(types)
        self.python_type = dict(reversed(a) for a in types)

    # This page says that data types in GraphML follow Java(TM).
    # http://graphml.graphdrawing.org/primer/graphml-primer.html#AttributesDefinition
    # true and false are the only boolean literals:
    # http://en.wikibooks.org/wiki/Java_Programming/Literals#Boolean_Literals
    convert_bool = {
        # We use data.lower() in actual use.
        "true": True,
        "false": False,
        # Include integer strings for convenience.
        "0": False,
        0: False,
        "1": True,
        1: True,
    }

    def get_xml_type(self, key):
        """Wrapper around the xml_type dict that raises a more informative
        exception message when a user attempts to use data of a type not
        supported by GraphML.

        ``key`` is a Python type object (e.g. ``int``); returns the GraphML
        ``attr.type`` name for it.
        """
        try:
            return self.xml_type[key]
        except KeyError as err:
            # `key` is itself the offending type, so report it directly
            # (reporting type(key) would always print `<class 'type'>`).
            raise TypeError(
                f"GraphML does not support type {key} as data values."
            ) from err
460
+
461
+
462
class GraphMLWriter(GraphML):
    """Serialize NetworkX graphs to GraphML via ``xml.etree.ElementTree``.

    The full document is built in memory under ``self.xml`` (the root
    ``<graphml>`` element); ``<key>`` declarations are inserted at the top
    of the document as attribute names are first encountered.
    """

    def __init__(
        self,
        graph=None,
        encoding="utf-8",
        prettyprint=True,
        infer_numeric_types=False,
        named_key_ids=False,
        edge_id_from_attribute=None,
    ):
        self.construct_types()
        from xml.etree.ElementTree import Element

        # Element factory; the lxml subclass swaps in lxml's Element.
        self.myElement = Element

        self.infer_numeric_types = infer_numeric_types
        self.prettyprint = prettyprint
        self.named_key_ids = named_key_ids
        self.edge_id_from_attribute = edge_id_from_attribute
        self.encoding = encoding
        # Root <graphml> element with the standard namespace declarations.
        self.xml = self.myElement(
            "graphml",
            {
                "xmlns": self.NS_GRAPHML,
                "xmlns:xsi": self.NS_XSI,
                "xsi:schemaLocation": self.SCHEMALOCATION,
            },
        )
        self.keys = {}  # (name, attr_type, scope) -> key element id
        self.attributes = defaultdict(list)  # xml element -> pending data entries
        self.attribute_types = defaultdict(set)  # (name, scope) -> observed types

        if graph is not None:
            self.add_graph_element(graph)

    def __str__(self):
        """Return the document serialized to a string (indented if prettyprint)."""
        from xml.etree.ElementTree import tostring

        if self.prettyprint:
            self.indent(self.xml)
        s = tostring(self.xml).decode(self.encoding)
        return s

    def attr_type(self, name, scope, value):
        """Infer the attribute type of data named name. Currently this only
        supports inference of numeric types.

        If self.infer_numeric_types is false, type is used. Otherwise, pick the
        most general of types found across all values with name and scope. This
        means edges with data named 'weight' are treated separately from nodes
        with data named 'weight'.
        """
        if self.infer_numeric_types:
            types = self.attribute_types[(name, scope)]

            if len(types) > 1:
                # Widen to the most general GraphML type observed so far:
                # any string forces str, any float forces float, else int.
                types = {self.get_xml_type(t) for t in types}
                if "string" in types:
                    return str
                elif "float" in types or "double" in types:
                    return float
                else:
                    return int
            else:
                # Only one type seen; use it as-is.
                return list(types)[0]
        else:
            return type(value)

    def get_key(self, name, attr_type, scope, default):
        """Return the key id for (name, attr_type, scope), creating and
        inserting a <key> element (with optional <default>) on first use."""
        keys_key = (name, attr_type, scope)
        try:
            return self.keys[keys_key]
        except KeyError:
            if self.named_key_ids:
                new_id = name
            else:
                # Sequential ids: d0, d1, ... in registration order.
                new_id = f"d{len(list(self.keys))}"

            self.keys[keys_key] = new_id
            key_kwargs = {
                "id": new_id,
                "for": scope,
                "attr.name": name,
                "attr.type": attr_type,
            }
            key_element = self.myElement("key", **key_kwargs)
            # add subelement for data default value if present
            if default is not None:
                default_element = self.myElement("default")
                default_element.text = str(default)
                key_element.append(default_element)
            # Keys must precede graph content; insert at the document top.
            self.xml.insert(0, key_element)
            return new_id

    def add_data(self, name, element_type, value, scope="all", default=None):
        """
        Make a data element for an edge or a node. Keep a log of the
        type in the keys table.
        """
        if element_type not in self.xml_type:
            raise nx.NetworkXError(
                f"GraphML writer does not support {element_type} as data values."
            )
        keyid = self.get_key(name, self.get_xml_type(element_type), scope, default)
        data_element = self.myElement("data", key=keyid)
        data_element.text = str(value)
        return data_element

    def add_attributes(self, scope, xml_obj, data, default):
        """Appends attribute data to edges or nodes, and stores type information
        to be added later. See add_graph_element.
        """
        for k, v in data.items():
            # Record the observed type for later inference (see attr_type).
            self.attribute_types[(str(k), scope)].add(type(v))
            self.attributes[xml_obj].append([k, v, scope, default.get(k)])

    def add_nodes(self, G, graph_element):
        """Append a <node> element (with queued attributes) for each node of G."""
        default = G.graph.get("node_default", {})
        for node, data in G.nodes(data=True):
            node_element = self.myElement("node", id=str(node))
            self.add_attributes("node", node_element, data, default)
            graph_element.append(node_element)

    def add_edges(self, G, graph_element):
        """Append an <edge> element (with queued attributes) for each edge of G."""
        if G.is_multigraph():
            for u, v, key, data in G.edges(data=True, keys=True):
                # Edge id comes from the configured data attribute when
                # present, otherwise from the multigraph edge key.
                edge_element = self.myElement(
                    "edge",
                    source=str(u),
                    target=str(v),
                    id=str(data.get(self.edge_id_from_attribute))
                    if self.edge_id_from_attribute
                    and self.edge_id_from_attribute in data
                    else str(key),
                )
                default = G.graph.get("edge_default", {})
                self.add_attributes("edge", edge_element, data, default)
                graph_element.append(edge_element)
        else:
            for u, v, data in G.edges(data=True):
                if self.edge_id_from_attribute and self.edge_id_from_attribute in data:
                    # select attribute to be edge id
                    edge_element = self.myElement(
                        "edge",
                        source=str(u),
                        target=str(v),
                        id=str(data.get(self.edge_id_from_attribute)),
                    )
                else:
                    # default: no edge id
                    edge_element = self.myElement("edge", source=str(u), target=str(v))
                default = G.graph.get("edge_default", {})
                self.add_attributes("edge", edge_element, data, default)
                graph_element.append(edge_element)

    def add_graph_element(self, G):
        """
        Serialize graph G in GraphML to the stream.
        """
        if G.is_directed():
            default_edge_type = "directed"
        else:
            default_edge_type = "undirected"

        # NOTE(review): pop() removes "id" from G.graph as a side effect.
        graphid = G.graph.pop("id", None)
        if graphid is None:
            graph_element = self.myElement("graph", edgedefault=default_edge_type)
        else:
            graph_element = self.myElement(
                "graph", edgedefault=default_edge_type, id=graphid
            )
        default = {}
        # Graph-level attributes, excluding the per-element default dicts.
        data = {
            k: v
            for (k, v) in G.graph.items()
            if k not in ["node_default", "edge_default"]
        }
        self.add_attributes("graph", graph_element, data, default)
        self.add_nodes(G, graph_element)
        self.add_edges(G, graph_element)

        # self.attributes contains a mapping from XML Objects to a list of
        # data that needs to be added to them.
        # We postpone processing in order to do type inference/generalization.
        # See self.attr_type
        for xml_obj, data in self.attributes.items():
            for k, v, scope, default in data:
                xml_obj.append(
                    self.add_data(
                        str(k), self.attr_type(k, scope, v), str(v), scope, default
                    )
                )
        self.xml.append(graph_element)

    def add_graphs(self, graph_list):
        """Add many graphs to this GraphML document."""
        for G in graph_list:
            self.add_graph_element(G)

    def dump(self, stream):
        """Write the document (with XML declaration) to a binary stream."""
        from xml.etree.ElementTree import ElementTree

        if self.prettyprint:
            self.indent(self.xml)
        document = ElementTree(self.xml)
        document.write(stream, encoding=self.encoding, xml_declaration=True)

    def indent(self, elem, level=0):
        # in-place prettyprint formatter
        # Recursively sets .text/.tail whitespace so tostring() emits
        # newline-and-indent formatting; only touches empty/blank text.
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            # After this loop `elem` is the LAST child, whose tail is
            # then dedented one level below.
            for elem in elem:
                self.indent(elem, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i
684
+
685
+
686
class IncrementalElement:
    """Element-like facade over lxml's incremental ``_IncrementalWriter``.

    This wrapper does not intend to be a complete implementation but rather
    to deal with those calls used in GraphMLWriter: only ``append`` is
    provided, and it streams the element straight to the underlying writer
    instead of attaching it to an in-memory tree.
    """

    def __init__(self, xml, prettyprint):
        # `xml` must expose write(element, pretty_print=...).
        self.xml = xml
        self.prettyprint = prettyprint

    def append(self, element):
        # Write immediately rather than accumulating children.
        self.xml.write(element, pretty_print=self.prettyprint)
699
+
700
+
701
class GraphMLWriterLxml(GraphMLWriter):
    """GraphML writer backed by lxml's incremental ``xmlfile`` API.

    Unlike :class:`GraphMLWriter`, nodes and edges are streamed to ``path``
    as they are produced instead of accumulating a full element tree in
    memory; ``<key>`` declarations are collected first and written before
    the ``<graph>`` element.
    """

    def __init__(
        self,
        path,
        graph=None,
        encoding="utf-8",
        prettyprint=True,
        infer_numeric_types=False,
        named_key_ids=False,
        edge_id_from_attribute=None,
    ):
        self.construct_types()
        import lxml.etree as lxmletree

        # Use lxml's Element factory for the inherited helper methods.
        self.myElement = lxmletree.Element

        self._encoding = encoding
        self._prettyprint = prettyprint
        self.named_key_ids = named_key_ids
        self.edge_id_from_attribute = edge_id_from_attribute
        self.infer_numeric_types = infer_numeric_types

        # Open the incremental writer; closed again in dump().
        self._xml_base = lxmletree.xmlfile(path, encoding=encoding)
        self._xml = self._xml_base.__enter__()
        self._xml.write_declaration()

        # We need to have a xml variable that support insertion. This call is
        # used for adding the keys to the document.
        # We will store those keys in a plain list, and then after the graph
        # element is closed we will add them to the main graphml element.
        self.xml = []
        self._keys = self.xml
        self._graphml = self._xml.element(
            "graphml",
            {
                "xmlns": self.NS_GRAPHML,
                "xmlns:xsi": self.NS_XSI,
                "xsi:schemaLocation": self.SCHEMALOCATION,
            },
        )
        self._graphml.__enter__()
        self.keys = {}  # (name, attr_type, scope) -> key element id
        self.attribute_types = defaultdict(set)  # (name, scope) -> observed types

        if graph is not None:
            self.add_graph_element(graph)

    def add_graph_element(self, G):
        """
        Serialize graph G in GraphML to the stream.
        """
        if G.is_directed():
            default_edge_type = "directed"
        else:
            default_edge_type = "undirected"

        # NOTE(review): pop() removes "id" from G.graph as a side effect.
        graphid = G.graph.pop("id", None)
        if graphid is None:
            graph_element = self._xml.element("graph", edgedefault=default_edge_type)
        else:
            graph_element = self._xml.element(
                "graph", edgedefault=default_edge_type, id=graphid
            )

        # gather attributes types for the whole graph
        # to find the most general numeric format needed.
        # Then pass through attributes to create key_id for each.
        graphdata = {
            k: v
            for k, v in G.graph.items()
            if k not in ("node_default", "edge_default")
        }
        node_default = G.graph.get("node_default", {})
        edge_default = G.graph.get("edge_default", {})
        # Graph attributes
        # Two passes: first record every observed type, then resolve each
        # key's final (possibly widened) type — see attr_type.
        for k, v in graphdata.items():
            self.attribute_types[(str(k), "graph")].add(type(v))
        for k, v in graphdata.items():
            element_type = self.get_xml_type(self.attr_type(k, "graph", v))
            self.get_key(str(k), element_type, "graph", None)
        # Nodes and data
        for node, d in G.nodes(data=True):
            for k, v in d.items():
                self.attribute_types[(str(k), "node")].add(type(v))
        for node, d in G.nodes(data=True):
            for k, v in d.items():
                T = self.get_xml_type(self.attr_type(k, "node", v))
                self.get_key(str(k), T, "node", node_default.get(k))
        # Edges and data
        if G.is_multigraph():
            for u, v, ekey, d in G.edges(keys=True, data=True):
                for k, v in d.items():
                    self.attribute_types[(str(k), "edge")].add(type(v))
            for u, v, ekey, d in G.edges(keys=True, data=True):
                for k, v in d.items():
                    T = self.get_xml_type(self.attr_type(k, "edge", v))
                    self.get_key(str(k), T, "edge", edge_default.get(k))
        else:
            for u, v, d in G.edges(data=True):
                for k, v in d.items():
                    self.attribute_types[(str(k), "edge")].add(type(v))
            for u, v, d in G.edges(data=True):
                for k, v in d.items():
                    T = self.get_xml_type(self.attr_type(k, "edge", v))
                    self.get_key(str(k), T, "edge", edge_default.get(k))

        # Now add attribute keys to the xml file
        for key in self.xml:
            self._xml.write(key, pretty_print=self._prettyprint)

        # The incremental_writer writes each node/edge as it is created
        incremental_writer = IncrementalElement(self._xml, self._prettyprint)
        with graph_element:
            self.add_attributes("graph", incremental_writer, graphdata, {})
            self.add_nodes(G, incremental_writer)  # adds attributes too
            self.add_edges(G, incremental_writer)  # adds attributes too

    def add_attributes(self, scope, xml_obj, data, default):
        """Appends attribute data."""
        for k, v in data.items():
            # Unlike the parent class, data elements are emitted immediately
            # (no deferred type inference pass is needed here because types
            # were pre-collected in add_graph_element).
            data_element = self.add_data(
                str(k), self.attr_type(str(k), scope, v), str(v), scope, default.get(k)
            )
            xml_obj.append(data_element)

    def __str__(self):
        # The streaming writer cannot re-serialize what was already written,
        # so fall back to the default object repr instead of the parent's
        # document-dumping __str__.
        return object.__str__(self)

    def dump(self):
        """Close the <graphml> element and the underlying xmlfile writer."""
        self._graphml.__exit__(None, None, None)
        self._xml_base.__exit__(None, None, None)
832
+
833
+
834
# Default writer: prefer the lxml backend; write_graphml_lxml itself falls
# back to the stdlib ElementTree writer when lxml is not installed.
write_graphml = write_graphml_lxml
836
+
837
+
838
class GraphMLReader(GraphML):
    """Read a GraphML document. Produces NetworkX graph objects."""

    def __init__(self, node_type=str, edge_key_type=int, force_multigraph=False):
        self.construct_types()
        self.node_type = node_type  # cast applied to each node id string
        self.edge_key_type = edge_key_type  # cast applied to multigraph edge ids
        self.multigraph = force_multigraph  # If False, test for multiedges
        self.edge_ids = {}  # dict mapping (u,v) tuples to edge id attributes
847
+
848
    def __call__(self, path=None, string=None):
        """Yield one NetworkX graph per <graph> element in the input.

        Exactly one of ``path`` (file or filename) and ``string`` (GraphML
        document text/bytes) must be supplied; otherwise ValueError is raised.
        """
        from xml.etree.ElementTree import ElementTree, fromstring

        if path is not None:
            self.xml = ElementTree(file=path)
        elif string is not None:
            self.xml = fromstring(string)
        else:
            raise ValueError("Must specify either 'path' or 'string' as kwarg")
        # <key> declarations are document-wide and shared by every <graph>.
        (keys, defaults) = self.find_graphml_keys(self.xml)
        for g in self.xml.findall(f"{{{self.NS_GRAPHML}}}graph"):
            yield self.make_graph(g, keys, defaults)
860
+
861
    def make_graph(self, graph_xml, graphml_keys, defaults, G=None):
        """Build a NetworkX graph from one <graph> element.

        ``G`` is non-None when recursing into yEd group-node subgraphs
        (see add_node), in which case nodes/edges are merged into it.
        """
        # set default graph type
        edgedefault = graph_xml.get("edgedefault", None)
        if G is None:
            # Always start with a multigraph; possibly downgraded below.
            if edgedefault == "directed":
                G = nx.MultiDiGraph()
            else:
                G = nx.MultiGraph()
        # set defaults for graph attributes
        G.graph["node_default"] = {}
        G.graph["edge_default"] = {}
        for key_id, value in defaults.items():
            key_for = graphml_keys[key_id]["for"]
            name = graphml_keys[key_id]["name"]
            python_type = graphml_keys[key_id]["type"]
            if key_for == "node":
                G.graph["node_default"].update({name: python_type(value)})
            if key_for == "edge":
                G.graph["edge_default"].update({name: python_type(value)})
        # hyperedges are not supported
        hyperedge = graph_xml.find(f"{{{self.NS_GRAPHML}}}hyperedge")
        if hyperedge is not None:
            raise nx.NetworkXError("GraphML reader doesn't support hyperedges")
        # add nodes
        for node_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}node"):
            self.add_node(G, node_xml, graphml_keys, defaults)
        # add edges
        for edge_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}edge"):
            self.add_edge(G, edge_xml, graphml_keys)
        # add graph data
        data = self.decode_data_elements(graphml_keys, graph_xml)
        G.graph.update(data)

        # switch to Graph or DiGraph if no parallel edges were found
        if self.multigraph:
            return G

        G = nx.DiGraph(G) if G.is_directed() else nx.Graph(G)
        # add explicit edge "id" from file as attribute in NX graph.
        nx.set_edge_attributes(G, values=self.edge_ids, name="id")
        return G
902
+
903
+ def add_node(self, G, node_xml, graphml_keys, defaults):
904
+ """Add a node to the graph."""
905
+ # warn on finding unsupported ports tag
906
+ ports = node_xml.find(f"{{{self.NS_GRAPHML}}}port")
907
+ if ports is not None:
908
+ warnings.warn("GraphML port tag not supported.")
909
+ # find the node by id and cast it to the appropriate type
910
+ node_id = self.node_type(node_xml.get("id"))
911
+ # get data/attributes for node
912
+ data = self.decode_data_elements(graphml_keys, node_xml)
913
+ G.add_node(node_id, **data)
914
+ # get child nodes
915
+ if node_xml.attrib.get("yfiles.foldertype") == "group":
916
+ graph_xml = node_xml.find(f"{{{self.NS_GRAPHML}}}graph")
917
+ self.make_graph(graph_xml, graphml_keys, defaults, G)
918
+
919
+ def add_edge(self, G, edge_element, graphml_keys):
920
+ """Add an edge to the graph."""
921
+ # warn on finding unsupported ports tag
922
+ ports = edge_element.find(f"{{{self.NS_GRAPHML}}}port")
923
+ if ports is not None:
924
+ warnings.warn("GraphML port tag not supported.")
925
+
926
+ # raise error if we find mixed directed and undirected edges
927
+ directed = edge_element.get("directed")
928
+ if G.is_directed() and directed == "false":
929
+ msg = "directed=false edge found in directed graph."
930
+ raise nx.NetworkXError(msg)
931
+ if (not G.is_directed()) and directed == "true":
932
+ msg = "directed=true edge found in undirected graph."
933
+ raise nx.NetworkXError(msg)
934
+
935
+ source = self.node_type(edge_element.get("source"))
936
+ target = self.node_type(edge_element.get("target"))
937
+ data = self.decode_data_elements(graphml_keys, edge_element)
938
+ # GraphML stores edge ids as an attribute
939
+ # NetworkX uses them as keys in multigraphs too if no key
940
+ # attribute is specified
941
+ edge_id = edge_element.get("id")
942
+ if edge_id:
943
+ # self.edge_ids is used by `make_graph` method for non-multigraphs
944
+ self.edge_ids[source, target] = edge_id
945
+ try:
946
+ edge_id = self.edge_key_type(edge_id)
947
+ except ValueError: # Could not convert.
948
+ pass
949
+ else:
950
+ edge_id = data.get("key")
951
+
952
+ if G.has_edge(source, target):
953
+ # mark this as a multigraph
954
+ self.multigraph = True
955
+
956
+ # Use add_edges_from to avoid error with add_edge when `'key' in data`
957
+ # Note there is only one edge here...
958
+ G.add_edges_from([(source, target, edge_id, data)])
959
+
960
+ def decode_data_elements(self, graphml_keys, obj_xml):
961
+ """Use the key information to decode the data XML if present."""
962
+ data = {}
963
+ for data_element in obj_xml.findall(f"{{{self.NS_GRAPHML}}}data"):
964
+ key = data_element.get("key")
965
+ try:
966
+ data_name = graphml_keys[key]["name"]
967
+ data_type = graphml_keys[key]["type"]
968
+ except KeyError as err:
969
+ raise nx.NetworkXError(f"Bad GraphML data: no key {key}") from err
970
+ text = data_element.text
971
+ # assume anything with subelements is a yfiles extension
972
+ if text is not None and len(list(data_element)) == 0:
973
+ if data_type == bool:
974
+ # Ignore cases.
975
+ # http://docs.oracle.com/javase/6/docs/api/java/lang/
976
+ # Boolean.html#parseBoolean%28java.lang.String%29
977
+ data[data_name] = self.convert_bool[text.lower()]
978
+ else:
979
+ data[data_name] = data_type(text)
980
+ elif len(list(data_element)) > 0:
981
+ # Assume yfiles as subelements, try to extract node_label
982
+ node_label = None
983
+ # set GenericNode's configuration as shape type
984
+ gn = data_element.find(f"{{{self.NS_Y}}}GenericNode")
985
+ if gn:
986
+ data["shape_type"] = gn.get("configuration")
987
+ for node_type in ["GenericNode", "ShapeNode", "SVGNode", "ImageNode"]:
988
+ pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}"
989
+ geometry = data_element.find(f"{pref}Geometry")
990
+ if geometry is not None:
991
+ data["x"] = geometry.get("x")
992
+ data["y"] = geometry.get("y")
993
+ if node_label is None:
994
+ node_label = data_element.find(f"{pref}NodeLabel")
995
+ shape = data_element.find(f"{pref}Shape")
996
+ if shape is not None:
997
+ data["shape_type"] = shape.get("type")
998
+ if node_label is not None:
999
+ data["label"] = node_label.text
1000
+
1001
+ # check all the different types of edges available in yEd.
1002
+ for edge_type in [
1003
+ "PolyLineEdge",
1004
+ "SplineEdge",
1005
+ "QuadCurveEdge",
1006
+ "BezierEdge",
1007
+ "ArcEdge",
1008
+ ]:
1009
+ pref = f"{{{self.NS_Y}}}{edge_type}/{{{self.NS_Y}}}"
1010
+ edge_label = data_element.find(f"{pref}EdgeLabel")
1011
+ if edge_label is not None:
1012
+ break
1013
+
1014
+ if edge_label is not None:
1015
+ data["label"] = edge_label.text
1016
+ return data
1017
+
1018
+ def find_graphml_keys(self, graph_element):
1019
+ """Extracts all the keys and key defaults from the xml."""
1020
+ graphml_keys = {}
1021
+ graphml_key_defaults = {}
1022
+ for k in graph_element.findall(f"{{{self.NS_GRAPHML}}}key"):
1023
+ attr_id = k.get("id")
1024
+ attr_type = k.get("attr.type")
1025
+ attr_name = k.get("attr.name")
1026
+ yfiles_type = k.get("yfiles.type")
1027
+ if yfiles_type is not None:
1028
+ attr_name = yfiles_type
1029
+ attr_type = "yfiles"
1030
+ if attr_type is None:
1031
+ attr_type = "string"
1032
+ warnings.warn(f"No key type for id {attr_id}. Using string")
1033
+ if attr_name is None:
1034
+ raise nx.NetworkXError(f"Unknown key for id {attr_id}.")
1035
+ graphml_keys[attr_id] = {
1036
+ "name": attr_name,
1037
+ "type": self.python_type[attr_type],
1038
+ "for": k.get("for"),
1039
+ }
1040
+ # check for "default" sub-element of key element
1041
+ default = k.find(f"{{{self.NS_GRAPHML}}}default")
1042
+ if default is not None:
1043
+ # Handle default values identically to data element values
1044
+ python_type = graphml_keys[attr_id]["type"]
1045
+ if python_type == bool:
1046
+ graphml_key_defaults[attr_id] = self.convert_bool[
1047
+ default.text.lower()
1048
+ ]
1049
+ else:
1050
+ graphml_key_defaults[attr_id] = python_type(default.text)
1051
+ return graphml_keys, graphml_key_defaults
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/readwrite/json_graph/__init__.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *********
3
+ JSON data
4
+ *********
5
+ Generate and parse JSON serializable data for NetworkX graphs.
6
+
7
+ These formats are suitable for use with the d3.js examples https://d3js.org/
8
+
9
+ The three formats that you can generate with NetworkX are:
10
+
11
+ - node-link like in the d3.js example https://bl.ocks.org/mbostock/4062045
12
+ - tree like in the d3.js example https://bl.ocks.org/mbostock/4063550
13
+ - adjacency like in the d3.js example https://bost.ocks.org/mike/miserables/
14
+ """
15
+ from networkx.readwrite.json_graph.node_link import *
16
+ from networkx.readwrite.json_graph.adjacency import *
17
+ from networkx.readwrite.json_graph.tree import *
18
+ from networkx.readwrite.json_graph.cytoscape import *