hysts (HF staff) committed
Commit 3c9cf58 · 1 parent: 428842a
.pre-commit-config.yaml CHANGED
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
    hooks:
      - id: check-executables-have-shebangs
      - id: check-json
@@ -18,13 +18,15 @@ repos:
    hooks:
      - id: docformatter
        args: ["--in-place"]
-  - repo: https://github.com/pycqa/isort
-    rev: 5.13.2
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.8.4
    hooks:
-      - id: isort
-        args: ["--profile", "black"]
+      - id: ruff
+        args: ["--fix"]
+      - id: ruff-format
+        args: ["--line-length", "119"]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.9.0
+    rev: v1.14.0
    hooks:
      - id: mypy
        args: ["--ignore-missing-imports"]
@@ -35,14 +37,8 @@ repos:
          "types-PyYAML",
          "types-pytz",
        ]
-  - repo: https://github.com/psf/black
-    rev: 24.4.0
-    hooks:
-      - id: black
-        language_version: python3.10
-        args: ["--line-length", "119"]
   - repo: https://github.com/kynan/nbstripout
-    rev: 0.7.1
+    rev: 0.8.1
    hooks:
      - id: nbstripout
        args:
@@ -51,7 +47,7 @@ repos:
          "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
        ]
   - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.8.5
+    rev: 1.9.1
    hooks:
      - id: nbqa-black
      - id: nbqa-pyupgrade
.python-version ADDED
@@ -0,0 +1 @@
+3.10
.vscode/extensions.json ADDED
@@ -0,0 +1,8 @@
+{
+  "recommendations": [
+    "ms-python.python",
+    "charliermarsh.ruff",
+    "streetsidesoftware.code-spell-checker",
+    "tamasfe.even-better-toml"
+  ]
+}
.vscode/settings.json CHANGED
@@ -2,29 +2,20 @@
   "editor.formatOnSave": true,
   "files.insertFinalNewline": false,
   "[python]": {
-    "editor.defaultFormatter": "ms-python.black-formatter",
+    "editor.defaultFormatter": "charliermarsh.ruff",
     "editor.formatOnType": true,
     "editor.codeActionsOnSave": {
+      "source.fixAll.ruff": "explicit",
       "source.organizeImports": "explicit"
     }
   },
   "[jupyter]": {
     "files.insertFinalNewline": false
   },
-  "black-formatter.args": [
-    "--line-length=119"
-  ],
-  "isort.args": ["--profile", "black"],
-  "flake8.args": [
-    "--max-line-length=119"
-  ],
-  "ruff.lint.args": [
-    "--line-length=119"
-  ],
   "notebook.output.scrolling": true,
   "notebook.formatOnCellExecution": true,
   "notebook.formatOnSave.enabled": true,
-  "notebook.codeActionsOnSave": {
-    "source.organizeImports": "explicit"
-  }
+  "notebook.codeActionsOnSave": {
+    "source.organizeImports": "explicit"
+  }
 }
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 😻
 colorFrom: gray
 colorTo: purple
 sdk: gradio
-sdk_version: 4.36.1
+sdk_version: 5.9.1
 app_file: app.py
 pinned: false
 license: mit
app.py CHANGED
@@ -1,7 +1,5 @@
 #!/usr/bin/env python
 
-from __future__ import annotations
-
 import os
 import random
 
@@ -39,7 +37,7 @@ else:
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     if randomize_seed:
-        seed = random.randint(0, MAX_SEED)
+        seed = random.randint(0, MAX_SEED)  # noqa: S311
     return seed
 
 
@@ -54,6 +52,7 @@ def generate(
     guidance_scale: float = 4.0,
     num_inference_steps_prior: int = 50,
     num_inference_steps: int = 100,
+    progress: gr.Progress = gr.Progress(track_tqdm=True),  # noqa: ARG001, B008
 ) -> PIL.Image.Image:
     generator = torch.Generator().manual_seed(seed)
     image_embeds, negative_image_embeds = pipe_prior(
@@ -63,7 +62,7 @@ def generate(
         guidance_scale=guidance_scale_prior,
         num_inference_steps=num_inference_steps_prior,
     ).to_tuple()
-    image = pipe(
+    return pipe(
         image_embeds=image_embeds,
         negative_image_embeds=negative_image_embeds,
         height=height,
@@ -72,7 +71,6 @@ def generate(
         guidance_scale=guidance_scale,
         num_inference_steps=num_inference_steps,
     ).images[0]
-    return image
 
 
 examples = [
@@ -87,7 +85,7 @@ examples = [
     "Red sport car, sticker",
 ]
 
-with gr.Blocks(css="style.css") as demo:
+with gr.Blocks(css_paths="style.css") as demo:
     gr.Markdown(DESCRIPTION)
     gr.DuplicateButton(
         value="Duplicate Space for private use",
@@ -101,9 +99,8 @@ with gr.Blocks(css="style.css") as demo:
         show_label=False,
         max_lines=1,
         placeholder="Enter your prompt",
-        container=False,
+        submit_btn=True,
     )
-    run_button = gr.Button("Run", scale=0)
     result = gr.Image(label="Result", show_label=False)
     with gr.Accordion("Advanced options", open=False):
         negative_prompt = gr.Text(
@@ -172,7 +169,7 @@ with gr.Blocks(css="style.css") as demo:
     )
 
     gr.on(
-        triggers=[prompt.submit, negative_prompt.submit, run_button.click],
+        triggers=[prompt.submit, negative_prompt.submit],
         fn=randomize_seed_fn,
         inputs=[seed, randomize_seed],
         outputs=seed,
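For context, here is a minimal, self-contained sketch (not the Space's actual app: the Kandinsky pipelines and most UI elements are omitted, and MAX_SEED is a placeholder value assumed for illustration) of the Gradio 5 wiring this diff moves to. The Textbox's built-in submit_btn replaces the standalone "Run" button, so gr.on only listens on the submit events.

import random

import gradio as gr

MAX_SEED = 2**31 - 1  # placeholder bound for this sketch; the real app defines its own


def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    # Non-cryptographic randomness is fine here, hence the S311 suppression in the real app.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)  # noqa: S311
    return seed


with gr.Blocks() as demo:
    # submit_btn=True renders a submit button inside the Textbox, replacing
    # the separate gr.Button("Run") that this commit removes.
    prompt = gr.Text(label="Prompt", max_lines=1, submit_btn=True)
    seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
    randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

    # With the Run button gone, only the submit events drive the event chain.
    gr.on(
        triggers=[prompt.submit],
        fn=randomize_seed_fn,
        inputs=[seed, randomize_seed],
        outputs=seed,
    )

if __name__ == "__main__":
    demo.launch()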
pyproject.toml ADDED
@@ -0,0 +1,52 @@
+[project]
+name = "kandinsky-2-2"
+version = "0.1.0"
+description = ""
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+    "accelerate>=1.2.1",
+    "diffusers>=0.32.1",
+    "gradio>=5.9.1",
+    "hf-transfer>=0.1.8",
+    "spaces>=0.31.1",
+    "torch==2.4.0",
+    "torchvision>=0.19.0",
+    "transformers>=4.47.1",
+]
+
+[tool.ruff]
+line-length = 119
+
+[tool.ruff.lint]
+select = ["ALL"]
+ignore = [
+    "COM812", # missing-trailing-comma
+    "D203", # one-blank-line-before-class
+    "D213", # multi-line-summary-second-line
+    "E501", # line-too-long
+    "SIM117", # multiple-with-statements
+]
+extend-ignore = [
+    "D100", # undocumented-public-module
+    "D101", # undocumented-public-class
+    "D102", # undocumented-public-method
+    "D103", # undocumented-public-function
+    "D104", # undocumented-public-package
+    "D105", # undocumented-magic-method
+    "D107", # undocumented-public-init
+    "EM101", # raw-string-in-exception
+    "FBT001", # boolean-type-hint-positional-argument
+    "FBT002", # boolean-default-value-positional-argument
+    "PD901", # pandas-df-variable-name
+    "PGH003", # blanket-type-ignore
+    "PLR0913", # too-many-arguments
+    "PLR0915", # too-many-statements
+    "TRY003", # raise-vanilla-args
+]
+unfixable = [
+    "F401", # unused-import
+]
+
+[tool.ruff.format]
+docstring-code-format = true
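As a rough illustration (not part of the commit), the new docstring-code-format = true option tells ruff format to also format code examples embedded in docstrings, such as doctests. A hypothetical function showing the kind of snippet that gets picked up:

def scale(value: float, factor: float = 2.0) -> float:
    """Scale a value by a factor.

    >>> scale(3.0, factor=1.5)
    4.5

    The doctest above is treated as code and formatted by `ruff format`
    when `docstring-code-format = true` is set, as in the new pyproject.toml.
    """
    return value * factor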
requirements.txt CHANGED
@@ -1,7 +1,260 @@
-accelerate==0.31.0
-diffusers==0.28.2
-gradio==4.36.1
-spaces==0.28.3
-torch==2.0.1
-torchvision==0.15.2
-transformers==4.41.2
+# This file was autogenerated by uv via the following command:
+#    uv pip compile pyproject.toml -o requirements.txt
+accelerate==1.2.1
+    # via kandinsky-2-2 (pyproject.toml)
+aiofiles==23.2.1
+    # via gradio
+annotated-types==0.7.0
+    # via pydantic
+anyio==4.7.0
+    # via
+    #   gradio
+    #   httpx
+    #   starlette
+certifi==2024.12.14
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+charset-normalizer==3.4.1
+    # via requests
+click==8.1.8
+    # via
+    #   typer
+    #   uvicorn
+diffusers==0.32.1
+    # via kandinsky-2-2 (pyproject.toml)
+exceptiongroup==1.2.2
+    # via anyio
+fastapi==0.115.6
+    # via gradio
+ffmpy==0.5.0
+    # via gradio
+filelock==3.16.1
+    # via
+    #   diffusers
+    #   huggingface-hub
+    #   torch
+    #   transformers
+    #   triton
+fsspec==2024.12.0
+    # via
+    #   gradio-client
+    #   huggingface-hub
+    #   torch
+gradio==5.9.1
+    # via
+    #   kandinsky-2-2 (pyproject.toml)
+    #   spaces
+gradio-client==1.5.2
+    # via gradio
+h11==0.14.0
+    # via
+    #   httpcore
+    #   uvicorn
+hf-transfer==0.1.8
+    # via kandinsky-2-2 (pyproject.toml)
+httpcore==1.0.7
+    # via httpx
+httpx==0.28.1
+    # via
+    #   gradio
+    #   gradio-client
+    #   safehttpx
+    #   spaces
+huggingface-hub==0.27.0
+    # via
+    #   accelerate
+    #   diffusers
+    #   gradio
+    #   gradio-client
+    #   tokenizers
+    #   transformers
+idna==3.10
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+importlib-metadata==8.5.0
+    # via diffusers
+jinja2==3.1.5
+    # via
+    #   gradio
+    #   torch
+markdown-it-py==3.0.0
+    # via rich
+markupsafe==2.1.5
+    # via
+    #   gradio
+    #   jinja2
+mdurl==0.1.2
+    # via markdown-it-py
+mpmath==1.3.0
+    # via sympy
+networkx==3.4.2
+    # via torch
+numpy==2.2.1
+    # via
+    #   accelerate
+    #   diffusers
+    #   gradio
+    #   pandas
+    #   torchvision
+    #   transformers
+nvidia-cublas-cu12==12.1.3.1
+    # via
+    #   nvidia-cudnn-cu12
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-cuda-cupti-cu12==12.1.105
+    # via torch
+nvidia-cuda-nvrtc-cu12==12.1.105
+    # via torch
+nvidia-cuda-runtime-cu12==12.1.105
+    # via torch
+nvidia-cudnn-cu12==9.1.0.70
+    # via torch
+nvidia-cufft-cu12==11.0.2.54
+    # via torch
+nvidia-curand-cu12==10.3.2.106
+    # via torch
+nvidia-cusolver-cu12==11.4.5.107
+    # via torch
+nvidia-cusparse-cu12==12.1.0.106
+    # via
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-nccl-cu12==2.20.5
+    # via torch
+nvidia-nvjitlink-cu12==12.6.85
+    # via
+    #   nvidia-cusolver-cu12
+    #   nvidia-cusparse-cu12
+nvidia-nvtx-cu12==12.1.105
+    # via torch
+orjson==3.10.13
+    # via gradio
+packaging==24.2
+    # via
+    #   accelerate
+    #   gradio
+    #   gradio-client
+    #   huggingface-hub
+    #   spaces
+    #   transformers
+pandas==2.2.3
+    # via gradio
+pillow==11.0.0
+    # via
+    #   diffusers
+    #   gradio
+    #   torchvision
+psutil==5.9.8
+    # via
+    #   accelerate
+    #   spaces
+pydantic==2.10.4
+    # via
+    #   fastapi
+    #   gradio
+    #   spaces
+pydantic-core==2.27.2
+    # via pydantic
+pydub==0.25.1
+    # via gradio
+pygments==2.18.0
+    # via rich
+python-dateutil==2.9.0.post0
+    # via pandas
+python-multipart==0.0.20
+    # via gradio
+pytz==2024.2
+    # via pandas
+pyyaml==6.0.2
+    # via
+    #   accelerate
+    #   gradio
+    #   huggingface-hub
+    #   transformers
+regex==2024.11.6
+    # via
+    #   diffusers
+    #   transformers
+requests==2.32.3
+    # via
+    #   diffusers
+    #   huggingface-hub
+    #   spaces
+    #   transformers
+rich==13.9.4
+    # via typer
+ruff==0.8.4
+    # via gradio
+safehttpx==0.1.6
+    # via gradio
+safetensors==0.4.5
+    # via
+    #   accelerate
+    #   diffusers
+    #   transformers
+semantic-version==2.10.0
+    # via gradio
+shellingham==1.5.4
+    # via typer
+six==1.17.0
+    # via python-dateutil
+sniffio==1.3.1
+    # via anyio
+spaces==0.31.1
+    # via kandinsky-2-2 (pyproject.toml)
+starlette==0.41.3
+    # via
+    #   fastapi
+    #   gradio
+sympy==1.13.3
+    # via torch
+tokenizers==0.21.0
+    # via transformers
+tomlkit==0.13.2
+    # via gradio
+torch==2.4.0
+    # via
+    #   kandinsky-2-2 (pyproject.toml)
+    #   accelerate
+    #   torchvision
+torchvision==0.19.0
+    # via kandinsky-2-2 (pyproject.toml)
+tqdm==4.67.1
+    # via
+    #   huggingface-hub
+    #   transformers
+transformers==4.47.1
+    # via kandinsky-2-2 (pyproject.toml)
+triton==3.0.0
+    # via torch
+typer==0.15.1
+    # via gradio
+typing-extensions==4.12.2
+    # via
+    #   anyio
+    #   fastapi
+    #   gradio
+    #   gradio-client
+    #   huggingface-hub
+    #   pydantic
+    #   pydantic-core
+    #   rich
+    #   spaces
+    #   torch
+    #   typer
+    #   uvicorn
+tzdata==2024.2
+    # via pandas
+urllib3==2.3.0
+    # via requests
+uvicorn==0.34.0
+    # via gradio
+websockets==14.1
+    # via gradio-client
+zipp==3.21.0
+    # via importlib-metadata
uv.lock ADDED
The diff for this file is too large to render. See raw diff