Update
README.md CHANGED

@@ -4,7 +4,7 @@ emoji: π
 colorFrom: green
 colorTo: indigo
 sdk: gradio
-sdk_version: 3.
+sdk_version: 3.36.1
 app_file: app.py
 pinned: false
 ---
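The sdk_version field in the front matter pins the Gradio release that the Space runtime installs. A quick sanity check (a minimal sketch, not part of the Space itself) is to confirm the installed package matches the pin:

# Minimal check: the installed gradio package should match the pinned
# sdk_version (3.36.1) from README.md.
import gradio as gr

print(gr.__version__)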
app.py CHANGED

@@ -6,23 +6,17 @@ import gradio as gr
 
 from model import Model
 
-DESCRIPTION = '
-
-This is an unofficial demo for [https://github.com/ljsabc/MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch).
-'''
+DESCRIPTION = '# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)'
 
 model = Model()
 
 with gr.Blocks(css='style.css') as demo:
     gr.Markdown(DESCRIPTION)
-
     with gr.Row():
         with gr.Column():
             input_image = gr.Image(label='Input', type='numpy')
             run_button = gr.Button(value='Run')
         with gr.Column():
             result = gr.Image(label='Result', type='numpy', elem_id='result')
-
     run_button.click(fn=model.predict, inputs=input_image, outputs=result)
-
-demo.queue().launch(show_api=False)
+demo.queue().launch()
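For reference, the resulting layout and event wiring can be run standalone. Below is a minimal sketch in which a placeholder function stands in for Model().predict and the custom CSS is dropped:

# Minimal standalone sketch of the same Blocks layout and click wiring,
# with a stand-in predict function instead of Model().predict from model.py.
import gradio as gr
import numpy as np

DESCRIPTION = '# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)'


def predict(image: np.ndarray) -> np.ndarray:
    # Placeholder for the line-extraction model: simply invert the image.
    return 255 - image


with gr.Blocks() as demo:
    gr.Markdown(DESCRIPTION)
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label='Input', type='numpy')
            run_button = gr.Button(value='Run')
        with gr.Column():
            result = gr.Image(label='Result', type='numpy')
    run_button.click(fn=predict, inputs=input_image, outputs=result)

demo.queue().launch()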
model.py CHANGED

@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import os
 import pathlib
 import sys
 
@@ -16,8 +15,6 @@ sys.path.insert(0, submodule_dir.as_posix())
 
 from model_torch import res_skip
 
-HF_TOKEN = os.getenv('HF_TOKEN')
-
 MAX_SIZE = 1000
 
 
@@ -29,9 +26,7 @@ class Model:
 
     def _load_model(self) -> nn.Module:
         ckpt_path = huggingface_hub.hf_hub_download(
-            '
-            'erika.pth',
-            use_auth_token=HF_TOKEN)
+            'public-data/MangaLineExtraction_PyTorch', 'erika.pth')
         state_dict = torch.load(ckpt_path)
         model = res_skip()
         model.load_state_dict(state_dict)
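Since the checkpoint now comes from a public model repo, the download no longer needs HF_TOKEN or a use_auth_token argument. A minimal standalone sketch of the same download-and-load step follows; map_location='cpu' is added here only so the sketch also runs on machines without a GPU:

# Minimal sketch: fetch the public checkpoint and load its weights on CPU.
import huggingface_hub
import torch

ckpt_path = huggingface_hub.hf_hub_download(
    'public-data/MangaLineExtraction_PyTorch', 'erika.pth')
state_dict = torch.load(ckpt_path, map_location='cpu')
print(f'{len(state_dict)} tensors loaded from {ckpt_path}')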