Spaces: Running on T4
yuntian-deng committed
Commit • 83985af
Parent(s): ea4d143
Update app.py
app.py CHANGED
@@ -42,7 +42,7 @@ def setup():
         img_pipe.unet.up_blocks[i] = torch.compile(img_pipe.unet.up_blocks[i])
     tokenizer = AutoTokenizer.from_pretrained(model_type, max_length=1024)
     eos_id = tokenizer.encode(tokenizer.eos_token)[0]
-
+
 def forward_encoder(latex):
     device = ("cuda" if torch.cuda.is_available() else "cpu")
     img_pipe.to(device)
@@ -76,6 +76,8 @@ with gr.Blocks() as demo:
     image = gr.Image(label="Rendered Image", show_label=False, elem_id="image")
     inputs = [textbox]
     outputs = [slider, image, submit_btn]
+
+    @spaces.GPU
     def infer(formula):
         current_time = datetime.now(tz)
         print (current_time, formula)