Spaces: swcrazyfan (Sleeping)
Commit e076c74, committed by swcrazyfan
Parent(s): d5e6e07
Update app.py
app.py
CHANGED
@@ -9,7 +9,7 @@ tokenizer = T5Tokenizer.from_pretrained("swcrazyfan/Dekingify-T5-Large")
 def tokenize_data(text):
     # Tokenize the review body
     # input_ = "paraphrase: "+ str(text) + ' >'
-    input_ = "
+    input_ = "dekingify: " + str(text) + ' </s>'
     max_len = 512
     # tokenize inputs
     tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True, max_length=max_len, return_attention_mask=True, return_tensors='pt')
@@ -29,5 +29,5 @@ def generate_answers(text, max_length, min_length, num_beams):
     answer = tokenizer.decode(results[0], skip_special_tokens=True)
     return answer
 
-iface = gr.Interface(title="
+iface = gr.Interface(title="DeKingify", description="Write anything below. Then, click submit to 'DeKingify' it.", fn=generate_answers, inputs=[gr.inputs.Textbox(label="Original Text",lines=10), gr.inputs.Slider(label="Maximum Length", minimum=1, maximum=512, default=512, step=1), gr.inputs.Slider(label="Minimum Length", minimum=1, maximum=512, default=1, step=1), gr.inputs.Slider(label="Number of Beams", minimum=1, maximum=10, default=5, step=1)], outputs=["text"])
 iface.launch(inline=False)
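For context, the following is a minimal sketch of how the changed lines fit together in app.py. Only the hunks above are shown in this commit, so the imports, the model variable, the return value of tokenize_data, and the exact model.generate() call inside generate_answers are assumptions, not code confirmed by the diff.

import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration

# The tokenizer name matches the diff; loading the matching model is assumed here.
tokenizer = T5Tokenizer.from_pretrained("swcrazyfan/Dekingify-T5-Large")
model = T5ForConditionalGeneration.from_pretrained("swcrazyfan/Dekingify-T5-Large")

def tokenize_data(text):
    # This commit changes the prompt prefix to the "dekingify:" task tag.
    input_ = "dekingify: " + str(text) + ' </s>'
    max_len = 512
    # Tokenize the input with padding/truncation to a fixed length.
    tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True,
                                 max_length=max_len, return_attention_mask=True,
                                 return_tensors='pt')
    return tokenized_inputs

def generate_answers(text, max_length, min_length, num_beams):
    # The body of this function is not shown in the diff; this generate() call
    # is an assumed reconstruction that wires the UI sliders to beam search.
    inputs = tokenize_data(text)
    results = model.generate(input_ids=inputs['input_ids'],
                             attention_mask=inputs['attention_mask'],
                             max_length=max_length,
                             min_length=min_length,
                             num_beams=num_beams)
    answer = tokenizer.decode(results[0], skip_special_tokens=True)
    return answer

The Gradio interface added in the second hunk then passes the textbox and the three sliders (maximum length, minimum length, number of beams) as the positional inputs to generate_answers and displays the decoded text as the output.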