Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -21,10 +21,10 @@ model = AutoModelForCausalLM.from_pretrained(model_path, device_map='cuda', quan
 def generate_text(usertitle, content, max_length, temperature):
     input_text = {'title': usertitle, 'content': content}
     inputs = tokenizer.apply_chat_template(input_text, return_tensors='pt').cuda()
-    generated_text = tokenizer.decode(model.generate(inputs, max_new_tokens=max_length, temperature=temperature, do_sample=True)[0])
-    split_text = generated_text.
+    generated_text = tokenizer.decode(model.generate(inputs, max_new_tokens=max_length, temperature=temperature, do_sample=True)[0]).strip().split(tokenizer.eos_token)[0]
+    # split_text = generated_text.split(tokenizer.eos_token)[0]
 
-    return
+    return generated_text
 
 def gradio_app():
     with gr.Blocks() as demo:
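The commit completes the two statements left unfinished in the previous revision (the dangling `split_text = generated_text.` and the bare `return`), which is the likely cause of the Space's runtime error: the decoded output is now stripped and truncated at the first EOS token, that string is returned, and the old `split_text` line is kept only as a comment. A minimal sketch of the truncation idiom added on line 24, using a hypothetical eos_token value and decoded string in place of the real tokenizer output:

# Sketch of the idiom from line 24: decode, strip whitespace, then keep only the
# text before the first end-of-sequence marker. The eos_token value and the
# decoded string are hypothetical stand-ins for the real tokenizer output.
eos_token = '</s>'
decoded = ' Once upon a time, there was a model.</s><pad><pad>'

generated_text = decoded.strip().split(eos_token)[0]
print(generated_text)  # Once upon a time, there was a model.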