Update app.py
app.py
CHANGED
@@ -4,39 +4,50 @@ from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM, AutoMode
import gradio as gr
from gradio.mix import Parallel, Series
#import torch.nn.functional as F
-
+from aitextgen import aitextgen
from datasets import load_dataset
dataset = load_dataset("bananabot/engMollywoodSummaries")
dataset

device = "cuda" if torch.cuda.is_available() else "cpu"

-
-
-
+ai = aitextgen(model="EleutherAI/gpt-neo-1.3B")
+
+#model_name = "EleutherAI/gpt-neo-125M"
+#tokenizer = AutoTokenizer.from_pretrained(model_name)
+#model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

-max_length=123
-input_txt = "This malayalam movie is about"
-n_steps = 8
+#max_length=123
+#input_txt = "This malayalam movie is about"
+#n_steps = 8

-input_ids = tokenizer(input_txt, return_tensors="pt")["input_ids"].to(device)
-output = model.generate(input_ids, max_length=max_length, num_beams=5, do_sample=True, no_repeat_ngram_size=2, temperature=1.37, top_k=69, top_p=0.96)
-print(tokenizer.decode(output[0]))
+#input_ids = tokenizer(input_txt, return_tensors="pt")["input_ids"].to(device)
+#output = model.generate(input_ids, max_length=max_length, num_beams=5, do_sample=True, no_repeat_ngram_size=2, temperature=1.37, top_k=69, top_p=0.96)
+#print(tokenizer.decode(output[0]))

-def generate(input_txt):
-    output = model.generate(input_ids, max_length=max_length, num_beams=5, do_sample=True, no_repeat_ngram_size=2, temperature=1.37, top_k=69, top_p=0.96)
-    print (output)
+#def generate(input_txt):
+# output = model.generate(input_ids, max_length=max_length, num_beams=5, do_sample=True, no_repeat_ngram_size=2, temperature=1.37, top_k=69, top_p=0.96)
+# print (output)
+
+def ai_text(inp):
+    generated_text = ai.generate_one(max_length=123, prompt = inp, no_repeat_ngram_size=3, num_beams=5, do_sample=True, temperature=1.37, top_k=69, top_p=0.96)
+    print(type(generated_text))
+    return generated_text

-inputs= gr.inputs.Textbox(lines=7, placeholder="Enter the beginning of your mollywood movie idea and the നിർമ്മിത ബുദ്ധി will fill in the rest...")
+#inputs= gr.inputs.Textbox(lines=7, placeholder="Enter the beginning of your mollywood movie idea and the നിർമ്മിത ബുദ്ധി will fill in the rest...")

#integrate a working translator later!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#generator = output
#translator = gr.Interface.load("models/Helsinki-NLP/opus-mt-en-ml")
#gr.Series(generator, translator, inputs=gr.inputs.Textbox(lines=7, label="Input Text")).launch() # this demo generates text, then translates it to Malayalam, and outputs the final result.

-interface = gr.Interface(fn=generate,
-    inputs=inputs,
-    outputs='text',
-    title='AI based Mollywood movie idea generator')
+#interface = gr.Interface(fn=generate,
+# inputs=inputs,
+# outputs='text',
+# title='AI based Mollywood movie idea generator')
+
+#interface.launch()

-
+output_text = gr.outputs.Textbox()
+gr.Interface(ai_text,"textbox", output_text, title="AI based Mollywood movie idea generator",
+    description="Enter the beginning of your malayalam movie idea and the നിർമ്മിത ബുദ്ധി will fill in the rest...").launch()
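
For readability, here is what app.py amounts to after this commit once the commented-out legacy transformers code is dropped. The model name, generation parameters, and Gradio interface are taken verbatim from the diff; the `import torch` line is an assumption (the device check needs it, but the import lines above this hunk are not visible in the diff), and the debug `print(type(...))` call is omitted.

import torch                               # assumption: required by the device check below; not shown in this hunk
import gradio as gr
from gradio.mix import Parallel, Series    # unused at runtime; relevant only to the translator TODO
from aitextgen import aitextgen
from datasets import load_dataset

# Loaded but never used by the demo below
dataset = load_dataset("bananabot/engMollywoodSummaries")

# Computed but not used by the aitextgen call below
device = "cuda" if torch.cuda.is_available() else "cpu"

# GPT-Neo 1.3B loaded through aitextgen
ai = aitextgen(model="EleutherAI/gpt-neo-1.3B")

def ai_text(inp):
    # generate_one returns a single generated string for the given prompt
    return ai.generate_one(max_length=123, prompt=inp, no_repeat_ngram_size=3,
                           num_beams=5, do_sample=True, temperature=1.37,
                           top_k=69, top_p=0.96)

output_text = gr.outputs.Textbox()
gr.Interface(ai_text, "textbox", output_text,
             title="AI based Mollywood movie idea generator",
             description="Enter the beginning of your malayalam movie idea and the നിർമ്മിത ബുദ്ധി will fill in the rest...").launch()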
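
The in-file TODO ("integrate a working translator later") points at the intended follow-up: generate the English synopsis first, then translate it to Malayalam with Helsinki-NLP/opus-mt-en-ml. Below is a rough, untested sketch of that chain using the gradio.mix.Series helper the file already imports; the wiring mirrors the commented-out gr.Series(...) line, and whether this exact kwarg style works depends on the Gradio 2.x version the Space pins, so treat it as an assumption rather than a known-good configuration.

import gradio as gr
from gradio.mix import Series
from aitextgen import aitextgen

ai = aitextgen(model="EleutherAI/gpt-neo-1.3B")

def ai_text(inp):
    # Same generation call as in app.py
    return ai.generate_one(max_length=123, prompt=inp, no_repeat_ngram_size=3,
                           num_beams=5, do_sample=True, temperature=1.37,
                           top_k=69, top_p=0.96)

# English idea generator wrapped as a Gradio Interface
generator = gr.Interface(ai_text, "textbox", gr.outputs.Textbox())

# English -> Malayalam translator loaded from the Hub, as named in the TODO comment
translator = gr.Interface.load("models/Helsinki-NLP/opus-mt-en-ml")

# Chain the two: the generated English text is fed to the translator,
# and the Malayalam translation becomes the final output
Series(generator, translator,
       inputs=gr.inputs.Textbox(lines=7, label="Input Text")).launch()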