import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

model_checkpoint = "distilgpt2"
model_checkpoint_amal = "Amal17/wikipedia-20230601.ace"

tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_fast=True, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_checkpoint_amal,
    trust_remote_code=True
)

model.eval()

def generate(prompt):
    # Tokenize the prompt and generate a short continuation.
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(
        inputs=inputs.input_ids,
        attention_mask=inputs.attention_mask,
        # distilgpt2's tokenizer has no pad token; use EOS for padding instead.
        pad_token_id=tokenizer.eos_token_id,
        eos_token_id=tokenizer.eos_token_id,
        # Sampling options (do_sample, temperature, top_p) could be enabled here.
        max_new_tokens=35,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

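# Minimal Gradio text-in/text-out interface wrapping generate().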
iface = gr.Interface(fn=generate, inputs="text", outputs="text")
iface.launch()