from transformers import pipeline, set_seed
from transformers import BioGptTokenizer, BioGptForCausalLM
import gradio as gr

# Load the BioGPT model and tokenizer, then wrap them in a text-generation pipeline.
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
set_seed(42)  # fix the sampling seed so outputs are reproducible


def predict(text):
    # The pipeline returns a list of dicts; unwrap it so the textbox shows plain text.
    output = generator(text, max_length=20, num_return_sequences=1, do_sample=True)
    return output[0]["generated_text"]
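# Example (assumption, not part of the original app): predict() can be called
# directly for a quick sanity check before serving the UI, e.g.
#   predict("COVID-19 is")
# which returns one short generated continuation of the prompt.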

    
interface = gr.Interface(fn=predict, inputs="textbox", outputs="textbox", title="BioGPT", description="Biomedical text generation with microsoft/biogpt")


interface.launch()