|
import gradio as gr |
|
from transformers import pipeline, set_seed |
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
|
|
# Hub id of the model checkpoint to serve.
# NOTE(review): "bigcode/starcoder-3b" does not look like a published Hub id —
# the 3B StarCoder variant is "bigcode/starcoderbase-3b". Verify before deploying.
checkpoint = "bigcode/starcoder-3b"

# Inference runs on CPU in this deployment.
device = "cpu"

# Load the tokenizer and causal-LM weights for the checkpoint.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint).to(device)

# Removed: an unused `pipeline('text-generation', model='gpt2', ...)` that
# downloaded and held a second model in memory without ever being called.

# Fix the RNG seed so any sampling-based generation is reproducible.
set_seed(42)
|
|
|
|
|
def Bemenet(bemenet: str) -> str:
    """Generate a text continuation for the given prompt.

    Args:
        bemenet: The user's prompt text (from the Gradio textbox).

    Returns:
        The prompt plus the model's continuation, decoded without
        special tokens.
    """
    # tokenizer(...) (rather than .encode) also returns the attention_mask,
    # which generate() needs to avoid padding-related warnings/misbehavior.
    inputs = tokenizer(bemenet, return_tensors="pt").to(device)
    outputs = model.generate(
        **inputs,
        # The generate() default max_length=20 counts the prompt too and
        # silently truncates; bound the *new* tokens instead.
        max_new_tokens=64,
        # Explicit pad token silences the "Setting pad_token_id" warning.
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
|
|
|
|
# Gradio UI: one text box in, generated text out, backed by Bemenet.
interface = gr.Interface(
    fn=Bemenet,
    inputs="text",
    outputs="text",
    title="Cím..",
    description="Leírás..",
)

# Start the local demo web server.
interface.launch()