import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the GraphCodeBERT checkpoint. GraphCodeBERT is a RoBERTa-style encoder,
# so wrapping it with AutoModelForCausalLM repurposes it for left-to-right
# generation rather than the masked-language-modelling it was pretrained for.
tokenizer = AutoTokenizer.from_pretrained("microsoft/graphcodebert-base")
model = AutoModelForCausalLM.from_pretrained("microsoft/graphcodebert-base")
# Gradio text boxes for the natural-language query and the generated snippet.
input_box = gr.Textbox(lines=5, label="Input")
output_box = gr.Textbox(label="Output")
def use_graphcodebert(query):
    # Tokenize the query and generate a continuation.
    input_ids = tokenizer.encode(query, return_tensors="pt")
    # GraphCodeBERT's position embeddings only cover 512 tokens, so cap
    # generation at that length.
    output_ids = model.generate(input_ids, max_length=512)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
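
# Illustrative sanity check, assuming a short natural-language prompt: call the
# inference function directly before wiring up the UI. Output quality will
# reflect GraphCodeBERT's encoder-only pretraining.
print(use_graphcodebert("write a Python function that reverses a string"))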
iface = gr.Interface(
    fn=use_graphcodebert,
    inputs=input_box,
    outputs=output_box,
    title="GraphCodeBERT Code Synthesis",
    description="Enter a natural language query and get a code snippet generated by GraphCodeBERT.",
)

iface.launch()
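# Optional: iface.launch(share=True) also serves the demo behind a temporary
# public URL, which is handy for sharing it without deploying anywhere.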