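'''Streamlit demo that generates code completions with the facebook/incoder-1B model.'''
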
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer


@st.cache_resource  # load the model/tokenizer only once per session (Streamlit >= 1.18)
def load_model():
    '''
    Loads the InCoder model and tokenizer from the Hugging Face Hub.
    :return: A tuple (model, tokenizer).
    '''
    model_name = 'facebook/incoder-1B'
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=True)
    return model, tokenizer


st.set_page_config(
    page_title="Incoder Example",
    page_icon="👨‍💻",
)
st.markdown("# 👨‍💻 Incoder Example")

txt = st.text_area('Write code here', '''import os
def remove_file(file):''', height=400)
gen = st.button('Generate')
c = st.code('')

# Generation parameters
max_length = st.slider('max_length', 1, 2048, 128)
top_k = st.slider('top_k', 0, 100, 50)
top_p = st.slider('top_p', 0.0, 1.0, 0.9)
temperature = st.slider('temperature', 0.0, 1.0, 0.6)
num_beams = st.slider('num_beams', 1, 100, 1)  # default must lie within the slider's [1, 100] range
repetition_penalty = st.slider('repetition_penalty', 1.0, 10.0, 1.0)

if gen:
    c.code('Generating...')
    model, tokenizer = load_model()
    input_ids = tokenizer.encode(txt, return_tensors="pt")
    out = model.generate(
        input_ids,
        do_sample=True,  # sampling must be enabled for top_k/top_p/temperature to take effect
        max_length=max_length,
        top_p=top_p,
        top_k=top_k,
        temperature=temperature,
        num_beams=num_beams,
        repetition_penalty=repetition_penalty,
    )
    res = tokenizer.decode(out[0], skip_special_tokens=True)  # strip special tokens from the output
    c.code(res)