# Hugging Face Spaces demo (Space status at capture time: Sleeping).
"""Streamlit demo: answer a fixed question with Facebook's RAG-Token model.

Loads the pretrained RAG tokenizer, retriever, and generator, generates an
answer to a hard-coded question, and displays it via Streamlit.
"""
import streamlit as st
from transformers import RagRetriever, RagTokenForGeneration, RagTokenizer

# Load the pretrained RAG components.
# use_dummy_dataset=True skips downloading the full Wikipedia passage index,
# so answers are for demonstration only, not real retrieval quality.
tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
retriever = RagRetriever.from_pretrained(
    "facebook/rag-token-nq", index_name="exact", use_dummy_dataset=True
)
model = RagTokenForGeneration.from_pretrained(
    "facebook/rag-token-nq", retriever=retriever
)

question = "who holds the record in 100m freestyle"

# prepare_seq2seq_batch() is deprecated in recent transformers releases;
# calling the tokenizer directly is the supported replacement and returns
# the same PyTorch-tensor batch for the encoder.
input_dict = tokenizer(question, return_tensors="pt")
generated = model.generate(input_ids=input_dict["input_ids"])
outstring = tokenizer.batch_decode(generated, skip_special_tokens=True)[0]

print(outstring)       # log to the server console
st.write(outstring)    # render the answer in the Streamlit app