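"""Gradio chat demo for Meta-Llama-3-70B-Instruct.

The prompt is built locally with the model's chat template and sent to a
remote text-generation endpoint (reached through an ngrok tunnel), which
streams the reply back into the chatbot.
"""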
import time

import gradio as gr
from huggingface_hub import InferenceClient
from transformers import AutoTokenizer

# The tokenizer is only used to build the Llama-3 chat prompt; generation runs on the remote endpoint below.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-70B-Instruct")
client = InferenceClient(model="https://6af4-4-223-164-145.ngrok-free.app")

SYSTEM_COMMAND = {"role": "system", "content": "You are a knowledgeable assistant trained to provide accurate and helpful information. Please respond to the user's queries promptly and politely."}

# Special tokens filtered out of the streamed reply; STOP_TOKENS are also sent as stop sequences.
IGNORED_TOKENS = {None, "<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>", "<|reserved_special_token"}
STOP_TOKENS = ["<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>", "<|reserved_special_token"]
with gr.Blocks() as demo:
    gr.Markdown("This demo is currently turned off")

    tfs_history = gr.State([SYSTEM_COMMAND])  # chat-template history, seeded with the system prompt
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Prompt")
    clear = gr.Button("Clear")

    def user(user_message, history, dict_history):
        # Record the user turn, clear the textbox, and add the message to the visible chat.
        data = {"role": "user", "content": user_message}
        dict_history.append(data)
        return "", history + [[user_message, None]], dict_history

    def bot(history, dict_history):
        # Stream the assistant reply token by token into the last chatbot turn.
        history[-1][1] = ""
        response = {"role": "assistant", "content": ""}
        start_tokenize = time.perf_counter()  # prompt-construction timing (currently unused)
        text_input = tokenizer.apply_chat_template(dict_history, tokenize=False, add_generation_prompt=True)
        end_tokenize = time.perf_counter()
        try:
            for token in client.text_generation(prompt=text_input, max_new_tokens=300, stop_sequences=STOP_TOKENS, stream=True):
                if token not in IGNORED_TOKENS:
                    history[-1][1] += token
                    response["content"] += token
                    yield history
        finally:
            # Record the assistant turn even if streaming is interrupted.
            dict_history.append(response)

    def clear_history(tfs_history):
        # Keep only the system prompt.
        tfs_history = tfs_history[:1]
        return tfs_history

    # On submit: record the user turn, then stream the bot reply.
    msg.submit(
        user,
        inputs=[msg, chatbot, tfs_history],
        outputs=[msg, chatbot, tfs_history],
        queue=False,
    ).then(
        bot,
        [chatbot, tfs_history],
        chatbot,
    )

    # Clear both the visible chat and the stored history (back to just the system prompt).
    clear.click(lambda: None, None, chatbot, queue=False)
    clear.click(clear_history, tfs_history, tfs_history, queue=False)

demo.queue()
demo.launch()