import gradio as gr
from llama_cpp import Llama
import os

MODEL_URL = "https://huggingface.co/SamPurkis/Yi-6B-GGUF/resolve/main/Yi-6B.q4_k_m.gguf"
model_file = "Yi-6B.q4_k_m.gguf"

# Download the quantized model on first run; wget -c resumes an interrupted download.
if not os.path.isfile(model_file):
    os.system(f"wget -c {MODEL_URL}")

llm = Llama(model_path=model_file)
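# Optional tuning (an assumption, not part of the original Space): the Llama
# constructor also accepts a context size and thread count, which can matter
# on CPU-only hardware, e.g.:
#   llm = Llama(model_path=model_file, n_ctx=2048, n_threads=4)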

def generate_text(input_text):
    # "Assistant:" cues the model's reply and matches the stop marker; generation
    # halts at a newline or a new turn. echo=True returns the prompt with the completion.
    output = llm(f"Human: {input_text} Assistant:", max_tokens=512, stop=["Assistant:", "\n"], echo=True)
    return output['choices'][0]['text']
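
# Illustrative direct call (a sketch, assuming the model has loaded): the
# returned completion follows an OpenAI-style schema, so fields beyond the
# text are available as well:
#   out = llm("Human: Hello! Assistant:", max_tokens=16)
#   out["choices"][0]["text"], out["choices"][0]["finish_reason"]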

input_text = gr.Textbox(lines=10, label="Enter your input text")
output_text = gr.Textbox(label="Output text")
description = "llama.cpp bindings for Python [https://github.com/abetlen/llama-cpp-python]"
# Each example supplies one value per input component (a single Textbox here).
examples = [
    ["What is the capital of France?"],                # expected: Paris
    ["Who wrote the novel 'Pride and Prejudice'?"],    # expected: Jane Austen
    ["What is the square root of 64?"]                 # expected: 8
]

gr.Interface(fn=generate_text, inputs=input_text, outputs=output_text,
             title="Yi-6B (llama.cpp)", description=description, examples=examples).launch()
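
# To run this Space locally (environment assumption: Python 3 with network access):
#   pip install gradio llama-cpp-python
#   python app.py
# launch() serves the UI on http://127.0.0.1:7860 by default.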