# HostMixCPU / app.py
from ctransformers import AutoModelForCausalLM
import gradio as gr
from fastapi import FastAPI
from pydantic import BaseModel

# Model loading: quantized Mistral-7B-OpenOrca (GGUF, Q4_K_S) run on CPU via ctransformers
llm = AutoModelForCausalLM.from_pretrained(
    "mistral-7b-openorca.Q4_K_S.gguf",
    model_type="mistral",
    max_new_tokens=1096,
    threads=3,
)
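
# Quick sanity check (illustrative, not part of the app): the loaded ctransformers
# model is callable directly on a prompt string and returns the generated text, e.g.
#   print(llm("Q: What is a large language model?\nA:"))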

# Pydantic request body for the API endpoint
class validation(BaseModel):
    prompt: str


# FastAPI app exposing the model over HTTP
app = FastAPI()

@app.post("/llm_on_cpu")
async def stream(item: validation):
    # Build a chat-style prompt: system instruction, then user turn, then assistant turn
    system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request. Write only in ITALIAN.'
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
    return llm(prompt)
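
# Example client call (a sketch, assuming the FastAPI app is served with uvicorn,
# e.g. `uvicorn app:app --port 8000`; the host, port, and prompt text are illustrative):
#   import requests
#   r = requests.post("http://localhost:8000/llm_on_cpu",
#                     json={"prompt": "Spiega i modelli linguistici di grandi dimensioni"})
#   print(r.json())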

# Footer markdown shown under the chat interface
greeting = """
Test
"""

css = """
h1 {
    text-align: center;
}

#duplicate-button {
    margin: auto;
    color: white;
    background: #1565c0;
    border-radius: 100vh;
}

.contain {
    max-width: 900px;
    margin: auto;
    padding-top: 1.5rem;
}
"""

# ChatInterface calls fn(message, history); reuse the prompt template rather than `stream`
def generate(message, history):
    system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request. Write only in ITALIAN.'
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{message.strip()}{E_INST}\n{assistant}\n"
    return llm(prompt)

chat_interface = gr.ChatInterface(
    fn=generate,
    stop_btn=None,
    examples=[
        ["explain Large language model"],
        ["what is quantum computing"],
    ],
)

with gr.Blocks(css=css) as demo:
    gr.HTML("<h1><center>LLM Deployment Space</center></h1>")
    gr.HTML("<h3><center><a href='#'>AI</a>💬</center></h3>")
    gr.DuplicateButton(value="Duplicate Space for private use", elem_id="duplicate-button")
    chat_interface.render()
    gr.Markdown(greeting)

if __name__ == "__main__":
    demo.queue(max_size=10).launch()
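
# Alternative entry point (a sketch): instead of launching Gradio on its own, the UI
# can be mounted onto the FastAPI app so a single uvicorn process serves both the chat
# interface and the /llm_on_cpu endpoint:
#   app = gr.mount_gradio_app(app, demo, path="/")
# then run with `uvicorn app:app --host 0.0.0.0 --port 7860`.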