Commit · 1328239
Parent(s): 597ce80
update

- main.py +25 -1
- requirements.txt +0 -0
main.py CHANGED

@@ -1,6 +1,9 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, Form
+import torch
+from transformers import pipeline
 
 app = FastAPI()
+pipe = pipeline("text-generation", model="TinyLlama/TinyLlama-1.1B-Chat-v1.0", torch_dtype=torch.bfloat16, device_map="auto")
 
 @app.get("/")
 def read_root():
@@ -9,3 +12,24 @@ def read_root():
 @app.get("/items/{item_id}")
 def read_item(item_id: int, query_param: str = None):
     return {"item_id": item_id, "query_param": query_param}
+
+@app.post("/chat")
+async def chat(sentence: str = Form(...)):
+    print("start chat")
+    if sentence.lower() == "quit":
+        return {"response": "Chatbot session ended."}
+
+    messages = [
+        {
+            "role": "system",
+            "content": "You are a friendly chatbot who always responds in the style of a pirate",
+        },
+        {"role": "user", "content": sentence},
+    ]
+    print("start apply_chat_template")
+    prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+    print("start outputs")
+    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
+    text = outputs[0]["generated_text"]
+
+    return {"response": text}
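For context, a minimal way to exercise the new /chat route is a form-encoded POST. The sketch below is not part of the commit: it assumes the app is served locally (for example with uvicorn main:app --port 8000) and that the requests package is available on the client side.

# Hypothetical client for the new endpoint; the URL, port, and example
# sentence are assumptions, not values taken from the commit.
import requests

resp = requests.post(
    "http://127.0.0.1:8000/chat",
    data={"sentence": "Where be the treasure?"},  # form field, matching sentence: str = Form(...)
)
print(resp.json()["response"])

Note that with these pipeline settings the generated_text field typically contains the prompt (the applied chat template) followed by the model's reply, so the /chat response returns the full generated string rather than only the assistant turn.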
requirements.txt CHANGED

Binary files a/requirements.txt and b/requirements.txt differ
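Because the requirements.txt change is recorded only as a binary diff, its exact contents are not visible here. The imports added in main.py do imply that, beyond fastapi, the environment needs torch, transformers, and python-multipart (which FastAPI requires for Form(...) parameters).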