Sirawitch committed on
Commit 5943c56
1 Parent(s): 4e580b4

Update app.py

Files changed (1)
  1. app.py +32 -29
app.py CHANGED
@@ -1,4 +1,6 @@
-from fastapi import FastAPI, Request
+from fastapi import FastAPI, HTTPException
+from pydantic import BaseModel
+from typing import Optional
 from huggingface_hub import InferenceClient
 import uvicorn
 
@@ -6,35 +8,36 @@ app = FastAPI()
 
 client = InferenceClient("scb10x/llama-3-typhoon-v1.5-8b-instruct")
 
+class Query(BaseModel):
+    queryResult: Optional[dict] = None
+    queryText: Optional[str] = None
+
 @app.post("/webhook")
-async def webhook(request: Request):
-    # Receive the data from Dialogflow
-    data = await request.json()
-    query_text = data['queryResult']['queryText']
-
-    # Build the messages for the Huggingface API
-    messages = [
-        {"role": "system", "content": "You are a friendly Chatbot."},
-        {"role": "user", "content": query_text}
-    ]
-
-    # Call the Huggingface API
-    response = client.chat_completion(
-        messages,
-        max_tokens=512,
-        temperature=0.7,
-        top_p=0.95,
-    )
-
-    # Extract the reply text
-    answer = response.choices[0].message.content
-
-    # Build the response for Dialogflow
-    dialogflow_response = {
-        "fulfillmentText": answer,
-    }
-
-    return dialogflow_response
+async def webhook(query: Query):
+    try:
+        # Use queryText from queryResult if present, otherwise use queryText directly
+        user_query = query.queryResult.get('queryText') if query.queryResult else query.queryText
+
+        if not user_query:
+            raise HTTPException(status_code=400, detail="No query text provided")
+
+        messages = [
+            {"role": "system", "content": "You are a friendly Chatbot."},
+            {"role": "user", "content": user_query}
+        ]
+
+        response = client.chat_completion(
+            messages,
+            max_tokens=512,
+            temperature=0.7,
+            top_p=0.95,
+        )
+
+        model_reply = response.choices[0].message.content.strip()
+
+        return {"fulfillmentText": model_reply}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
 
 if __name__ == "__main__":
     uvicorn.run(app, host="0.0.0.0", port=7860)
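
For reference, a minimal sketch of how the updated /webhook endpoint could be exercised once the app is running locally on port 7860. The use of the requests library, the localhost URL, and the sample query text are assumptions for illustration only; the two payload shapes mirror what the new Query model accepts (a Dialogflow-style queryResult.queryText, or a bare queryText).

# Minimal local test for the /webhook endpoint.
# Assumption: the app from this commit is serving on http://localhost:7860.
import requests

# Dialogflow-style payload: queryText nested under queryResult.
dialogflow_payload = {"queryResult": {"queryText": "Hello, who are you?"}}

# Bare payload: queryText at the top level, also accepted by the Query model.
bare_payload = {"queryText": "Hello, who are you?"}

for payload in (dialogflow_payload, bare_payload):
    resp = requests.post("http://localhost:7860/webhook", json=payload)
    resp.raise_for_status()
    # On success the endpoint returns {"fulfillmentText": "<model reply>"}.
    print(resp.json()["fulfillmentText"])

One behavioral note on the new handler: because HTTPException subclasses Exception, the 400 raised for a missing query text inside the try block is caught by the except branch and re-surfaced as a 500, so callers effectively see a 500 for both bad input and model-side failures.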