Spaces:
Sleeping
Sleeping
saifeddinemk
committed on
Commit
•
52422d2
1
Parent(s):
3588fbc
Fixed app v2
Browse files
app.py
CHANGED
@@ -1,7 +1,30 @@
|
|
|
|
|
|
1 |
from transformers import pipeline
|
2 |
|
3 |
-
|
4 |
-
|
5 |
-
|
|
|
6 |
pipe = pipeline("text-generation", model="segolilylabs/Lily-Cybersecurity-7B-v0.2")
|
7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import pipeline

# FastAPI application instance that serves the generation endpoint.
app = FastAPI()

# Text-generation pipeline backed by the Lily cybersecurity 7B fine-tune.
# NOTE(review): the full model is loaded at import time — expect slow startup
# and a large memory footprint; confirm the host has enough RAM/VRAM.
pipe = pipeline("text-generation", model="segolilylabs/Lily-Cybersecurity-7B-v0.2")
|
10 |
+
|
11 |
+
# Request payload schema for the /generate_response/ endpoint.
class MessageRequest(BaseModel):
    # User-supplied message text forwarded to the model as the "user" turn.
    content: str
|
14 |
+
|
15 |
+
# Route: accept a user message and return the model's generated reply.
@app.post("/generate_response/")
async def generate_response(message: MessageRequest):
    """Generate a model reply for the supplied user message.

    Parameters:
        message: request body carrying a single ``content`` string.

    Returns:
        JSON object ``{"response": <generated text>}``.

    Raises:
        HTTPException: status 500 wrapping any failure during generation.
    """
    try:
        # Chat-style input for the pipeline: a list of role/content dicts.
        input_data = [{"role": "user", "content": message.content}]

        # The pipeline returns a list of candidates; take the first one's text.
        response = pipe(input_data)[0]["generated_text"]

        return {"response": response}

    except Exception as e:
        # Boundary handler: surface any failure as a 500. Chain with `from e`
        # so the original traceback is preserved for server-side debugging
        # (the bare re-raise previously discarded the cause).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
30 |
+
|