from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import pipeline
from typing import List
# Initialize the FastAPI app
app = FastAPI()
# Initialize the text generation pipeline
pipe = pipeline("text-generation", model="CyberNative-AI/Colibri_8b_v0.1")
# Define the input schema for FastAPI
class Message(BaseModel):
    role: str
    content: str

class MessagesInput(BaseModel):
    messages: List[Message]

@app.post("/generate/")
async def generate_response(messages_input: MessagesInput):
    try:
        # Convert messages to the list-of-dicts format the pipeline expects
        messages = [{"role": msg.role, "content": msg.content} for msg in messages_input.messages]
        # Generate a response using the pipeline
        response = pipe(messages)
        # Extract the generated text; with chat-style input the pipeline returns the
        # full conversation, so take the content of the last (assistant) message
        generated_text = response[0]["generated_text"]
        if isinstance(generated_text, list):
            generated_text = generated_text[-1]["content"]
        return {
            "response": generated_text
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
# Run the app
# To start the server, use the command: uvicorn filename:app --host 0.0.0.0 --port 8000
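# Example request (a minimal sketch, assuming the server is running locally on port 8000
# as in the uvicorn command above; the prompt text is only illustrative):
#
#   curl -X POST http://localhost:8000/generate/ \
#        -H "Content-Type: application/json" \
#        -d '{"messages": [{"role": "user", "content": "Hello, who are you?"}]}'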