# Spaces: Runtime error  (Hugging Face Spaces status banner captured with the page scrape;
# kept here as a comment so the file remains valid Python)
from fastapi import FastAPI
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from langchain_ollama import OllamaLLM

app = FastAPI()

# Allow requests from the browser-extension front-end.
# NOTE: CORSMiddleware compares `allow_origins` entries with exact string
# equality, so "chrome-extension://*" would never match a real extension
# origin like "chrome-extension://abcdef...". Use `allow_origin_regex`
# instead to accept any Chrome extension origin.
app.add_middleware(
    CORSMiddleware,
    allow_origin_regex=r"chrome-extension://.*",  # any Chrome extension
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Request model: expects a JSON body with a "text" field.
class MeaningRequest(BaseModel):
    """Incoming payload carrying the text whose meaning is requested."""

    text: str
# Response model: returns the generated meaning as a JSON body.
class MeaningResponse(BaseModel):
    """Outgoing payload carrying the LLM-generated explanation."""

    meaning: str
def get_meaning_from_llm(text: str) -> str:
    """Ask the Ollama LLM for a short (1–2 line) explanation of *text*.

    Args:
        text: The text snippet to explain.

    Returns:
        The model's explanation as a plain string.

    Raises:
        Connection-related errors from the underlying HTTP client when the
        Ollama server behind the ngrok tunnel is unreachable.
    """
    # Prompt nudges the model toward a very short answer.
    prompt = (
        "Explain the meaning of the following text in simple terms "
        f"in only one or two lines not more than that: '{text}'"
    )
    # The Ollama server is reached through an ngrok tunnel; it must be up
    # and reachable for this call to succeed. Low temperature keeps the
    # explanation focused and repeatable.
    llm = OllamaLLM(
        model="llama3.2",
        base_url="https://earwig-exact-slug.ngrok-free.app",
        temperature=0.25,
    )
    # `invoke` is the supported call style in modern LangChain; calling the
    # LLM object directly (llm(prompt)) is deprecated.
    return llm.invoke(prompt)
# NOTE(review): this handler was never registered on the app (no route
# decorator), so the endpoint was unreachable. The path below is inferred
# from the function name — confirm it matches what the extension calls.
@app.post("/get_meaning", response_model=MeaningResponse)
async def get_meaning(request: MeaningRequest) -> MeaningResponse:
    """
    Endpoint to receive text and return its 'meaning' as generated by an LLM.
    """
    print(f"Received text: {request.text}")
    # Extract text from the request
    text = request.text
    # Generate meaning using the LLM call (blocking network call to Ollama).
    meaning = get_meaning_from_llm(text)
    # Return the meaning in a JSON response
    return MeaningResponse(meaning=meaning)
if __name__ == "__main__":
    # Run the FastAPI app with Uvicorn. The "module:attribute" string form
    # is required for reload=True to work; "app:app" assumes this file is
    # named app.py — TODO confirm the actual filename.
    uvicorn.run("app:app", host="0.0.0.0", port=8000, reload=True)