sushku committed on
Commit
ae6764e
·
verified ·
1 Parent(s): f063a81

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -66
app.py CHANGED
@@ -1,66 +1,57 @@
1
- from fastapi import FastAPI
2
- from pydantic import BaseModel
3
- from fastapi.middleware.cors import CORSMiddleware
4
- import uvicorn
5
- from langchain_ollama import OllamaLLM
6
-
7
- app = FastAPI()
8
-
9
- # Allow requests from your front-end's origin.
10
- app.add_middleware(
11
- CORSMiddleware,
12
- allow_origins=["chrome-extension://*"], # Allow Chrome extensions
13
- allow_credentials=True,
14
- allow_methods=["*"],
15
- allow_headers=["*"],
16
- )
17
-
18
-
19
- app.add_middleware(
20
- CORSMiddleware,
21
- allow_origins=["*"],
22
- allow_credentials=True,
23
- allow_methods=["*"],
24
- allow_headers=["*"],
25
- )
26
-
27
- # Define the request model that expects a JSON body with "text"
28
- class MeaningRequest(BaseModel):
29
- text: str
30
-
31
- # Define the response model that will return the meaning
32
- class MeaningResponse(BaseModel):
33
- meaning: str
34
-
35
- def get_meaning_from_llm(text: str) -> str:
36
- """
37
- Get meaning of text using Ollama LLM.
38
- """
39
- # Create a prompt for your LLM
40
- prompt = f"Explain the meaning of the following text in simple terms in only one or two lines not more than that: '{text}'"
41
-
42
- # Make sure this URL is accessible and valid
43
- llm = OllamaLLM(
44
- model="llama3.2",
45
- base_url="https://earwig-exact-slug.ngrok-free.app",
46
- temperature=0.25
47
- )
48
- meaning = llm(prompt)
49
- return meaning
50
-
51
- @app.post("/get_meaning", response_model=MeaningResponse)
52
- async def get_meaning(request: MeaningRequest):
53
- """
54
- Endpoint to receive text and return its 'meaning' as generated by an LLM.
55
- """
56
- print(f"Received text: {request.text}")
57
- # Extract text from the request
58
- text = request.text
59
- # Generate meaning using the LLM call
60
- meaning = get_meaning_from_llm(text)
61
- # Return the meaning in a JSON response
62
- return MeaningResponse(meaning=meaning)
63
-
64
- if __name__ == "__main__":
65
- # Run the FastAPI app with Uvicorn
66
- uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
 
1
+ from fastapi import FastAPI
2
+ from pydantic import BaseModel
3
+ from fastapi.middleware.cors import CORSMiddleware
4
+ import uvicorn
5
+ from langchain_ollama import OllamaLLM
6
+
7
app = FastAPI()

# Allow requests from your front-end's origin.
#
# BUG FIX: CORSMiddleware matches allow_origins entries *exactly* (only the
# literal "*" acts as a wildcard), so allow_origins=["chrome-extension://*"]
# never matched a real extension origin such as
# "chrome-extension://abcdefgh...". Wildcard origins must go through
# allow_origin_regex instead.
app.add_middleware(
    CORSMiddleware,
    allow_origin_regex=r"chrome-extension://.*",  # any Chrome extension
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
17
+
18
+ # Define the request model that expects a JSON body with "text"
19
class MeaningRequest(BaseModel):
    """Request body schema: a JSON object with a single ``text`` field."""

    # The text whose meaning should be explained.
    text: str
21
+
22
+ # Define the response model that will return the meaning
23
class MeaningResponse(BaseModel):
    """Response body schema: the generated explanation under ``meaning``."""

    # LLM-generated explanation of the submitted text.
    meaning: str
25
+
26
def get_meaning_from_llm(text: str) -> str:
    """Get meaning of *text* using the Ollama LLM.

    Returns the model's completion as a plain string. Propagates any
    connection error raised by the underlying HTTP client if the Ollama
    server at ``base_url`` is unreachable.
    """
    # Create a prompt for your LLM; the wording constrains the answer length.
    prompt = (
        "Explain the meaning of the following text in simple terms "
        f"in only one or two lines not more than that: '{text}'"
    )

    # Make sure this URL is accessible and valid.
    # NOTE(review): the client is rebuilt on every call — hoist it to module
    # level if per-request latency becomes a concern.
    llm = OllamaLLM(
        model="llama3.2",
        base_url="https://earwig-exact-slug.ngrok-free.app",
        temperature=0.25,
    )
    # BUG FIX: calling the LLM object directly (``llm(prompt)``) is the
    # deprecated __call__ path in LangChain; ``invoke`` is the supported
    # Runnable entry point and returns the completion string.
    return llm.invoke(prompt)
41
+
42
@app.post("/get_meaning", response_model=MeaningResponse)
async def get_meaning(request: MeaningRequest):
    """Endpoint to receive text and return its 'meaning' as generated by an LLM."""
    print(f"Received text: {request.text}")
    # Delegate generation to the LLM helper and wrap the result in the
    # response model so FastAPI serializes it as {"meaning": ...}.
    explanation = get_meaning_from_llm(request.text)
    return MeaningResponse(meaning=explanation)
54
+
55
if __name__ == "__main__":
    # Run the FastAPI app with Uvicorn; reload=True enables the
    # development auto-reloader on source changes.
    uvicorn.run("app:app", host="0.0.0.0", port=8000, reload=True)