flokabukie committed
Commit: 3494105
Parent(s): f8daf65
Upload main.py
main.py
ADDED
@@ -0,0 +1,84 @@
from fastapi import FastAPI
from pydantic import BaseModel
import pickle
import pandas as pd
import numpy as np
import uvicorn
import os
from sklearn.preprocessing import StandardScaler
import joblib

"""Create the FastAPI instance, i.e. the foundation of our API,
which will be the main part of our project."""

app = FastAPI(title="API")


"""We load the machine learning model and the scaler that help us make predictions based on data."""
model = joblib.load('gbc.pkl', mmap_mode='r')
scaler = joblib.load('scaler.pkl', mmap_mode='r')

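# Note: 'gbc.pkl' and 'scaler.pkl' are assumed to have been produced beforehand
# during model training (the training script is not part of this upload), e.g. with
# joblib.dump(model, 'gbc.pkl') and joblib.dump(scaler, 'scaler.pkl').
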
"""We define a function that will make predictions using our model and scaler."""
def predict(df):
    # Scaling
    scaled_df = scaler.transform(df)

    # Prediction: class probabilities, the most likely class, and its probability
    prediction = model.predict_proba(scaled_df)
    predicted_classes = prediction.argmax(axis=1)
    highest_proba = prediction.max(axis=1)

    predicted_labels = ["Patient does not have sepsis" if c == 0 else "Patient has sepsis" for c in predicted_classes]
    response = []
    for label, proba in zip(predicted_labels, highest_proba):
        output = {
            "prediction": label,
            "probability of prediction": str(round(proba * 100)) + '%'
        }
        response.append(output)
    return response


"""We create a model for the data that our API will work with,
i.e. we define what information each request must contain and how it is organized.
The 'Patient' class below defines the data model used by the prediction endpoint
and represents a single patient's data as six numeric features."""

class Patient(BaseModel):
    Blood_Work_R1: float
    Blood_Pressure: float
    Blood_Work_R3: float
    BMI: float
    Blood_Work_R4: float
    Patient_age: int
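
# An example request body matching this model (field names come from the model above;
# the values here are purely illustrative):
# {"Blood_Work_R1": 120.0, "Blood_Pressure": 80.0, "Blood_Work_R3": 30.0,
#  "BMI": 25.5, "Blood_Work_R4": 0.5, "Patient_age": 45}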


"""The next block of code defines the different parts of our API and how they respond to requests.
It sets up a main page with a short message and a prediction endpoint that receives
a single patient's medical data and returns a prediction."""

@app.get("/")
def root():
    return {"API": "This is an API for sepsis prediction."}

# Prediction endpoint (where we will input our features)
@app.post("/predict")
def predict_sepsis(patient: Patient):
    # Build a one-row DataFrame from the request body and make a prediction;
    # predict() applies the scaler internally, so we pass the raw features.
    data = pd.DataFrame(patient.dict(), index=[0])
    parsed = predict(df=data)
    return {"output": parsed}


if __name__ == "__main__":
    os.environ["DEBUG"] = "True"  # Enable debug mode
    uvicorn.run("main:app", reload=True)
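For reference, a minimal sketch of how a client could call this API once the server is running. It assumes uvicorn's default address http://127.0.0.1:8000, the requests package installed on the client side, and purely illustrative feature values:

import requests

payload = {
    "Blood_Work_R1": 120.0,
    "Blood_Pressure": 80.0,
    "Blood_Work_R3": 30.0,
    "BMI": 25.5,
    "Blood_Work_R4": 0.5,
    "Patient_age": 45,
}

# POST the patient's features to the prediction endpoint and print the JSON response
resp = requests.post("http://127.0.0.1:8000/predict", json=payload)
print(resp.json())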