|
import os

import gdown
import gradio as gr
import onnxruntime as ort
import torch
from PIL import Image
|
|
|
|
|
# Google Drive share link for the pretrained ONNX bone-age model.
model_url = "https://drive.google.com/file/d/18HYScsRJuRmfzL0E0BW35uaA542Vd5M5/view?usp=sharing"

# Cache the model next to the script in the current working directory.
model_path = os.path.join(os.getcwd(), "bone_age_model.onnx")

# Download the model only once; skip if it is already cached locally.
if not os.path.exists(model_path):
    # fuzzy=True lets gdown extract the file id from a full
    # ".../file/d/<id>/view" share URL; without it, gdown cannot parse
    # this URL form and the download fails.
    gdown.download(model_url, model_path, quiet=False, fuzzy=True)

# One ONNX Runtime session, created at startup and reused for every call.
session = ort.InferenceSession(model_path)
|
|
|
|
|
def inference(sample_name):
    """Run the ONNX bone-age model on a pre-saved sample tensor.

    Args:
        sample_name: Basename (without extension) of a ``<name>.pth`` file in
            the working directory. The file holds a dict with keys
            ``'boneage'`` (ground-truth age tensor) and ``'path'`` (model
            input tensor).

    Returns:
        Tuple of (ground-truth bone age, predicted bone age, PIL image),
        one value per Gradio output component, in declaration order.
    """
    sample_path = os.path.join(os.getcwd(), f'{sample_name}.pth')
    sample = torch.load(sample_path)

    age = sample['boneage'].item()

    outputs = session.run(None, {"input": sample['path'].numpy()})
    # De-normalize the model output back to months using the training-set
    # statistics (mean ~= 127.329, std ~= 41.172 — presumably; confirm
    # against the training pipeline).
    predicted_age = (outputs[0] * 41.172) + 127.329

    # NOTE(review): sample['path'] is used both as the model input tensor
    # above and as a source of an image path here — confirm the sample
    # schema; this assumes sample['path'][0] is a filesystem path/openable
    # reference for the X-ray image.
    image_path = sample['path'][0]
    image = Image.open(image_path)

    # The Interface declares three positional output components, so return
    # a tuple — returning a dict only works when keyed by the component
    # objects themselves, so the original dict return broke the UI.
    return age, predicted_age[0][0], image
|
|
|
|
|
# Discover the bundled sample tensors. os.listdir() does not accept glob
# patterns — the original passed 'samples/*.pth' as a literal directory
# name, which always raises FileNotFoundError. List the directory and
# filter by extension instead.
samples_dir = os.path.join(os.getcwd(), 'samples')
sample_files = sorted(
    os.path.join(samples_dir, f)
    for f in os.listdir(samples_dir)
    if f.endswith('.pth')
)

# Dropdown choices: file basenames with the ".pth" extension stripped.
sample_names = [os.path.basename(x).split('.pth')[0] for x in sample_files]
|
|
|
# Input widget: lets the user pick one of the bundled samples by name.
dropdown = gr.inputs.Dropdown(choices=sample_names, label="Select a sample")

# One output component per value produced by `inference`, in order.
output_components = [
    gr.outputs.Textbox(label="Bone Age"),
    gr.outputs.Textbox(label="Predicted Bone Age"),
    gr.outputs.Image(label="Image"),
]

# Wire the inference function to the UI and start the web app.
iface = gr.Interface(
    fn=inference,
    inputs=dropdown,
    outputs=output_components,
    title="Bone Age Prediction",
    description="Select a sample from the dropdown to see the bone age and predicted bone age.",
)

iface.launch()
|
|