import time
from pathlib import Path

import gradio as gr
import numpy as np
from anomalib.deploy import OpenVINOInferencer
from openvino.runtime import Core

# Initialize the OpenVINO runtime core.
core = Core()

# Get the available devices
devices = core.available_devices
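# available_devices typically returns names such as ["CPU", "GPU.0", "GPU.1"],
# depending on the host hardware.
# The compiled inferencer and the last (category, device) selection are cached in the
# globals below, so the model is only re-compiled when the user changes one of them.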
inferencer = None
prev_category_selection = None
prev_device_selection = None
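
# Each example row is [image path, output type, category, device]; the order matches
# the `inputs` list passed to gr.Examples in the UI below.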

example_list = [["bottle/examples/000.png", "anomaly_map", "bottle", "CPU"],
               ["pill/examples/014.png", "heat_map", "pill", "CPU"],
               ["zipper/examples/001.png", "pred_mask", "zipper", "CPU"],
               ["grid/examples/005.png", "segmentations", "grid", "CPU"],
               ["cubes/examples/005.jpg", "heat_map", "cubes", "CPU"]]

def OV_compilemodel(category_choice, device):
    global inferencer
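    # Each category folder is expected to contain an Anomalib OpenVINO export under
    # <category>/run/weights/openvino/ (model.bin plus metadata.json).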
    # Build the paths to the exported model and its metadata file.
    openvino_model_path = Path.cwd() / category_choice / "run" / "weights" / "openvino" / "model.bin"
    metadata_path = Path.cwd() / category_choice / "run" / "weights" / "openvino" / "metadata.json"
    
    inferencer = OpenVINOInferencer(
        path=openvino_model_path,  # Path to the OpenVINO IR model.
        metadata=metadata_path,  # Path to the metadata file.
        device=device,  # Target device selected in the UI (CPU, GPU.0, GPU.1, ...).
        # Request FP16 inference on GPU devices; keep the default precision on CPU.
        config={"INFERENCE_PRECISION_HINT": "f16"} if device != "CPU" else {},
    )
    
    return inferencer
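
# predict() returns a prediction object that exposes several visualisations
# (image, anomaly_map, heat_map, pred_mask, segmentations) plus pred_score, the
# normalized image-level anomaly score used as the confidence value below.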

def OV_inference(input_img, operation, category_choice, device):
    start_time = time.time()
    predictions = inferencer.predict(image=input_img)
    stop_time = time.time()
    inference_time = stop_time - start_time
    confidence = predictions.pred_score
    
    if operation == "original":
        output_img1 = predictions.image
    elif operation == "anomaly_map":
        output_img1 = predictions.anomaly_map
    elif operation == "heat_map":
        output_img1 = predictions.heat_map
    elif operation == "pred_mask":
        output_img1 = predictions.pred_mask
    elif operation == "segmentations":
        output_img1 = predictions.segmentations
    else:
        output_img1 = predictions.image
    return output_img1, round(inference_time * 1000), round(confidence * 100, 2)

# Compile the model if needed, then run inference.
def OV_compile_run_model(category_choice, device_choice, image, output_choice):
    # If a different category or device is selected, compile/re-compile the model.
    global prev_category_selection
    global prev_device_selection
    if device_choice != prev_device_selection or category_choice != prev_category_selection:
        OV_compilemodel(category_choice, device_choice)
        prev_category_selection = category_choice
        prev_device_selection = device_choice
    #Run model
    print("Running model")
    output_img, output_time, output_confidence = OV_inference(image, output_choice, category_choice, device_choice)
    return output_img, output_time, output_confidence

with gr.Blocks() as demo:

    gr.Markdown(
    """
    <img align="left" width="150" src= "https://github.com/openvinotoolkit/anomalib/assets/10940214/7e61a627-d1b0-4ad4-b602-da9b348c0cbe">   
    <img align="right" width="150" src= "https://github.com/openvinotoolkit/anomalib/assets/10940214/5d6dd038-b40c-441f-ad38-1cf526137de2">   
    
    <h1 align="center"> 🚀 Anomaly detection 🚀 </h1>  

    """
    )
    
    with gr.Row():
        with gr.Column():
            gr.Markdown(
                """
                
                
                Experience the power of state-of-the-art anomaly detection with the Anomalib-OpenVINO toolbox. This interactive app leverages the robust capabilities of Anomalib and OpenVINO.
                
                All models are in FP32 precision; if you select a GPU, the inference precision is automatically switched to FP16. Using Anomalib, you can also quantize your model to INT8 with NNCF.
                """
            ) 
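            # The FP16 switch described above is implemented in OV_compilemodel via the
            # INFERENCE_PRECISION_HINT config option; INT8 quantization with NNCF would be
            # a separate, offline export step.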
            
        with gr.Column():
            gr.Markdown(
                """
                <img src="https://github.com/openvinotoolkit/openvino_notebooks/assets/10940214/45dfb61f-c6d1-4098-88d1-8498f0a42e11" alt="drawing" width="500"/>
                """
            ) 
    
    gr.Markdown("## 1. Select the category over you want to detect anormalities.")
    category_choice = gr.Radio(["bottle", "grid", "pill", "zipper", "cubes"], label="Choose the category")
    
    gr.Markdown(
        """
        ## 2. Select the Intel device
        OpenVINO device name  | CPU  | GPU.0  | GPU.1 
        ------------- | ------------  |------------- | -------------
        Intel device  | CPU  |  Integrated GPU  | Discrete GPU  
        
        
        """
    )
    device_choice = gr.Dropdown(devices, label="Choose the device")
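    # The dropdown is populated from core.available_devices, queried once at startup.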

    gr.Markdown("## 3. Choose the output you want to visualize.")
    output_choice = gr.Radio(["original", "anomaly_map", "heat_map", "pred_mask", "segmentations"], label="Choose the output")
    
    gr.Markdown("## 4. Drop the image in the input image box and run the inference")
    with gr.Row():
        with gr.Column():
            image = gr.Image(type="numpy", label="Input image")
            
        with gr.Column():
            output_img = gr.Image(type="numpy", label="Anomalib Output")

    compile_inference_btn = gr.Button("Run Inference")
    
    with gr.Row():
        # Create your output components
        #output_prediction = gr.Textbox(label="Prediction")
        output_confidence = gr.Textbox(label="Confidence [%]")
        output_time = gr.Textbox(label="Inference Time [ms]")
    
    gr.Markdown("Changing your choice of category or device will recompile the model.")    

    gr.Markdown("## OR use our image examples for a quick start")
    gr.Examples(
        examples=example_list,
        inputs=[image, output_choice, category_choice, device_choice],
        outputs=[output_img, output_time, output_confidence],
        fn=OV_compile_run_model)

    compile_inference_btn.click(OV_compile_run_model, inputs=[category_choice, device_choice, image, output_choice], 
        outputs=[output_img, output_time, output_confidence])
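
# enable_queue routes requests through Gradio's queue, which helps avoid request
# timeouts for longer-running inferences.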

demo.launch(enable_queue=True)