Spaces:
Build error
Build error
File size: 4,941 Bytes
c8c12e9 d5f4c11 c8c12e9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 |
"""Anomalib Gradio Script.
This script provides a Gradio web interface for Anomalib inference.
"""
from argparse import ArgumentParser, Namespace
from importlib import import_module
from pathlib import Path
from typing import Tuple, Union
import gradio as gr
import gradio.inputs
import gradio.outputs
import numpy as np
from omegaconf import DictConfig, ListConfig
from skimage.segmentation import mark_boundaries
from anomalib.config import get_configurable_parameters
from anomalib.deploy.inferencers.base import Inferencer
from anomalib.post_processing import compute_mask, superimpose_anomaly_map
def infer(
    image: np.ndarray, inferencer: Inferencer, threshold: float = 50.0
) -> Tuple[np.ndarray, float, np.ndarray, np.ndarray, np.ndarray]:
    """Run inference on an image and build the visualisation artefacts.

    Args:
        image: Input image as a numpy array.
        inferencer: Model inferencer that produces the anomaly map and score.
        threshold: Threshold in the 0-100 range (default 50.0); it is rescaled
            to 0-1 before the prediction mask is computed.

    Returns:
        Tuple of (anomaly_map, anomaly_score, heat_map, pred_mask, vis_img).
    """
    # The Gradio slider works in 0-100, while the mask computation expects 0-1.
    normalized_threshold = threshold / 100
    anomaly_map, anomaly_score = inferencer.predict(image=image, superimpose=False)
    heat_map = superimpose_anomaly_map(anomaly_map, image)
    pred_mask = compute_mask(anomaly_map, normalized_threshold)
    vis_img = mark_boundaries(image, pred_mask, color=(1, 0, 0), mode="thick")
    return anomaly_map, anomaly_score, heat_map, pred_mask, vis_img
def _str_to_bool(value: str) -> bool:
    """Parse a CLI boolean: 'true'/'t'/'yes'/'y'/'1' (any case) -> True, else False."""
    return value.strip().lower() in ("true", "t", "yes", "y", "1")


def get_args() -> Namespace:
    """Get command line arguments.

    Returns:
        Namespace: List of arguments.
    """
    parser = ArgumentParser()
    parser.add_argument("--config", type=Path, default="/home/user/app/anomalib/models/padim/config.yaml", required=False, help="Path to a model config file")
    parser.add_argument("--weight_path", type=Path, default="/home/user/app/results/padim//mvtec/bottle/weights/model.ckpt", required=False, help="Path to a model weights")
    parser.add_argument("--meta_data", type=Path, required=False, help="Path to JSON file containing the metadata.")
    parser.add_argument(
        "--threshold",
        type=float,
        required=False,
        default=75.0,
        help="Value to threshold anomaly scores into 0-100 range",
    )
    # NOTE: `type=bool` would apply bool() to the raw string, so "--share False"
    # (a non-empty string) would be parsed as True. Use an explicit converter.
    parser.add_argument("--share", type=_str_to_bool, required=False, default=False, help="Share Gradio `share_url`")
    args = parser.parse_args()
    return args
def get_inferencer(gladio_args: Union[DictConfig, ListConfig]) -> Inferencer:
    """Parse args and open the inferencer matching the weight file's extension.

    Args:
        gladio_args: Parsed arguments providing ``config``, ``weight_path``
            and ``meta_data`` attributes.

    Returns:
        Inferencer: Torch inferencer for ``.ckpt`` weights, OpenVINO
        inferencer for ``.onnx``/``.bin``/``.xml`` weights.

    Raises:
        ValueError: If the weight file extension is not supported.
    """
    config = get_configurable_parameters(config_path=gladio_args.config)

    # Get the inferencer. We use .ckpt extension for Torch models and (onnx, bin)
    # for the openvino models.
    extension = gladio_args.weight_path.suffix
    inferencer: Inferencer
    # BUG FIX: `extension in (".ckpt")` was a substring test on a plain string
    # (the parentheses do not make a tuple), so e.g. suffix "." would match.
    if extension == ".ckpt":
        # Import lazily so the OpenVINO path does not require torch (and vice versa).
        module = import_module("anomalib.deploy.inferencers.torch")
        TorchInferencer = getattr(module, "TorchInferencer")  # pylint: disable=invalid-name
        inferencer = TorchInferencer(
            config=config, model_source=gladio_args.weight_path, meta_data_path=gladio_args.meta_data
        )
    elif extension in (".onnx", ".bin", ".xml"):
        module = import_module("anomalib.deploy.inferencers.openvino")
        OpenVINOInferencer = getattr(module, "OpenVINOInferencer")  # pylint: disable=invalid-name
        inferencer = OpenVINOInferencer(
            config=config, path=gladio_args.weight_path, meta_data_path=gladio_args.meta_data
        )
    else:
        # Fixed typo ("exptects") and the missing space between the two
        # concatenated f-string pieces ("file,OpenVINO").
        raise ValueError(
            f"Model extension is not supported. Torch Inferencer expects a .ckpt file, "
            f"OpenVINO Inferencer expects either .onnx, .bin or .xml file. Got {extension}"
        )
    return inferencer
if __name__ == "__main__":
    # Parse CLI options once and build the matching inferencer up front.
    args = get_args()
    inferencer = get_inferencer(args)

    # Input widgets: the uploaded image and the anomaly threshold slider.
    image_input = gradio.inputs.Image(
        shape=None, image_mode="RGB", source="upload", tool="editor", type="numpy", label="Image"
    )
    threshold_input = gradio.inputs.Slider(default=args.threshold, label="threshold", optional=False)

    interface = gr.Interface(
        # Gradio only passes the widget values; bind the inferencer here.
        fn=lambda image, threshold: infer(image, inferencer, threshold),
        inputs=[image_input, threshold_input],
        outputs=[
            gradio.outputs.Image(type="numpy", label="Anomaly Map"),
            gradio.outputs.Textbox(type="number", label="Anomaly Score"),
            gradio.outputs.Image(type="numpy", label="Predicted Heat Map"),
            gradio.outputs.Image(type="numpy", label="Predicted Mask"),
            gradio.outputs.Image(type="numpy", label="Segmentation Result"),
        ],
        title="Anomalib",
        description="Anomalib Gradio",
    )
    interface.launch(share=args.share)
|