"""Tests for Torch and OpenVINO inferencers."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Union

import pytest
import torch
from omegaconf import DictConfig, ListConfig
from pytorch_lightning import Trainer

from anomalib.config import get_configurable_parameters
from anomalib.data import get_datamodule
from anomalib.deploy import OpenVINOInferencer, TorchInferencer, export_convert
from anomalib.models import get_model
from tests.helpers.dataset import TestDataset, get_dataset_path
from tests.helpers.inference import MockImageLoader, get_meta_data


def get_model_config(
    model_name: str, project_path: str, dataset_path: str, category: str
) -> Union[DictConfig, ListConfig]:
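    """Return the configurable parameters for ``model_name``.

    The project path, dataset path, category and ``max_epochs=1`` are overridden
    so the tests run quickly against the synthetic test dataset.
    """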
    model_config = get_configurable_parameters(model_name=model_name)
    model_config.project.path = project_path
    model_config.dataset.path = dataset_path
    model_config.dataset.category = category
    model_config.trainer.max_epochs = 1
    return model_config


class TestInferencers:
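    """Checks that the Torch and OpenVINO inferencers run end to end for a set of models."""
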
    @pytest.mark.parametrize(
        "model_name",
        ["padim", "stfpm", "patchcore", "dfm", "dfkde", "ganomaly", "cflow"],
    )
    @TestDataset(num_train=20, num_test=1, path=get_dataset_path(), use_mvtec=False)
    def test_torch_inference(self, model_name: str, category: str = "shapes", path: str = "./datasets/MVTec"):
        """Tests Torch inference.
        Model is not trained as this checks that the inferencers are working.
        Args:
            model_name (str): Name of the model
        """
        with TemporaryDirectory() as project_path:
            model_config = get_model_config(
                model_name=model_name, dataset_path=path, category=category, project_path=project_path
            )

            model = get_model(model_config)
            trainer = Trainer(logger=False, **model_config.trainer)
            datamodule = get_datamodule(model_config)

            # One-epoch fit (max_epochs=1 in get_model_config) gives the inferencer weights to run with.
            trainer.fit(model=model, datamodule=datamodule)

            model.eval()

            # Test torch inferencer
            torch_inferencer = TorchInferencer(model_config, model)
            torch_dataloader = MockImageLoader(model_config.dataset.image_size, total_count=1)
            meta_data = get_meta_data(model, model_config.dataset.image_size)
            with torch.no_grad():
                for image in torch_dataloader():
                    torch_inferencer.predict(image, superimpose=False, meta_data=meta_data)

    @pytest.mark.parametrize(
        "model_name",
        [
            "padim",
            "stfpm",
            "dfm",
            "ganomaly",
        ],
    )
    @TestDataset(num_train=20, num_test=1, path=get_dataset_path(), use_mvtec=False)
    def test_openvino_inference(self, model_name: str, category: str = "shapes", path: str = "./datasets/MVTec"):
        """Tests OpenVINO inference.
        Model is not trained as this checks that the inferencers are working.
        Args:
            model_name (str): Name of the model
        """
        with TemporaryDirectory() as project_path:
            model_config = get_model_config(
                model_name=model_name, dataset_path=path, category=category, project_path=project_path
            )
            export_path = Path(project_path)

            model = get_model(model_config)
            trainer = Trainer(logger=False, **model_config.trainer)
            datamodule = get_datamodule(model_config)
            trainer.fit(model=model, datamodule=datamodule)

            # Export the trained model to ONNX and convert it to OpenVINO IR (model.xml / model.bin).
            export_convert(
                model=model,
                input_size=model_config.dataset.image_size,
                onnx_path=export_path / "model.onnx",
                export_path=export_path,
            )

            # Test OpenVINO inferencer
            openvino_inferencer = OpenVINOInferencer(model_config, export_path / "model.xml")
            openvino_dataloader = MockImageLoader(model_config.dataset.image_size, total_count=1)
            meta_data = get_meta_data(model, model_config.dataset.image_size)
            for image in openvino_dataloader():
                openvino_inferencer.predict(image, superimpose=False, meta_data=meta_data)