Mergeback 1.4.4 to 1.5.0 #2745

Merged: 3 commits, Dec 22, 2023

1 change: 1 addition & 0 deletions .gitignore
@@ -18,6 +18,7 @@ results/
build/
dist/
!src/otx/recipes/**
+src/otx/recipes/**/__pycache__/
*egg-info

*.pth

2 changes: 1 addition & 1 deletion requirements/openvino.txt
@@ -2,7 +2,7 @@
# OpenVINO Requirements. #
nncf==2.6.0
onnx==1.13.0
-openvino-model-api==0.1.6
+openvino-model-api==0.1.8
openvino==2023.0
openvino-dev==2023.0
openvino-telemetry==2023.2.*
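
The openvino-model-api pin moves from 0.1.6 to 0.1.8; the same bump appears in the second requirements file further down. A minimal sketch (not part of the diff) for checking which Model API release an environment actually has installed:

```python
from importlib.metadata import PackageNotFoundError, version

# Sanity check (illustrative, not part of the PR): report the installed Model API release.
try:
    print("openvino-model-api:", version("openvino-model-api"))
except PackageNotFoundError:
    print("openvino-model-api is not installed")
```
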
11 changes: 7 additions & 4 deletions src/otx/algorithms/anomaly/tasks/openvino.py
@@ -188,27 +188,30 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter
                label = self.anomalous_label if image_result.pred_score >= 0.5 else self.normal_label
            elif self.task_type == TaskType.ANOMALY_SEGMENTATION:
                annotations = create_annotation_from_segmentation_map(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                )
                dataset_item.append_annotations(annotations)
                label = self.normal_label if len(annotations) == 0 else self.anomalous_label
            elif self.task_type == TaskType.ANOMALY_DETECTION:
                annotations = create_detection_annotation_from_anomaly_heatmap(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                )
                dataset_item.append_annotations(annotations)
                label = self.normal_label if len(annotations) == 0 else self.anomalous_label
            else:
                raise ValueError(f"Unknown task type: {self.task_type}")

            dataset_item.append_labels([ScoredLabel(label=label, probability=float(probability))])
-            anomaly_map = (image_result.anomaly_map * 255).astype(np.uint8)
            heatmap_media = ResultMediaEntity(
                name="Anomaly Map",
                type="anomaly_map",
                label=label,
                annotation_scene=dataset_item.annotation_scene,
-                numpy=anomaly_map,
+                numpy=image_result.anomaly_map,
            )
            dataset_item.append_metadata_item(heatmap_media)
            update_progress_callback(int((idx + 1) / len(dataset) * 100))
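
These edits suggest that the Model API now returns the anomaly map as a uint8 heatmap in the [0, 255] range: the map is divided by 255.0 before annotation post-processing, and the raw map is passed to ResultMediaEntity instead of being rescaled with `(map * 255).astype(np.uint8)`. A minimal standalone sketch of that normalization (shapes and values are illustrative assumptions, not taken from the PR):

```python
import numpy as np

# Illustrative uint8 anomaly heatmap as a model wrapper might return it (assumed shape).
anomaly_map = np.random.randint(0, 256, size=(1, 256, 256), dtype=np.uint8)

# Rescale to [0, 1] for annotation post-processing that expects float scores.
normalized = anomaly_map.squeeze() / 255.0
assert normalized.min() >= 0.0 and normalized.max() <= 1.0

# The raw uint8 map can be used directly for visualization media;
# no extra (*255).astype(np.uint8) step is needed anymore.
heatmap_for_display = anomaly_map
```
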
1 addition & 1 deletion (file name not shown in this view)
@@ -1,4 +1,4 @@
openvino==2023.0
-openvino-model-api==0.1.6
+openvino-model-api==0.1.8
otx==1.5.0
numpy>=1.21.0,<=1.23.5 # np.bool was removed in 1.24.0 which was used in openvino runtime

1 addition & 1 deletion (file name not shown in this view)
@@ -380,7 +380,7 @@ def convert_to_annotation(self, predictions: AnomalyResult, metadata: Dict[str,
        assert predictions.pred_mask is not None
        assert predictions.anomaly_map is not None
        annotations = create_annotation_from_segmentation_map(
-            predictions.pred_mask, predictions.anomaly_map, self.label_map
+            predictions.pred_mask, predictions.anomaly_map / 255.0, self.label_map
        )
        if len(annotations) == 0:
            # TODO: add confidence to this label

2 changes: 2 additions & 0 deletions src/otx/core/ov/ops/infrastructures.py
@@ -233,6 +233,8 @@
            if not np.array_equal(data, data_):
                logger.warning(f"Overflow detected in {op_name}")
            data = torch.from_numpy(data_)
+        elif data.dtype == np.uint16:
+            data = torch.from_numpy(data.astype(np.int32))
        else:
            data = torch.from_numpy(data)

Codecov warning: added line src/otx/core/ov/ops/infrastructures.py#L237 was not covered by tests.
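
The new uint16 branch works around torch's lack of a native uint16 dtype (in the torch releases this code targets): torch.from_numpy raises a TypeError on such arrays, and casting to int32 represents every uint16 value exactly. A small self-contained sketch of the same workaround (array contents are illustrative):

```python
import numpy as np
import torch

# uint16 constants (e.g. read from an OpenVINO IR) cannot be wrapped directly:
# torch.from_numpy() has no matching torch dtype for np.uint16 here.
data = np.array([0, 1, 65535], dtype=np.uint16)

# Casting to int32 is lossless for the full uint16 range [0, 65535].
tensor = torch.from_numpy(data.astype(np.int32))
print(tensor.dtype)     # torch.int32
print(tensor.tolist())  # [0, 1, 65535]
```
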
1 change: 1 addition & 0 deletions src/otx/core/ov/ops/type_conversions.py
@@ -25,6 +25,7 @@
    "u1": torch.uint8,  # no type in torch
    "u4": torch.uint8,  # no type in torch
    "u8": torch.uint8,
+    "u16": torch.int32,  # no type in torch
    "u32": torch.int32,  # no type in torch
    "u64": torch.int64,  # no type in torch
    "i4": torch.int8,  # no type in torch
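
This table maps OpenVINO element-type strings to the nearest torch dtype, widening unsigned types that torch cannot represent natively. A small sketch of how such a lookup behaves; the dictionary and helper names below are illustrative, not the module's actual API:

```python
import torch

# Illustrative subset of an OpenVINO element-type -> torch dtype table.
# Unsigned types without a torch equivalent are widened to a signed dtype.
OV_TO_TORCH = {
    "u8": torch.uint8,
    "u16": torch.int32,  # no uint16 in torch; int32 covers the full range
    "u32": torch.int32,  # no uint32 in torch; may overflow above 2**31 - 1
    "u64": torch.int64,  # no uint64 in torch; may overflow above 2**63 - 1
    "i32": torch.int32,
}

def to_torch_dtype(ov_type: str) -> torch.dtype:
    """Return the torch dtype used to host the given OpenVINO element type."""
    return OV_TO_TORCH[ov_type]

print(to_torch_dtype("u16"))  # torch.int32
```
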
2 changes: 2 additions & 0 deletions tests/unit/core/ov/graph/test_ov_graph_utils.py
@@ -2,6 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#

+import pytest
from otx.core.ov.graph.graph import Graph
from otx.core.ov.graph.utils import (
    get_constant_input_nodes,
@@ -38,6 +39,7 @@ def test_handle_merging_into_batchnorm():


@e2e_pytest_unit
+@pytest.mark.skip(reason="Updated models are not compatible with the paired batchnorm converter")
def test_handle_paired_batchnorm():
    graph = get_graph()
    handle_paired_batchnorm(graph)