diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 54278b0..cc2201f 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -34,9 +34,6 @@ jobs: with: results_file: results.sarif results_format: sarif - # Read-only PAT token. Necessary for the Branch-Protection check. - # On creating/modifying the PAT token, see https://github.com/ossf/scorecard-action#authentication-with-pat. - repo_token: ${{ secrets.SCORECARD_READ_TOKEN }} # Publish the results for public repositories to enable scorecard badges. For more details, see # https://github.com/ossf/scorecard-action#publishing-results. diff --git a/CHANGELOG b/CHANGELOG index 8e5ab2f..ee3deed 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -3,7 +3,11 @@ ## [Unreleased] ### Added -- + +## [1.0.1] + +### Added +- Overlay graphs to display both base detection and tracked detections. ### Changed - Replace absl::make_unique with std::make_unique (introduced in C++14). diff --git a/docs/changelog.md b/docs/changelog.md index 9c7caee..110c0cb 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -9,6 +9,11 @@ nav_order: 4 This document contains a summary of all changes to the Magritte open-source library. +## 1.0.1 + +* Add overlay graphs to display both base detection and tracked detections. +* Replace absl::make_unique with std::make_unique (introduced in C++14). + ## 1.0.0 * First release to GitHub. diff --git a/docs/technical_guide/graphs.md b/docs/technical_guide/graphs.md index 7972421..16fab7b 100644 --- a/docs/technical_guide/graphs.md +++ b/docs/technical_guide/graphs.md @@ -345,6 +345,89 @@ files in the test_data directory. **Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/face_sticker_redaction_offline_cpu.pbtxt) +#### FaceTrackingOverlayLiveGpu + +A graph that detects and tracks faces to draw debug information. 
+ +It draws in red the raw detection output (detection box, keypoints, score), +and in blue the tracked detection output (detection box, keypoints, score, +when present). + +This graph is specialized for GPU architectures. It is also optimized for live +streaming environments by throttling the input stream and by applying only +full-range face detection. + +**Input streams:** + +* `input_video`: A GpuBuffer stream containing the image on which detection + models are run. + +**Output streams:** + +* `output_video`: A GpuBuffer stream containing the image annotated with + detections data. + +**Build targets:** + +* Graph `cc_library`: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_live_gpu + ``` +* Text proto file: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_live_gpu.pbtxt + ``` +* Binary graph: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_live_gpu_graph + ``` + +**Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/face_tracking_overlay_live_gpu.pbtxt) + +#### FaceTrackingOverlayOfflineCpu + +A graph that detects and tracks faces to draw debug information. + +It draws in red the raw detection output (detection box, keypoints, score), +and in blue the tracked detection output (detection box, keypoints, score, +when present). + +This graph is specialized for CPU architectures and offline environments +(no throttling is applied). + +**Input streams:** + +* `input_video`: An ImageFrame stream containing the image on which detection + models are run. + +**Output streams:** + +* `output_video`: An ImageFrame stream containing the image annotated with + detections data. 
+ +**Build targets:** + +* Graph `cc_library`: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_offline_cpu + ``` +* Text proto file: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_offline_cpu.pbtxt + ``` +* Binary graph: + + ``` + @magritte//magritte/graphs:face_tracking_overlay_offline_cpu_graph + ``` + +**Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/face_tracking_overlay_offline_cpu.pbtxt) + ## Subgraphs ### Detection @@ -879,6 +962,78 @@ the ovals will be white. **Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/redaction/detection_to_mask/face_detection_to_mask_gpu.pbtxt) +#### DetectionTrackingOverlaySubgraphCpu + +Subgraph to draw debug information at the locations specified by two incoming +detection streams. For each detection stream, it will draw all data available +from detections, including the bounding box, keypoints and score. + +**Input streams:** + +* `IMAGE`: An ImageFrame containing the image to draw the overlays on. +* `[0]`: A std::vector, will be rendered in red. +* `[1]`: A std::vector, will be rendered in blue. + +**Output streams:** + +* `IMAGE`: The resulting image. + +**Build targets:** + +* Graph `cc_library`: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_cpu + ``` +* Text proto file: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_cpu.pbtxt + ``` +* Binary graph: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_cpu_graph + ``` + +**Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/redaction/detection_tracking_overlay_cpu.pbtxt) + +#### DetectionTrackingOverlaySubgraphGpu + +Subgraph to draw debug information at the locations specified by two incoming +detection streams. For each detection stream, it will draw all data available +from detections, including the bounding box, keypoints and score. 
+ +**Input streams:** + +* `IMAGE_GPU`: A GpuBuffer containing the image to draw the overlays on. +* `[0]`: A std::vector, will be rendered in red. +* `[1]`: A std::vector, will be rendered in blue. + +**Output streams:** + +* `IMAGE_GPU`: The resulting image. + +**Build targets:** + +* Graph `cc_library`: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_gpu + ``` +* Text proto file: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_gpu.pbtxt + ``` +* Binary graph: + + ``` + @magritte//magritte/graphs/redaction:detection_tracking_overlay_gpu_graph + ``` + +**Code:** [source code](https://github.com/google/magritte/blob/master/magritte/graphs/redaction/detection_tracking_overlay_gpu.pbtxt) + #### FaceDetectionOverlaySubgraphCpu Subgraph to draw debug information at the locations specified by incoming diff --git a/magritte/calculators/BUILD b/magritte/calculators/BUILD index c3a3e02..5cc824b 100644 --- a/magritte/calculators/BUILD +++ b/magritte/calculators/BUILD @@ -111,7 +111,6 @@ mediapipe_proto_library( cc_library( name = "pixelization_calculator_gpu", srcs = ["pixelization_calculator_gpu.cc"], - visibility = ["//visibility:public"], deps = [ ":pixelization_calculator_cc_proto", "@mediapipe//mediapipe/framework:calculator_framework", @@ -154,7 +153,6 @@ cc_library( name = "blend_calculator", srcs = ["blend_calculator.cc"], hdrs = ["blend_calculator.h"], - visibility = ["//visibility:public"], deps = [ "@mediapipe//mediapipe/framework:calculator_framework", "@mediapipe//mediapipe/framework/formats:image_frame", diff --git a/magritte/calculators/rois_to_sprite_list_calculator.cc b/magritte/calculators/rois_to_sprite_list_calculator.cc index 91cde3c..399e958 100644 --- a/magritte/calculators/rois_to_sprite_list_calculator.cc +++ b/magritte/calculators/rois_to_sprite_list_calculator.cc @@ -45,9 +45,9 @@ using ::mediapipe::GlTexture; using ::mediapipe::GpuBuffer; #endif // !MEDIAPIPE_DISABLE_GPU using 
::mediapipe::MakePacket; -using ::mediapipe::NormalizedRect; using ::mediapipe::formats::MatView; using ::mediapipe::ImageFormat; +using ::mediapipe::NormalizedRect; using NormalizedRects = std::vector; using Size = std::pair; diff --git a/magritte/calculators/rois_to_sprite_list_calculator_test.cc b/magritte/calculators/rois_to_sprite_list_calculator_test.cc index f69ce52..b7b0543 100644 --- a/magritte/calculators/rois_to_sprite_list_calculator_test.cc +++ b/magritte/calculators/rois_to_sprite_list_calculator_test.cc @@ -44,10 +44,10 @@ using ::mediapipe::CalculatorGraphConfig; using ::mediapipe::CalculatorRunner; using ::mediapipe::ImageFormat; using ::mediapipe::ImageFrame; -using ::mediapipe::NormalizedRect; using ::mediapipe::Packet; using ::mediapipe::Timestamp; using ::mediapipe::formats::MatView; +using ::mediapipe::NormalizedRect; using SizeFloat = std::pair; using SizeInt = std::pair; diff --git a/magritte/examples/desktop/BUILD b/magritte/examples/desktop/BUILD index 9ba1a62..db323ec 100644 --- a/magritte/examples/desktop/BUILD +++ b/magritte/examples/desktop/BUILD @@ -50,6 +50,7 @@ _top_level_graph_targets = [ "//magritte/graphs:face_blur_with_tracking_live_cpu", "//magritte/graphs:face_blur_with_tracking_offline_cpu", "//magritte/graphs:face_overlay_offline_cpu", + "//magritte/graphs:face_tracking_overlay_offline_cpu", "//magritte/graphs:face_pixelization_offline_cpu", "//magritte/graphs:face_sticker_redaction_offline_cpu", ] + select({ diff --git a/magritte/graphs/BUILD b/magritte/graphs/BUILD index d01e264..8d3cd84 100644 --- a/magritte/graphs/BUILD +++ b/magritte/graphs/BUILD @@ -44,6 +44,17 @@ magritte_graph( ], ) +magritte_graph( + name = "face_tracking_overlay_offline_cpu", + graph = "face_tracking_overlay_offline_cpu.pbtxt", + register_as = "FaceTrackingOverlayOfflineCpu", + deps = [ + "//magritte/graphs/detection:face_detection_short_and_full_range_cpu", + "//magritte/graphs/redaction:detection_tracking_overlay_cpu", + 
"//magritte/graphs/tracking:tracking_cpu", + ], +) + magritte_graph( name = "face_blur_with_tracking_offline_cpu", graph = "face_blur_with_tracking_offline_cpu.pbtxt", @@ -89,6 +100,18 @@ magritte_graph( ], ) +magritte_graph( + name = "face_tracking_overlay_live_gpu", + graph = "face_tracking_overlay_live_gpu.pbtxt", + register_as = "FaceTrackingOverlayLiveGpu", + deps = [ + "//magritte/graphs/detection:face_detection_full_range_gpu", + "//magritte/graphs/redaction:detection_tracking_overlay_gpu", + "//magritte/graphs/tracking:tracking_gpu", + "@mediapipe//mediapipe/calculators/core:flow_limiter_calculator", + ], +) + magritte_graph( name = "face_sticker_redaction_offline_cpu", graph = "face_sticker_redaction_offline_cpu.pbtxt", diff --git a/magritte/graphs/face_tracking_overlay_live_gpu.pbtxt b/magritte/graphs/face_tracking_overlay_live_gpu.pbtxt new file mode 100644 index 0000000..fcd2370 --- /dev/null +++ b/magritte/graphs/face_tracking_overlay_live_gpu.pbtxt @@ -0,0 +1,71 @@ +# +# Copyright 2020-2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +package: "magritte" +type: "FaceTrackingOverlayLiveGpu" + +# A graph that detects and tracks faces to draw debug information. +# +# It draws in red the raw detection output (detection box, keypoints, score), +# and in blue the tracked detection output (detection box, keypoints, score, +# when present). +# +# This graph is specialized for GPU architectures. 
It is also optimized for live +# streaming environments by throttling the input stream and by applying only +# full-range face detection. +# +# Inputs: +# - input_video: A GpuBuffer stream containing the image on which detection +# models are run. +# +# Outputs: +# - output_video: A GpuBuffer stream containing the image annotated with +# detections data. + +input_stream: "input_video" +output_stream: "output_video" + +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +node { + calculator: "FaceDetectionFullRangeGpu" + input_stream: "IMAGE:throttled_input_video" + output_stream: "DETECTIONS:detections" +} + +node { + calculator: "TrackingSubgraphGpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "DETECTIONS:detections" + output_stream: "DETECTIONS:tracked_detections" +} + +node { + calculator: "DetectionTrackingOverlaySubgraphGpu" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "detections" + input_stream: "tracked_detections" + output_stream: "IMAGE_GPU:output_video" +} + diff --git a/magritte/graphs/face_tracking_overlay_offline_cpu.pbtxt b/magritte/graphs/face_tracking_overlay_offline_cpu.pbtxt new file mode 100644 index 0000000..ff9c1b2 --- /dev/null +++ b/magritte/graphs/face_tracking_overlay_offline_cpu.pbtxt @@ -0,0 +1,58 @@ +# +# Copyright 2019-2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +package: "magritte" +type: "FaceTrackingOverlayOfflineCpu" + +# A graph that detects and tracks faces to draw debug information. +# +# It draws in red the raw detection output (detection box, keypoints, score), +# and in blue the tracked detection output (detection box, keypoints, score, +# when present). +# +# This graph is specialized for CPU architectures and offline environments +# (no throttling is applied). +# +# Inputs: +# - input_video: An ImageFrame stream containing the image on which detection +# models are run. +# +# Outputs: +# - output_video: An ImageFrame stream containing the image annotated with +# detections data. + +input_stream: "input_video" +output_stream: "output_video" + +node { + calculator: "FaceDetectionFullRangeCpu" + input_stream: "IMAGE:input_video" + output_stream: "DETECTIONS:detections" +} + +node { + calculator: "TrackingSubgraphCpu" + input_stream: "IMAGE:input_video" + input_stream: "DETECTIONS:detections" + output_stream: "DETECTIONS:tracked_detections" +} + +node { + calculator: "DetectionTrackingOverlaySubgraphCpu" + input_stream: "IMAGE:input_video" + input_stream: "detections" + input_stream: "tracked_detections" + output_stream: "IMAGE:output_video" +} diff --git a/magritte/graphs/redaction/BUILD b/magritte/graphs/redaction/BUILD index adaf500..02f2cf2 100644 --- a/magritte/graphs/redaction/BUILD +++ b/magritte/graphs/redaction/BUILD @@ -71,6 +71,26 @@ magritte_graph( ], ) +magritte_graph( + name = "detection_tracking_overlay_cpu", + graph = "detection_tracking_overlay_cpu.pbtxt", + register_as = "DetectionTrackingOverlaySubgraphCpu", + deps = [ + "@mediapipe//mediapipe/calculators/util:annotation_overlay_calculator", + "@mediapipe//mediapipe/calculators/util:detections_to_render_data_calculator", + ], +) + +magritte_graph( + name = "detection_tracking_overlay_gpu", + graph = "detection_tracking_overlay_gpu.pbtxt", + 
register_as = "DetectionTrackingOverlaySubgraphGpu", + deps = [ + "@mediapipe//mediapipe/calculators/util:annotation_overlay_calculator", + "@mediapipe//mediapipe/calculators/util:detections_to_render_data_calculator", + ], +) + magritte_graph( name = "face_sticker_redaction_cpu", data = ["//magritte/test_data:emoji.png"], diff --git a/magritte/graphs/redaction/detection_tracking_overlay_cpu.pbtxt b/magritte/graphs/redaction/detection_tracking_overlay_cpu.pbtxt new file mode 100644 index 0000000..32bdd7b --- /dev/null +++ b/magritte/graphs/redaction/detection_tracking_overlay_cpu.pbtxt @@ -0,0 +1,70 @@ +# +# Copyright 2019-2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +package: "magritte" +type: "DetectionTrackingOverlaySubgraphCpu" + +# Subgraph to draw debug information at the locations specified by two incoming +# detection streams. For each detection stream, it will draw all data available +# from detections, including the bounding box, keypoints and score. +# +# Inputs: +# - IMAGE: An ImageFrame containing the image to draw the overlays on. +# - A std::vector, will be rendered in red. +# - A std::vector, will be rendered in blue. +# +# Outputs: +# - IMAGE: The resulting image. +input_stream: "IMAGE:input_video" +input_stream: "output_detections" +input_stream: "tracked_detections" +output_stream: "IMAGE:output_video" + +# Converts the detections to drawing primitives for annotation overlay. 
+node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + render_detection_id: true + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:tracked_detections" + output_stream: "RENDER_DATA:tracked_detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 0 b: 255 } + render_detection_id: true + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "detections_render_data" + input_stream: "tracked_detections_render_data" + output_stream: "IMAGE:output_video" +} diff --git a/magritte/graphs/redaction/detection_tracking_overlay_gpu.pbtxt b/magritte/graphs/redaction/detection_tracking_overlay_gpu.pbtxt new file mode 100644 index 0000000..170e53c --- /dev/null +++ b/magritte/graphs/redaction/detection_tracking_overlay_gpu.pbtxt @@ -0,0 +1,70 @@ +# +# Copyright 2019-2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +package: "magritte" +type: "DetectionTrackingOverlaySubgraphGpu" + +# Subgraph to draw debug information at the locations specified by two incoming +# detection streams. For each detection stream, it will draw all data available +# from detections, including the bounding box, keypoints and score. +# +# Inputs: +# - IMAGE_GPU: A GpuBuffer containing the image to draw the overlays on. +# - A std::vector, will be rendered in red. +# - A std::vector, will be rendered in blue. +# +# Outputs: +# - IMAGE_GPU: The resulting image. +input_stream: "IMAGE_GPU:input_video" +input_stream: "output_detections" +input_stream: "tracked_detections" +output_stream: "IMAGE_GPU:output_video" + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + render_detection_id: true + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:tracked_detections" + output_stream: "RENDER_DATA:tracked_detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 0 b: 255 } + render_detection_id: true + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:input_video" + input_stream: "detections_render_data" + input_stream: "tracked_detections_render_data" + output_stream: "IMAGE_GPU:output_video" +}