Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix WB control and generate UV-map test artifacts #4914

Merged
merged 5 commits into from
Oct 23, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions common/tiny-profiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,12 @@ class scoped_timer
auto l = strlen(key);
std::cout << key;
std::cout << " ";
for (int i = 0; i < 50 - l - profiler::instance().scope * 2; i++)
for (int i = 0; i < 50 - int(l) - profiler::instance().scope * 2; i++)
std::cout << ".";
auto avg = (profiler::instance().duration[key]
/ profiler::instance().counts[key]);
std::cout << " ";
tocout(avg);
tocout(long(avg));
std::cout << " usec,\t" << (profiler::instance().counts[key] / 2) << " Hz\n";
profiler::instance().duration[key] = 0;
profiler::instance().counts[key] = 1;
Expand Down
2 changes: 1 addition & 1 deletion include/librealsense2/hpp/rs_frame.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -521,7 +521,7 @@ namespace rs2

/**
* retrieve data size from frame handle
* \return the pointer to the start of the frame data
* \return the number of bytes in frame
*/
const int get_data_size() const
{
Expand Down
4 changes: 2 additions & 2 deletions src/ds5/ds5-nonmonochrome.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,15 @@ namespace librealsense
using namespace ds;

auto pid = group.uvc_devices.front().pid;
if ((_fw_version >= firmware_version("5.5.8.0")) && (pid != RS_USB2_PID))
if ((_fw_version >= firmware_version("5.5.8.0")) && (!val_in_range(pid, { RS_USB2_PID, RS465_PID })))
{
get_depth_sensor().register_option(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE,
std::make_shared<uvc_xu_option<uint8_t>>(get_depth_sensor(),
depth_xu,
DS5_ENABLE_AUTO_WHITE_BALANCE,
"Enable Auto White Balance"));

// RS400 rolling-shutter Skus allow to get low-quality color image from the same viewport as the depth
// RS400 rolling-shutter SKUs allow to get low-quality color image from the same viewport as the depth
get_depth_sensor().register_pixel_format(pf_uyvyl);
get_depth_sensor().register_pixel_format(pf_rgb888);
get_depth_sensor().register_pixel_format(pf_w10);
Expand Down
3 changes: 2 additions & 1 deletion unit-tests/internal/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@ set (INTERNAL_TESTS_SOURCES
internal-tests-main.cpp
internal-tests-usb.cpp
internal-tests-extrinsic.cpp
internal-tests-types.cpp
internal-tests-types.cpp
internal-tests-uv-map.cpp
)

add_executable(${PROJECT_NAME} ${INTERNAL_TESTS_SOURCES})
Expand Down
223 changes: 223 additions & 0 deletions unit-tests/internal/internal-tests-uv-map.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,223 @@
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2015 Intel Corporation. All Rights Reserved.

#include "catch/catch.hpp"
#include <algorithm>
#include <chrono>
#include <cmath>
#include <ctime>
#include <fstream>
#include <iostream>
#include <sstream>
#include <thread>
#include <librealsense2/rs.hpp>
#include <librealsense2/hpp/rs_sensor.hpp>
#include "../../common/tiny-profiler.h"
#include "./../unit-tests-common.h"
#include "./../src/environment.h"

using namespace librealsense;
using namespace librealsense::platform;

// Stream-profile combinations to exercise when generating UV-map artifacts.
// Each entry pairs one depth resolution with one YUYV color resolution
// (inner fields per profile appear to be: stream type, format, width, height,
// index; the trailing `30, true` presumably are fps and an enable flag --
// TODO confirm against the device_profiles declaration, which is not visible here).
std::vector<device_profiles> uv_tests_configurations = {
//D465
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 1024, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 2000, 1500, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 720, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 2000, 1500, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 960, 768, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 2000, 1500, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 1024, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 1920, 1080, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 720, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 1920, 1080, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 960, 768, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 1920, 1080, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 1024, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 960, 720, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1280, 720, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 960, 720, 0 } }, 30, true },
{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 960, 768, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 960, 720, 0 } }, 30, true },
//L500
//{ { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16, 1024, 768, 0 },{ RS2_STREAM_INFRARED, RS2_FORMAT_Y8, 1024, 768, 0 },{ RS2_STREAM_COLOR, RS2_FORMAT_YUYV, 1920, 1080, 0 } }, 30, true },
};

// Data-collection "test": for each configuration above it streams Depth + RGB
// from a live device, generates the point cloud / UV texture map with
// rs2::pointcloud, and dumps the raw color, depth and UV buffers to .bin
// artifacts for offline validation. A second pass records the IR stream
// separately, since (per the inline note) Depth + IR + RGB at 4K cannot be
// streamed simultaneously on D465.
// Fixes vs. the original: the IR frame-size check sized Y8 (1 byte/pixel)
// frames with sizeof(uint16_t), and two checks used assert() -- which is
// compiled out under NDEBUG -- instead of Catch's REQUIRE.
TEST_CASE("Generate UV-MAP", "[live]")
{
    log_to_console(rs2_log_severity::RS2_LOG_SEVERITY_WARN);

    // Require at least one device to be plugged in
    rs2::context ctx;
    auto list = ctx.query_devices();
    REQUIRE(list.size());

    for (auto&& test_cfg : uv_tests_configurations)
    {
        // Record Depth + RGB streams
        {
            // We want the points object to be persistent so we can display the last cloud when a frame drops
            rs2::points points;

            rs2::pointcloud pc;
            // Declare RealSense pipeline, encapsulating the actual device and sensors
            rs2::pipeline pipe;

            rs2::config cfg;
            for (auto&& pf : test_cfg.streams)
                cfg.enable_stream(pf.stream, pf.index, pf.width, pf.height, pf.format, pf.fps);

            // Start streaming with default recommended configuration
            auto pf = cfg.resolve(pipe);
            for (auto&& snr : pf.get_device().query_sensors())
            {
                // Disable global time sync so artifacts carry raw per-sensor timestamps
                if (snr.supports(RS2_OPTION_GLOBAL_TIME_ENABLED))
                    snr.set_option(RS2_OPTION_GLOBAL_TIME_ENABLED, false);

                // Cycle the emitter to reset its state, then pin the laser power
                if (snr.supports(RS2_OPTION_EMITTER_ENABLED))
                {
                    snr.set_option(RS2_OPTION_EMITTER_ENABLED, false);
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                    snr.set_option(RS2_OPTION_EMITTER_ENABLED, true);
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                    snr.set_option(RS2_OPTION_LASER_POWER, 60.f);
                }
            }

            rs2::temporal_filter temp_filter; // Temporal - reduces temporal noise
            const std::string disparity_filter_name = "Disparity";
            rs2::disparity_transform depth_to_disparity(true);
            rs2::disparity_transform disparity_to_depth(false);
            std::vector<rs2::filter> filters;

            // The following order of emplacement will dictate the orders in which filters are applied
            // NOTE(review): the filter chain is prepared but its application below
            // is commented out - kept as scaffolding for future use
            filters.push_back(std::move(depth_to_disparity));
            filters.push_back(std::move(temp_filter));
            filters.push_back(std::move(disparity_to_depth));

            // Start streaming with default recommended configuration
            pf = pipe.start(cfg);

            size_t i = 0;
            size_t startup = 50; // frames discarded while auto-exposure settles

            while (i < startup + 200)
            {
                // Wait for the next set of frames from the camera
                auto frames = pipe.wait_for_frames();
                if (++i < startup)
                    continue;

                auto depth = frames.get_depth_frame();
                auto color = frames.get_color_frame();
                // For D465 streaming of Depth + IR + RGB at 4K is not possible.
                // Therefore the data collection will be performed in two iterations
                //auto ir = frames.get_infrared_frame(1);

                // Prerequisite is that the required three streams are available and that the depth/ir are aligned
                // NOTE(review): a missing frameset silently ends the test with a pass;
                // consider FAIL()-ing here instead - confirm intent
                if (!(color && depth /*&& ir && (depth.get_frame_number() == ir.get_frame_number())*/))
                    return;

                // Using of the temporal filter shall be recommended
                //// Filter depth stream
                //for (auto&& filter : filters)
                //    depth = filter.process(depth);

                pc.map_to(color);
                // Generate the pointcloud and texture mappings
                points = pc.process(depth);

                auto pf = color.get_profile().as<rs2::video_stream_profile>();
                auto w = pf.width();
                auto h = pf.height();
                std::stringstream ss;
                ss << "_yuyv_" << w << "_" << h;
                std::string rgb_res(ss.str().c_str());

                // Dump the raw YUYV color frame (2 bytes per pixel)
                ss.clear(); ss.str(""); ss << i - startup << "_yuyv_" << w << "_" << h << ".bin";
                {
                    // REQUIRE (not assert) so the size check survives NDEBUG builds
                    REQUIRE(color.get_data_size() == w * h * sizeof(uint16_t));
                    std::ofstream outfile(ss.str().c_str(), std::ios::binary);
                    outfile.write((const char*)color.get_data(), color.get_data_size());
                }

                pf = depth.get_profile().as<rs2::video_stream_profile>();
                w = pf.width();
                h = pf.height();

                auto tex_ptr = (float2*)points.get_texture_coordinates();
                auto vert_ptr = (rs2::vertex*)points.get_vertices();
                auto uv_map = reinterpret_cast<const char*>(tex_ptr);

                std::cout << "Frame size is " << w * h << std::endl;

                // Dump the UV-map: one float2 texture coordinate per depth pixel
                ss.clear(); ss.str(""); ss << i - startup << "_uv" << w << "_" << h << rgb_res << ".bin";
                {
                    std::ofstream outfile(ss.str().c_str(), std::ios::binary);
                    outfile.write(uv_map, w * h * sizeof(float2));
                }

                pf = depth.get_profile().as<rs2::video_stream_profile>();
                w = pf.width();
                h = pf.height();
                // Dump the raw Z16 depth frame (2 bytes per pixel)
                ss.clear(); ss.str(""); ss << i - startup << "_depth_" << w << "_" << h << rgb_res << ".bin";
                {
                    REQUIRE(depth.get_data_size() == w * h * sizeof(uint16_t));
                    std::ofstream outfile(ss.str().c_str(), std::ios::binary);
                    outfile.write((const char*)depth.get_data(), depth.get_data_size());
                }

                //pf = ir.get_profile().as<rs2::video_stream_profile>();
                //w = pf.width();
                //h = pf.height();
                //ss.clear(); ss.str(""); ss << i - startup << "_ir_" << w << "_" << h << ".bin";
                //{
                //    assert(ir.get_data_size() == w * h * sizeof(uint8_t));
                //    std::ofstream outfile(ss.str().c_str(), std::ios::binary);
                //    outfile.write((const char*)ir.get_data(), ir.get_data_size());
                //}

                std::cout << "Iteration " << i - startup << " files were written" << std::endl;
            }
        }

        // record IR stream separately
        if (true)
        {
            rs2::pipeline pipe;
            rs2::config cfg;

            auto fmt = test_cfg.streams[0]; // Depth and IR align
            cfg.enable_stream(rs2_stream::RS2_STREAM_INFRARED, 1, fmt.width, fmt.height, RS2_FORMAT_Y8, fmt.fps);

            // Start streaming with default recommended configuration
            auto pf = cfg.resolve(pipe);
            for (auto&& snr : pf.get_device().query_sensors())
            {
                // Keep the emitter off so the IR image is free of the projector pattern
                if (snr.supports(RS2_OPTION_EMITTER_ENABLED))
                    snr.set_option(RS2_OPTION_EMITTER_ENABLED, false);
            }

            // Start streaming with default recommended configuration
            pf = pipe.start(cfg);

            size_t i = 0;
            size_t startup = 30;

            // NOTE(review): with this loop bound exactly one IR frame (the first
            // after warm-up) is captured - confirm a single snapshot is the intent,
            // given the first pass records a 200-frame sequence
            while (i < startup)
            {
                // Wait for the next set of frames from the camera
                auto frames = pipe.wait_for_frames();
                if (++i < startup)
                    continue;

                auto ir_frame = frames.get_infrared_frame(1);

                // Prerequisite is that the required three streams are available and that the depth/ir are aligned
                if (!ir_frame)
                    return;

                auto pf = ir_frame.get_profile().as<rs2::video_stream_profile>();
                auto w = pf.width();
                auto h = pf.height();
                std::stringstream ss;
                ss << i - startup << "_ir_" << w << "_" << h << ".bin";
                {
                    // Fix: the stream was enabled as Y8 (1 byte per pixel), so size
                    // with uint8_t - the original used sizeof(uint16_t), off by 2x.
                    // Also REQUIRE instead of assert so it isn't compiled out.
                    REQUIRE(ir_frame.get_data_size() == w * h * sizeof(uint8_t));
                    std::ofstream outfile(ss.str().c_str(), std::ios::binary);
                    outfile.write((const char*)ir_frame.get_data(), ir_frame.get_data_size());
                }

                std::cout << "Iteration " << i - startup << " files were written" << std::endl;
            }
        }
    }
}