From 81e040b1109fdedbc1821dd83c734740631979a4 Mon Sep 17 00:00:00 2001
From: Awawa <69086569+awawa-dev@users.noreply.github.com>
Date: Thu, 31 Oct 2024 20:57:21 +0100
Subject: [PATCH] Add P010 HDR10 video format support (2024-11-10)

---
 include/lut-calibrator/BestResult.h        |   2 +
 include/lut-calibrator/BoardUtils.h        |   2 +-
 include/lut-calibrator/ColorSpace.h        |   4 +-
 include/lut-calibrator/LutCalibrator.h     |   2 +-
 include/utils/FrameDecoder.h               |  10 +-
 sources/base/Grabber.cpp                   |   2 +-
 sources/grabber/linux/v4l2/V4L2Grabber.cpp |  36 ++-
 sources/grabber/linux/v4l2/V4L2Worker.cpp  |   4 +-
 sources/grabber/windows/MF/MFGrabber.cpp   |  10 +-
 sources/grabber/windows/MF/MFWorker.cpp    |   4 +-
 sources/lut-calibrator/ColorSpace.cpp      |   2 +
 sources/lut-calibrator/LutCalibrator.cpp   |  98 ++++++-
 sources/utils/CMakeLists.txt               |   1 +
 sources/utils/FrameDecoder.cpp             | 301 ++++++++++++++++++---
 www/content/grabber_calibration.html       |   4 +-
 15 files changed, 418 insertions(+), 64 deletions(-)

diff --git a/include/lut-calibrator/BestResult.h b/include/lut-calibrator/BestResult.h
index 8a789559d..b615944f7 100644
--- a/include/lut-calibrator/BestResult.h
+++ b/include/lut-calibrator/BestResult.h
@@ -80,6 +80,7 @@ struct BestResult
 		double upYLimit = 0;
 		double downYLimit = 0;
 		double yShift = 0;
+		bool isSourceP010 = false;
 	} signal;
 
 	long long int minError = MAX_CALIBRATION_ERROR;
@@ -121,6 +122,7 @@ struct BestResult
 		out << "bestResult.signal.upYLimit = " << std::to_string(signal.upYLimit) << ";" << std::endl;
 		out << "bestResult.signal.downYLimit = " << std::to_string(signal.downYLimit) << ";" << std::endl;
 		out << "bestResult.signal.yShift = " << std::to_string(signal.yShift) << ";" << std::endl;
+		out << "bestResult.signal.isSourceP010 = " << std::to_string(signal.isSourceP010) << ";" << std::endl;
 		out << "bestResult.minError = " << std::to_string(std::round(minError * 100.0) / 30000.0) << ";" << std::endl;
 		out << "*/" << std::endl;
 	}
diff --git a/include/lut-calibrator/BoardUtils.h b/include/lut-calibrator/BoardUtils.h
index 8a4a8be68..9a36427dc 100644
--- a/include/lut-calibrator/BoardUtils.h
+++ b/include/lut-calibrator/BoardUtils.h
@@ -61,7 +61,7 @@ namespace BoardUtils
 	constexpr int SCREEN_CRC_LINES = 2;
 	constexpr int SCREEN_CRC_COUNT = 5;
 	constexpr int SCREEN_MAX_CRC_BRIGHTNESS_ERROR = 1;
-	constexpr int SCREEN_MAX_COLOR_NOISE_ERROR = 8;
+	constexpr int SCREEN_MAX_COLOR_NOISE_ERROR = 16;
 	constexpr int SCREEN_SAMPLES_PER_BOARD = (SCREEN_BLOCKS_X / 2) * (SCREEN_BLOCKS_Y - SCREEN_CRC_LINES);
 	const int SCREEN_LAST_BOARD_INDEX = std::pow(SCREEN_COLOR_DIMENSION, 3) / SCREEN_SAMPLES_PER_BOARD;
diff --git a/include/lut-calibrator/ColorSpace.h b/include/lut-calibrator/ColorSpace.h
index 9833e6b82..800013c3d 100644
--- a/include/lut-calibrator/ColorSpace.h
+++ b/include/lut-calibrator/ColorSpace.h
@@ -44,7 +44,7 @@ using namespace aliases;
 
 namespace ColorSpaceMath
 {
-	enum PRIMARIES { SRGB = 0, BT_2020, WIDE_GAMMUT };
+	enum PRIMARIES { SRGB = 0, BT_2020, WIDE_GAMMUT };
 
 	QString gammaToString(HDR_GAMMA gamma);
 
@@ -85,6 +85,8 @@ namespace ColorSpaceMath
 
 	double3 bt2020_linear_to_nonlinear(double3 input);
 
+	double srgb_nonlinear_to_linear(double input);
+
 	double3 srgb_nonlinear_to_linear(double3 input);
 
 	double3 srgb_linear_to_nonlinear(double3 input);
 
diff --git a/include/lut-calibrator/LutCalibrator.h b/include/lut-calibrator/LutCalibrator.h
index 816afa5da..8245bfa02 100644
--- a/include/lut-calibrator/LutCalibrator.h
+++ b/include/lut-calibrator/LutCalibrator.h
@@ -58,7 +58,7 @@ namespace linalg {
 }
 namespace ColorSpaceMath
 {
-	enum HDR_GAMMA { PQ = 0, HLG, sRGB, BT2020inSRGB, PQinSRGB};
+	enum HDR_GAMMA { PQ = 0, HLG, sRGB, BT2020inSRGB, PQinSRGB, P010 };
 }
 
 struct BestResult;
diff --git a/include/utils/FrameDecoder.h b/include/utils/FrameDecoder.h
index 33393293b..21a594e33 100644
--- a/include/utils/FrameDecoder.h
+++ b/include/utils/FrameDecoder.h
@@ -8,17 +8,23 @@
 // some stuff for HDR tone mapping
 #define LUT_INDEX(y,u,v) ((y + (u<<8) + (v<<16))*3)
 
+namespace FrameDecoderUtils
+{
+	double unpackChromaP010(double x);
+	double unpackLuminanceP010(double val);
+}
+
 class FrameDecoder
 {
 public:
 	static void processImage(
 		int _cropLeft, int _cropRight, int _cropTop, int _cropBottom,
 		const uint8_t* data, const uint8_t* dataUV, int width, int height, int lineLength,
-		const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage);
+		const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage, bool toneMapping = true);
 
 	static void processQImage(
 		const uint8_t* data, const uint8_t* dataUV, int width, int height, int lineLength,
-		const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage);
+		const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage, bool toneMapping = true);
 
 	static void processSystemImageBGRA(Image<ColorRgb>& image, int targetSizeX, int targetSizeY,
 		int startX, int startY,
diff --git a/sources/base/Grabber.cpp b/sources/base/Grabber.cpp
index 9ad749de3..858243496 100644
--- a/sources/base/Grabber.cpp
+++ b/sources/base/Grabber.cpp
@@ -101,7 +101,7 @@ void Grabber::setEnabled(bool enable)
 
 void Grabber::setMonitorNits(int nits)
 {
-	if (_targetMonitorNits != nits)
+	if (static_cast<int>(_targetMonitorNits) != nits)
 	{
 		_targetMonitorNits = nits;
 
diff --git a/sources/grabber/linux/v4l2/V4L2Grabber.cpp b/sources/grabber/linux/v4l2/V4L2Grabber.cpp
index 9b363884b..56275355b 100644
--- a/sources/grabber/linux/v4l2/V4L2Grabber.cpp
+++ b/sources/grabber/linux/v4l2/V4L2Grabber.cpp
@@ -61,6 +61,21 @@
 // some stuff for HDR tone mapping
 #define LUT_FILE_SIZE 50331648
 
+namespace
+{
+	#ifdef V4L2_PIX_FMT_P010
+		#pragma message "P010 is supported on the build machine"
+		bool supportedP010 = true;
+	#else
+		#pragma message "P010 is NOT supported on the build machine"
+		bool supportedP010 = false;
+	#endif
+};
+
+#ifndef V4L2_PIX_FMT_P010
+	#define V4L2_PIX_FMT_P010 v4l2_fourcc('P', '0', '1', '0')
+#endif
+
 static const V4L2Grabber::HyperHdrFormat supportedFormats[] =
 {
 	{ V4L2_PIX_FMT_YUYV,   PixelFormat::YUYV },
@@ -68,10 +83,8 @@ static const V4L2Grabber::HyperHdrFormat supportedFormats[] =
 	{ V4L2_PIX_FMT_RGB24,  PixelFormat::RGB24 },
 	{ V4L2_PIX_FMT_YUV420, PixelFormat::I420 },
 	{ V4L2_PIX_FMT_NV12,   PixelFormat::NV12 },
-	{ V4L2_PIX_FMT_MJPEG,  PixelFormat::MJPEG }
-	#ifdef V4L2_PIX_FMT_P010
-	,{ V4L2_PIX_FMT_P010,  PixelFormat::P010 }
-	#endif
+	{ V4L2_PIX_FMT_MJPEG,  PixelFormat::MJPEG },
+	{ V4L2_PIX_FMT_P010,   PixelFormat::P010 }
 };
 
 
@@ -84,6 +97,8 @@ V4L2Grabber::V4L2Grabber(const QString& device, const QString& configurationPath
 {
 	// Refresh devices
 	getV4L2devices();
+
+	Debug(_log, "P010 was %s on the build machine", (supportedP010) ? "supported" : "unsupported");
 }
 
 QString V4L2Grabber::GetSharedLut()
@@ -132,7 +147,8 @@ void V4L2Grabber::setHdrToneMappingEnabled(int mode)
 	{
 		Debug(_log, "setHdrToneMappingMode replacing LUT and restarting");
 		_V4L2WorkerManager.Stop();
-		if ((_actualVideoFormat == PixelFormat::YUYV) || (_actualVideoFormat == PixelFormat::I420) || (_actualVideoFormat == PixelFormat::NV12) || (_actualVideoFormat == PixelFormat::MJPEG))
+		if ((_actualVideoFormat == PixelFormat::YUYV) || (_actualVideoFormat == PixelFormat::I420) || (_actualVideoFormat == PixelFormat::NV12)
+			|| (_actualVideoFormat == PixelFormat::P010) || (_actualVideoFormat == PixelFormat::MJPEG))
 			loadLutFile(PixelFormat::YUYV);
 		else
 			loadLutFile(PixelFormat::RGB24);
@@ -985,6 +1001,16 @@ bool V4L2Grabber::init_device(QString selectedDeviceName, DevicePropertiesItem p
 			}
 			break;
 
+			case V4L2_PIX_FMT_P010:
+			{
+				loadLutFile(PixelFormat::YUYV);
+				_actualVideoFormat = PixelFormat::P010;
+				_frameByteSize = (props.x * props.y * 6) / 2;
+				_lineLength = props.x * 2;
+				Info(_log, "Video pixel format is set to: P010");
+			}
+			break;
+
 			case V4L2_PIX_FMT_NV12:
 			{
 				loadLutFile(PixelFormat::YUYV);
diff --git a/sources/grabber/linux/v4l2/V4L2Worker.cpp b/sources/grabber/linux/v4l2/V4L2Worker.cpp
index 6bdd1f9c8..3584c139b 100644
--- a/sources/grabber/linux/v4l2/V4L2Worker.cpp
+++ b/sources/grabber/linux/v4l2/V4L2Worker.cpp
@@ -201,7 +201,7 @@ void V4L2Worker::runMe()
 		{
 			Image<ColorRgb> image(_width >> 1, _height >> 1);
 			FrameDecoder::processQImage(
-				_sharedData, nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image);
+				_sharedData, nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image, _hdrToneMappingEnabled);
 
 			image.setBufferCacheSize();
 			if (!_directAccess)
@@ -222,7 +222,7 @@ void V4L2Worker::runMe()
 
 			FrameDecoder::processImage(
 				_cropLeft, _cropRight, _cropTop, _cropBottom,
-				_sharedData, nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image);
+				_sharedData, nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image, _hdrToneMappingEnabled);
 
 			image.setBufferCacheSize();
 			if (!_directAccess)
diff --git a/sources/grabber/windows/MF/MFGrabber.cpp b/sources/grabber/windows/MF/MFGrabber.cpp
index 91c786bda..f46867ded 100644
--- a/sources/grabber/windows/MF/MFGrabber.cpp
+++ b/sources/grabber/windows/MF/MFGrabber.cpp
@@ -189,7 +189,7 @@ void MFGrabber::setHdrToneMappingEnabled(int mode)
 	{
 		Debug(_log, "setHdrToneMappingMode replacing LUT and restarting");
 		_MFWorkerManager.Stop();
-		if ((_actualVideoFormat == PixelFormat::YUYV) || (_actualVideoFormat == PixelFormat::I420) || (_actualVideoFormat == PixelFormat::NV12) || (_actualVideoFormat == PixelFormat::MJPEG))
+		if ((_actualVideoFormat == PixelFormat::YUYV) || (_actualVideoFormat == PixelFormat::I420) || (_actualVideoFormat == PixelFormat::P010) || (_actualVideoFormat == PixelFormat::NV12) || (_actualVideoFormat == PixelFormat::MJPEG))
 			loadLutFile(PixelFormat::YUYV);
 		else
 			loadLutFile(PixelFormat::RGB24);
@@ -869,6 +869,14 @@ bool MFGrabber::init_device(QString selectedDeviceName, DevicePropertiesItem pro
 		}
 		break;
 
+		case PixelFormat::P010:
+		{
+			loadLutFile(PixelFormat::YUYV);
+			_frameByteSize = (6 * props.x * props.y) / 2;
+			_lineLength = props.x * 2;
+		}
+		break;
+
 		case PixelFormat::RGB24:
 		{
 			loadLutFile(PixelFormat::RGB24);
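Both grabbers above size a P010 frame the same way: a full-resolution plane of 16-bit luma words followed by an interleaved 16-bit U/V plane at half vertical resolution, i.e. three bytes per pixel on average. The short standalone sketch below is illustrative only (it is not part of the patch and its names are invented for the example); it reproduces the same arithmetic:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// P010 layout: 10-bit samples stored in the upper bits of 16-bit words;
// the Y plane is width x height, the interleaved UV plane is width x height/2.
struct P010Geometry
{
	std::size_t lineLength;    // bytes per luma line: width * 2
	std::size_t frameByteSize; // whole frame: (width * height * 6) / 2
};

static P010Geometry p010Geometry(std::size_t width, std::size_t height)
{
	P010Geometry g;
	g.lineLength = width * 2;                   // two bytes per luma sample
	g.frameByteSize = (width * height * 6) / 2; // 2 bytes Y + 1 byte UV per pixel on average
	return g;
}

int main()
{
	const P010Geometry g = p010Geometry(1920, 1080);
	std::printf("line length: %zu bytes, frame size: %zu bytes\n", g.lineLength, g.frameByteSize);
	return 0;
}

For 1920x1080 this prints a 3840-byte line and a 6220800-byte frame, which matches _frameByteSize = (props.x * props.y * 6) / 2 and _lineLength = props.x * 2 in both init_device() implementations.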
diff --git a/sources/grabber/windows/MF/MFWorker.cpp b/sources/grabber/windows/MF/MFWorker.cpp
index b889c48f4..abd453011 100644
--- a/sources/grabber/windows/MF/MFWorker.cpp
+++ b/sources/grabber/windows/MF/MFWorker.cpp
@@ -199,7 +199,7 @@ void MFWorker::runMe()
 		{
 			Image<ColorRgb> image(_width >> 1, _height >> 1);
 			FrameDecoder::processQImage(
-				_localBuffer.data(), nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image);
+				_localBuffer.data(), nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image, _hdrToneMappingEnabled);
 
 			image.setBufferCacheSize();
 			if (!_directAccess)
@@ -218,7 +218,7 @@ void MFWorker::runMe()
 
 			FrameDecoder::processImage(
 				_cropLeft, _cropRight, _cropTop, _cropBottom,
-				_localBuffer.data(), nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image);
+				_localBuffer.data(), nullptr, _width, _height, _lineLength, _pixelFormat, _lutBuffer, image, _hdrToneMappingEnabled);
 
 			image.setBufferCacheSize();
 			if (!_directAccess)
diff --git a/sources/lut-calibrator/ColorSpace.cpp b/sources/lut-calibrator/ColorSpace.cpp
index 663b1c8da..445fedc3e 100644
--- a/sources/lut-calibrator/ColorSpace.cpp
+++ b/sources/lut-calibrator/ColorSpace.cpp
@@ -76,6 +76,8 @@ namespace ColorSpaceMath
 			return "BT2020 with sRGB TRC";
 		else if (gamma == HDR_GAMMA::PQinSRGB)
 			return "PQ in SRGB";
+		else if (gamma == HDR_GAMMA::P010)
+			return "P010";
 		return "UNKNOWN";
 	}
diff --git a/sources/lut-calibrator/LutCalibrator.cpp b/sources/lut-calibrator/LutCalibrator.cpp
index a24825d54..ce5366bd2 100644
--- a/sources/lut-calibrator/LutCalibrator.cpp
+++ b/sources/lut-calibrator/LutCalibrator.cpp
@@ -118,6 +118,27 @@ LutCalibrator::~LutCalibrator()
 	Info(_log, "The calibration object is deleted");
 }
 
+static void unpackP010(double *y, double *u, double *v)
+{
+	if (y != nullptr)
+	{
+		double val = FrameDecoderUtils::unpackLuminanceP010(*y);
+		*y = (16.0 + val * 219.0) / 255.0;
+	}
+
+	for (auto chroma : { u, v })
+		if (chroma != nullptr)
+		{
+			double val = (*chroma * 255.0 - 128.0) / 128.0;
+			double fVal = FrameDecoderUtils::unpackChromaP010(std::abs(val));
+			*chroma = (128.0 + ((val < 0) ? -fVal : fVal) * 112.0) / 255.0;
+		}
+};
+
+static void unpackP010(double3& yuv)
+{
+	unpackP010(&yuv.x, &yuv.y, &yuv.z);
+};
 
 void LutCalibrator::cancelCalibrationSafe()
 {
@@ -302,6 +323,7 @@ void LutCalibrator::startHandler()
 
 	_capturedColors.reset();
 	_capturedColors = std::make_shared<CapturedColors>();
+	bestResult = std::make_shared<BestResult>();
 
 	if (setTestData())
 	{
@@ -406,11 +428,15 @@ void LutCalibrator::handleImage(const Image<ColorRgb>& image)
 	}
 
 	auto pixelFormat = image.getOriginFormat();
-	if (pixelFormat != PixelFormat::NV12 && pixelFormat != PixelFormat::MJPEG && pixelFormat != PixelFormat::YUYV)
+	if (pixelFormat != PixelFormat::NV12 && pixelFormat != PixelFormat::MJPEG && pixelFormat != PixelFormat::YUYV && pixelFormat != PixelFormat::P010)
 	{
-		error("Only NV12/MJPEG/YUYV video format for the USB grabber and NV12 for the flatbuffers source are supported for the LUT calibration.");
+		error("Only NV12/MJPEG/YUYV/P010 video formats for the USB grabber and NV12 for the flatbuffers source are supported for the LUT calibration.");
 		return;
 	}
+	else if (pixelFormat == PixelFormat::P010)
+	{
+		bestResult->signal.isSourceP010 = true;
+	}
 
 	int boardIndex = -1;
 
@@ -663,6 +689,11 @@ static double3 hdr_to_srgb(const YuvConverter* _yuvConverter, double3 yuv, const
 	double3 srgb;
 	bool white = true;
 
+	if (gamma == HDR_GAMMA::P010)
+	{
+		unpackP010(yuv);
+	}
+
 	if (gamma == HDR_GAMMA::sRGB || gamma == HDR_GAMMA::BT2020inSRGB)
 	{
 		CapturedColors::correctYRange(yuv, signal.yRange, signal.upYLimit, signal.downYLimit, signal.yShift);
@@ -689,7 +720,7 @@ static double3 hdr_to_srgb(const YuvConverter* _yuvConverter, double3 yuv, const
 
 	double3 e;
 
-	if (gamma == HDR_GAMMA::PQ)
+	if (gamma == HDR_GAMMA::PQ || gamma == HDR_GAMMA::P010)
 	{
 		e = PQ_ST2084(10000.0 / nits, a);
 	}
@@ -854,7 +885,16 @@ void CalibrationWorker::run()
 
 			std::list<int> coloredAspectModeList;
 
 			if (!precise)
-				coloredAspectModeList = { 0, 1, 2, 3 };
+			{
+				if (bestResult.signal.isSourceP010)
+				{
+					coloredAspectModeList = { 0 };
+				}
+				else
+				{
+					coloredAspectModeList = { 0, 1, 2, 3 };
+				}
+			}
 			else if (bestResult.coloredAspectMode != 0)
 				coloredAspectModeList = { 0, bestResult.coloredAspectMode };
 			else
@@ -1044,7 +1084,8 @@ void LutCalibrator::fineTune(bool precise)
 			{
 				if ((r % 4 == 0 && g % 4 == 0 && b % 2 == 0) || (r == g * 2 && g > b) || (r <= 6 && g <= 6 && b <= 6) || (r == b && b == g) || (r == g && r > 0) || (r == b && r > 0)
-					|| _capturedColors->all[r][g][b].isLchPrimary(nullptr) != CapturedColor::LchPrimaries::NONE)
+					|| _capturedColors->all[r][g][b].isLchPrimary(nullptr) != CapturedColor::LchPrimaries::NONE
+					|| (bestResult->signal.isSourceP010 && ((r - g > 0 && r - g <= 3 && b == 0) || (r > 0 && g == 0 && b == 0) || (r == 0 && g > 0 && b == 0) || (r == 0 && g == 0 && b > 0))))
 				{
 					vertex.push_back(&_capturedColors->all[r][g][b]);
 
@@ -1069,6 +1110,17 @@ void LutCalibrator::fineTune(bool precise)
 	bestResult->signal.range = _capturedColors->getRange();
 	_capturedColors->getSignalParams(bestResult->signal.yRange, bestResult->signal.upYLimit, bestResult->signal.downYLimit, bestResult->signal.yShift);
 
+	if (bestResult->signal.isSourceP010)
+	{
+		double up = bestResult->signal.upYLimit;
+		unpackP010(&up, nullptr, nullptr);
+		bestResult->signal.upYLimit = up;
+
+		double down = bestResult->signal.downYLimit;
+		unpackP010(&down, nullptr, nullptr);
+		bestResult->signal.downYLimit = down;
+	}
+
 	if (bestResult->signal.range == YuvConverter::COLOR_RANGE::LIMITED)
 	{
 		maxLevel = (white - 16.0) / (235.0 - 16.0);
@@ -1094,9 +1146,13 @@
 	sampleColors[SampleColor::LOW_GREEN] = (std::pair(sampleGreenLow.getInputYuvColors().front().first, byte2{ sampleGreenLow.U(), sampleGreenLow.V() }));
 	sampleColors[SampleColor::LOW_BLUE] = (std::pair(sampleBlueLow.getInputYuvColors().front().first, byte2{ sampleBlueLow.U(), sampleBlueLow.V() }));
 
-	for (int gamma = (precise) ? bestResult->gamma : HDR_GAMMA::PQ; gamma <= HDR_GAMMA::PQinSRGB; gamma++)
+	for (int gamma = (precise) ? (bestResult->gamma) : ((bestResult->signal.isSourceP010) ? HDR_GAMMA::P010 : HDR_GAMMA::PQ);
+		gamma <= HDR_GAMMA::P010; gamma++)
 	{
 		std::vector<double> gammasHLG;
 
+		if (gamma == HDR_GAMMA::P010 && !bestResult->signal.isSourceP010)
+			continue;
+
 		if (gamma == HDR_GAMMA::HLG)
 		{
@@ -1116,6 +1172,14 @@
 		{
 			NITS = 10000.0 * PQ_ST2084(1.0, maxLevel);
 		}
+		else if (gamma == HDR_GAMMA::P010)
+		{
+			double unpackWhite = white / 255.0;
+			unpackP010(&unpackWhite, nullptr, nullptr);
+			unpackWhite *= 255.0;
+			maxLevel = (unpackWhite - 16.0) / (235.0 - 16.0);
+			NITS = 10000.0 * PQ_ST2084(1.0, maxLevel);
+		}
 		else if (gamma == HDR_GAMMA::PQinSRGB)
 		{
 			NITS = 10000.0 * PQ_ST2084(1.0, srgb_linear_to_nonlinear(maxLevel));
@@ -1230,11 +1294,12 @@ void LutCalibrator::calibration()
 	{
 		Debug(_log, "Selected nits: %f", (bestResult->gamma == HDR_GAMMA::HLG) ? 1000.0 * (1 / bestResult->nits) : bestResult->nits);
 	}
-	Debug(_log, "Selected bt2020 gamma range: %i", bestResult->bt2020Range);
-	Debug(_log, "Selected alternative conversion of primaries: %i", bestResult->altConvert);
+	Debug(_log, "Selected bt2020 gamma range: %s", (bestResult->bt2020Range) ? "yes" : "no");
+	Debug(_log, "Selected alternative conversion of primaries: %s", (bestResult->altConvert) ? "yes" : "no");
 	Debug(_log, "Selected aspect: %f %f %f", bestResult->aspect.x, bestResult->aspect.y, bestResult->aspect.z);
 	Debug(_log, "Selected color aspect mode: %i", bestResult->coloredAspectMode);
 	Debug(_log, "Selected color aspect: %s %s", QSTRING_CSTR(vecToString(bestResult->colorAspect.first)), QSTRING_CSTR(vecToString(bestResult->colorAspect.second)));
+	Debug(_log, "Selected source is P010: %s", (bestResult->signal.isSourceP010) ? "yes" : "no");
 
 	if (_debug)
 	{
@@ -1286,7 +1351,8 @@ void LutCalibrator::calibration()
 			for (int b = MAX_IND; b >= 0; b--)
 			{
 				if ((r % 4 == 0 && g % 4 == 0 && b % 2 == 0) || (r == g * 2 && g > b) || (r <= 6 && g <= 6 && b <= 6) || (r == b && b == g) || (r == g && r > 0) || (r == b && r > 0)
-					|| _capturedColors->all[r][g][b].isLchPrimary(nullptr) != CapturedColor::LchPrimaries::NONE)
+					|| _capturedColors->all[r][g][b].isLchPrimary(nullptr) != CapturedColor::LchPrimaries::NONE
+					|| (bestResult->signal.isSourceP010 && ((r - g > 0 && r - g <= 3 && b == 0) || (r > 0 && g == 0 && b == 0) || (r == 0 && g > 0 && b == 0) || (r == 0 && g == 0 && b > 0))))
 				{
 					auto sample = _capturedColors->all[r][g][b];
 					auto sampleList = sample.getFinalRGB();
@@ -1543,8 +1609,7 @@ void LutCalibrator::calibrate()
 	{
 		emit GlobalSignals::getInstance()->SignalRequestComponent(hyperhdr::Components::COMP_FLATBUFSERVER, -1, false);
 	}
-
-	bestResult = std::make_shared<BestResult>();
+
 	_capturedColors->finilizeBoard();
@@ -1577,9 +1642,16 @@ void LutCalibrator::capturedPrimariesCorrection(ColorSpaceMath::HDR_GAMMA gamma,
 
 	for (auto& c : capturedPrimaries)
 	{
-		auto a = _yuvConverter->toRgb(_capturedColors->getRange(), YuvConverter::YUV_COEFS(coef), c.yuv());
+		auto yuv = c.yuv();
+
+		if (gamma == HDR_GAMMA::P010)
+		{
+			unpackP010(yuv);
+		}
+
+		auto a = _yuvConverter->toRgb(_capturedColors->getRange(), YuvConverter::YUV_COEFS(coef), yuv);
 
-		if (gamma == ColorSpaceMath::HDR_GAMMA::PQ)
+		if (gamma == ColorSpaceMath::HDR_GAMMA::PQ || gamma == ColorSpaceMath::HDR_GAMMA::P010)
 		{
 			a = PQ_ST2084(10000.0 / nits, a);
 		}
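The unpackP010() helpers above undo the companding that the frame decoder applies to P010 sources: luma is pushed back through the inverse of the packing curve and rescaled to the limited 16-235 range, chroma back to 128 +/- 112 around the neutral point. The curves themselves (break points 0.9 for luma, 0.75 for chroma) are defined in sources/utils/FrameDecoder.cpp further down. The standalone sketch below simply restates those formulas with one worked sample; it is illustrative and not part of the patch:

#include <cmath>
#include <cstdio>

// Inverse companding curves, restated from FrameDecoderUtils below
// (linear segment divided by 1.5, sine segment inverted with asin).
static double unpackLuminanceP010(double x)
{
	constexpr double signalBreak = 0.9;
	constexpr double pi2 = 3.14159265358979323846 / 2.0;
	if (x < 0.0) return 0.0;
	if (x <= signalBreak) return x / 1.5;
	if (x <= 1.0)
	{
		x = (x - signalBreak) / (1.0 - signalBreak);
		return std::asin(x) * 0.4 / pi2 + 0.6;
	}
	return 1.0;
}

static double unpackChromaP010(double x)
{
	constexpr double signalBreak = 0.75;
	constexpr double pi2 = 3.14159265358979323846 / 2.0;
	if (x < 0.0) return 0.0;
	if (x <= signalBreak) return x / 1.5;
	if (x <= 1.0)
	{
		x = (x - signalBreak) / (1.0 - signalBreak);
		return std::asin(x) * 0.5 / pi2 + 0.5;
	}
	return 1.0;
}

int main()
{
	// Luma right at the break point: 0.9 unpacks to 0.6 and lands at
	// (16 + 0.6 * 219) / 255, roughly 0.578 of full scale.
	const double y = unpackLuminanceP010(0.9);
	std::printf("luma 0.9 -> %.3f -> limited-range %.3f\n", y, (16.0 + y * 219.0) / 255.0);

	// Chroma byte 192 is +0.5 of full deviation; it unpacks to 1/3,
	// i.e. about 128 + 0.333 * 112 = 165 on the 8-bit scale.
	const double c = (192.0 - 128.0) / 128.0;
	const double f = unpackChromaP010(std::fabs(c));
	std::printf("chroma byte 192 -> %.1f\n", 128.0 + ((c < 0.0) ? -f : f) * 112.0);
	return 0;
}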
diff --git a/sources/utils/CMakeLists.txt b/sources/utils/CMakeLists.txt
index 6e9bac641..42fb9dc5d 100644
--- a/sources/utils/CMakeLists.txt
+++ b/sources/utils/CMakeLists.txt
@@ -15,6 +15,7 @@ add_library(hyperhdr-utils OBJECT ${Utils_SOURCES})
 target_link_libraries(hyperhdr-utils
 	Qt${Qt_VERSION}::Core
 	Qt${Qt_VERSION}::Network
+	linalg
 )
 
 if(USE_PRECOMPILED_HEADERS AND COMMAND target_precompile_headers)
diff --git a/sources/utils/FrameDecoder.cpp b/sources/utils/FrameDecoder.cpp
index 320acd200..5e24c6fda 100644
--- a/sources/utils/FrameDecoder.cpp
+++ b/sources/utils/FrameDecoder.cpp
@@ -29,17 +29,163 @@
 #include
 #include
 
-//#define TAKE_SCREEN_SHOT
+#include
+#include
+#include
+#include
+
+using namespace linalg;
+using namespace aliases;
+
+namespace FrameDecoderUtils
+{
+	std::atomic<bool> initialized(false);
+	std::vector<uint8_t> lutP010_y;
+	std::vector<uint8_t> lutP010_uv;
+
+	constexpr double signalBreakP010 = 0.9;
+	constexpr double signalBreakChromaP010 = 0.75;
+
+	static double packChromaP010(double x)
+	{
+		constexpr double pi2 = M_PI / 2.0;
+		if (x < 0.0)
+		{
+			return 0.0;
+		}
+		else if (x <= 0.5)
+		{
+			return x * 1.5;
+		}
+		else if (x <= 1)
+		{
+			return std::sin(pi2 * ((x - 0.5) / 0.5)) * (1 - signalBreakChromaP010) + signalBreakChromaP010;
+		}
+		return 1;
+	};
+
+	double unpackChromaP010(double x)
+	{
+		constexpr double pi2 = M_PI / 2.0;
+		if (x < 0.0)
+		{
+			return 0.0;
+		}
+		else if (x <= signalBreakChromaP010)
+		{
+			x /= 1.5;
+			return x;
+		}
+		else if (x <= 1)
+		{
+			x = (x - signalBreakChromaP010) / (1.0 - signalBreakChromaP010);
+			x = std::asin(x);
+			x = x * 0.5 / pi2 + 0.5;
+			return x;
+		}
+
+		return 1;
+	};
+
+	static double packLuminanceP010(double x)
+	{
+		constexpr double pi2 = M_PI / 2.0;
+		if (x < 0.0)
+		{
+			return 0.0;
+		}
+		else if (x <= 0.6)
+		{
+			return x * 1.5;
+		}
+		else if (x <= 1)
+		{
+			return std::sin(pi2 * ((x - 0.6) / 0.4)) * (1 - signalBreakP010) + signalBreakP010;
+		}
+		return 1;
+	};
+
+	double unpackLuminanceP010(double x)
+	{
+		constexpr double pi2 = M_PI / 2.0;
+		if (x < 0.0)
+		{
+			return 0.0;
+		}
+		else if (x <= signalBreakP010)
+		{
+			return x / 1.5;
+		}
+		else if (x <= 1)
+		{
+			x = (x - signalBreakP010) / (1.0 - signalBreakP010);
+			x = std::asin(x);
+			x = x * 0.4 / pi2 + 0.6;
+			return x;
+		}
+
+		return 1;
+	};
+
+
+	static void initP010()
+	{
+		static std::mutex lockerP010;
+		std::lock_guard<std::mutex> locker(lockerP010);
+
+		if (FrameDecoderUtils::initialized)
+			return;
+
+		lutP010_y.resize(1024);
+		lutP010_uv.resize(1024);
+
+		for (int i = 0; i < static_cast<int>(lutP010_y.size()); ++i)
+		{
+			constexpr int sourceRange = 940 - 64;
+			const double sourceValue = std::min(std::max(i - 64, 0), sourceRange) / static_cast<double>(sourceRange);
+			double val = packLuminanceP010(sourceValue);
+			lutP010_y[i] = std::lround(val * 255.0);
+
+			/*
+			double unpack = unpackLuminanceP010(val);
+			double delta = sourceValue - unpack;
+			if (std::abs(delta) > 0.0000001)
+			{
+				bool error = true;
+			}
+			*/
+		}
+
+
+		for (int i = 0; i < static_cast<int>(lutP010_uv.size()); ++i)
+		{
+			constexpr int sourceRange = (960 - 64) / 2;
+			const int current = std::abs(i - 512);
+			const double sourceValue = std::min(current, sourceRange) / static_cast<double>(sourceRange);
+			double val = packChromaP010(sourceValue);
+			lutP010_uv[i] = std::max(std::min(128 + std::lround(((i < 512) ? -val : val) * 128.0), 255l), 0l);
+
+			/*
+			double unpack = unpackChromaP010(val);
+			double delta = sourceValue - unpack;
+			if (std::abs(delta) > 0.0000001)
+			{
+				bool error = true;
+			}
+			*/
+		}
+
+		FrameDecoderUtils::initialized = true;
+	};
+};
+
+using namespace FrameDecoderUtils;
 
-#ifdef TAKE_SCREEN_SHOT
-	#include
-	int screenShotTaken = 300;
-#endif
 
 void FrameDecoder::processImage(
 	int _cropLeft, int _cropRight, int _cropTop, int _cropBottom,
 	const uint8_t* data, const uint8_t* dataUV, int width, int height, int lineLength,
-	const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage)
+	const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage, bool toneMapping)
 {
 	uint32_t ind_lutd, ind_lutd2;
 	uint8_t buffer[8];
@@ -47,7 +193,7 @@ void FrameDecoder::processImage(
 
 	// validate format
 	if (pixelFormat != PixelFormat::YUYV && pixelFormat != PixelFormat::XRGB && pixelFormat != PixelFormat::RGB24 &&
-		pixelFormat != PixelFormat::I420 && pixelFormat != PixelFormat::NV12 && pixelFormat != PixelFormat::MJPEG)
+		pixelFormat != PixelFormat::I420 && pixelFormat != PixelFormat::NV12 && pixelFormat != PixelFormat::P010 && pixelFormat != PixelFormat::MJPEG)
 	{
 		Error(Logger::getInstance("FrameDecoder"), "Invalid pixel format given");
 		return;
 	}
 
 	// validate format LUT
 	if ((pixelFormat == PixelFormat::YUYV || pixelFormat == PixelFormat::I420 || pixelFormat == PixelFormat::MJPEG ||
-		pixelFormat == PixelFormat::NV12) && lutBuffer == NULL)
+		pixelFormat == PixelFormat::NV12 || pixelFormat == PixelFormat::P010) && lutBuffer == NULL)
 	{
 		Error(Logger::getInstance("FrameDecoder"), "Missing LUT table for YUV colorspace");
 		return;
@@ -99,14 +245,6 @@ void FrameDecoder::processImage(
 		}
 	}
 
-#ifdef TAKE_SCREEN_SHOT
-	if (screenShotTaken > 0 && screenShotTaken-- == 1)
-	{
-		QImage jpgImage((const uint8_t*)outputImage.rawMem(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888);
-		jpgImage.save("D:/grabber_yuv.png", "png");
-	}
-#endif
-
 	return;
 }
 
@@ -239,6 +377,64 @@ void FrameDecoder::processImage(
 		return;
 	}
 
+	if (pixelFormat == PixelFormat::P010)
+	{
+		uint16_t p010[2] = {};
+
+		if (!FrameDecoderUtils::initialized)
+		{
+			initP010();
+		}
+
+		auto deltaUV = (dataUV != nullptr) ? (uint8_t*)dataUV : (uint8_t*)data + lineLength * height;
+		for (int yDest = 0, ySource = _cropTop; yDest < outputHeight; ++ySource, ++yDest)
+		{
+			uint8_t* currentDest = destMemory + ((uint64_t)destLineSize) * yDest;
+			uint8_t* endDest = currentDest + destLineSize;
+			uint8_t* currentSource = (uint8_t*)data + (((uint64_t)lineLength * ySource) + ((uint64_t)_cropLeft));
+			uint8_t* currentSourceUV = deltaUV + (((uint64_t)ySource / 2) * lineLength) + ((uint64_t)_cropLeft);
+
+			while (currentDest < endDest)
+			{
+				memcpy(((uint32_t*)&p010), ((uint32_t*)currentSource), 4);
+				if (toneMapping)
+				{
+					buffer[0] = lutP010_y[p010[0] >> 6];
+					buffer[1] = lutP010_y[p010[1] >> 6];
+				}
+				else
+				{
+					buffer[0] = p010[0] >> 8;
+					buffer[1] = p010[1] >> 8;
+				}
+
+				currentSource += 4;
+				memcpy(((uint32_t*)&p010), ((uint32_t*)currentSourceUV), 4);
+				if (toneMapping)
+				{
+					buffer[2] = lutP010_uv[p010[0] >> 6];
+					buffer[3] = lutP010_uv[p010[1] >> 6];
+				}
+				else
+				{
+					buffer[2] = p010[0] >> 8;
+					buffer[3] = p010[1] >> 8;
+				}
+
+				currentSourceUV += 4;
+
+				ind_lutd = LUT_INDEX(buffer[0], buffer[2], buffer[3]);
+				ind_lutd2 = LUT_INDEX(buffer[1], buffer[2], buffer[3]);
+
+				*((uint32_t*)currentDest) = *((uint32_t*)(&lutBuffer[ind_lutd]));
+				currentDest += 3;
+				*((uint32_t*)currentDest) = *((uint32_t*)(&lutBuffer[ind_lutd2]));
+				currentDest += 3;
+			}
+		}
+		return;
+	}
+
 	if (pixelFormat == PixelFormat::NV12)
 	{
 		auto deltaUV = (dataUV != nullptr) ? (uint8_t*)dataUV : (uint8_t*)data + lineLength * height;
@@ -265,20 +461,14 @@ void FrameDecoder::processImage(
 				currentDest += 3;
 			}
 		}
-#ifdef TAKE_SCREEN_SHOT
-	if (screenShotTaken > 0 && screenShotTaken-- == 1)
-	{
-		QImage jpgImage((const uint8_t*)outputImage.rawMem(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888);
-		jpgImage.save("D:/grabber_nv12.png", "png");
-	}
-#endif
+
 		return;
 	}
 }
 
 void FrameDecoder::processQImage(
 	const uint8_t* data, const uint8_t* dataUV, int width, int height, int lineLength,
-	const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage)
+	const PixelFormat pixelFormat, const uint8_t* lutBuffer, Image<ColorRgb>& outputImage, bool toneMapping)
 {
 	uint32_t ind_lutd;
 	uint8_t buffer[8];
@@ -286,7 +476,7 @@ void FrameDecoder::processQImage(
 
 	// validate format
 	if (pixelFormat != PixelFormat::YUYV && pixelFormat != PixelFormat::XRGB && pixelFormat != PixelFormat::RGB24 &&
-		pixelFormat != PixelFormat::I420 && pixelFormat != PixelFormat::NV12)
+		pixelFormat != PixelFormat::I420 && pixelFormat != PixelFormat::NV12 && pixelFormat != PixelFormat::P010)
 	{
 		Error(Logger::getInstance("FrameDecoder"), "Invalid pixel format given");
 		return;
@@ -294,7 +484,7 @@ void FrameDecoder::processQImage(
 
 	// validate format LUT
 	if ((pixelFormat == PixelFormat::YUYV || pixelFormat == PixelFormat::I420 ||
-		pixelFormat == PixelFormat::NV12) && lutBuffer == NULL)
+		pixelFormat == PixelFormat::NV12 || pixelFormat == PixelFormat::P010) && lutBuffer == NULL)
 	{
 		Error(Logger::getInstance("FrameDecoder"), "Missing LUT table for YUV colorspace");
 		return;
@@ -427,6 +617,58 @@ void FrameDecoder::processQImage(
 		return;
 	}
 
+	if (pixelFormat == PixelFormat::P010)
+	{
+		uint16_t p010[2] = {};
+
+		if (!FrameDecoderUtils::initialized)
+		{
+			initP010();
+		}
+
+		uint8_t* deltaUV = (dataUV != nullptr) ? (uint8_t*)dataUV : (uint8_t*)data + lineLength * height;
+		for (int yDest = 0, ySource = 0; yDest < outputHeight; ySource += 2, ++yDest)
+		{
+			uint8_t* currentDest = destMemory + ((uint64_t)destLineSize) * yDest;
+			uint8_t* endDest = currentDest + destLineSize;
+			uint8_t* currentSource = (uint8_t*)data + (((uint64_t)lineLength * ySource));
+			uint8_t* currentSourceU = deltaUV + (((uint64_t)ySource / 2) * lineLength);
+
+			while (currentDest < endDest)
+			{
+				memcpy(((uint16_t*)&p010), ((uint16_t*)currentSource), 2);
+				if (toneMapping)
+				{
+					buffer[0] = lutP010_y[p010[0] >> 6];
+				}
+				else
+				{
+					buffer[0] = p010[0] >> 8;
+				}
+				currentSource += 4;
+				memcpy(((uint32_t*)&p010), ((uint32_t*)currentSourceU), 4);
+				if (toneMapping)
+				{
+					buffer[2] = lutP010_uv[p010[0] >> 6];
+					buffer[3] = lutP010_uv[p010[1] >> 6];
+				}
+				else
+				{
+					buffer[2] = p010[0] >> 8;
+					buffer[3] = p010[1] >> 8;
+				}
+
+				currentSourceU += 4;
+
+				ind_lutd = LUT_INDEX(buffer[0], buffer[2], buffer[3]);
+
+				*((uint32_t*)currentDest) = *((uint32_t*)(&lutBuffer[ind_lutd]));
+				currentDest += 3;
+			}
+		}
+		return;
+	}
+
 	if (pixelFormat == PixelFormat::NV12)
 	{
 		uint8_t* deltaUV = (dataUV != nullptr) ? (uint8_t*)dataUV : (uint8_t*)data + lineLength * height;
@@ -498,13 +740,6 @@ void FrameDecoder::applyLUT(uint8_t* _source, unsigned int width, unsigned int h
 			}
 		}
 	}
-#ifdef TAKE_SCREEN_SHOT
-	if (screenShotTaken > 0 && screenShotTaken-- == 1)
-	{
-		QImage jpgImage((const uint8_t*)_source, width, height, 3 * width, QImage::Format_RGB888);
-		jpgImage.save("D:/grabber_mjpeg.png", "png");
-	}
-#endif
 }
 
 void FrameDecoder::processSystemImageBGRA(Image<ColorRgb>& image, int targetSizeX, int targetSizeY,
diff --git a/www/content/grabber_calibration.html b/www/content/grabber_calibration.html
index d962f44ec..453c002c4 100644
--- a/www/content/grabber_calibration.html
+++ b/www/content/grabber_calibration.html
@@ -47,7 +47,7 @@

- +
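One more illustrative detail behind the FrameDecoder changes: each P010 sample is a 16-bit little-endian word with its 10 significant bits at the top, so shifting right by 6 yields the 10-bit code that indexes the 1024-entry companding LUTs (lutP010_y / lutP010_uv), while shifting by 8 keeps only the top 8 bits when tone mapping is disabled. The sketch below is standalone and not part of the patch; the byte values are made up and a little-endian host is assumed, as in the decoder itself:

#include <cstdint>
#include <cstring>
#include <cstdio>

int main()
{
	// Two neighbouring luma samples and their shared U/V pair, as raw bytes.
	const uint8_t yBytes[4]  = { 0x00, 0x5A, 0x40, 0x5B }; // Y0 = 0x5A00, Y1 = 0x5B40
	const uint8_t uvBytes[4] = { 0x00, 0x80, 0x00, 0x80 }; // U = V = 0x8000 (neutral chroma)

	uint16_t y[2], uv[2];
	std::memcpy(y, yBytes, sizeof(y));    // mirrors memcpy(&p010, currentSource, 4)
	std::memcpy(uv, uvBytes, sizeof(uv)); // mirrors memcpy(&p010, currentSourceUV, 4)

	for (int i = 0; i < 2; ++i)
	{
		// >> 6: 10-bit LUT index (0..1023); >> 8: plain 8-bit value.
		std::printf("Y%d: 10-bit code %d, top 8 bits %d\n", i, y[i] >> 6, y[i] >> 8);
	}
	std::printf("U: 10-bit code %d, V: 10-bit code %d\n", uv[0] >> 6, uv[1] >> 6);
	return 0;
}

Neutral chroma (0x8000) lands on index 512, the centre of lutP010_uv, which is exactly how initP010() builds the table.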