diff --git a/doc/INSTALL-WINDOWS.md b/doc/INSTALL-WINDOWS.md
index 3b1267d9c..23c6c0bc1 100644
--- a/doc/INSTALL-WINDOWS.md
+++ b/doc/INSTALL-WINDOWS.md
@@ -193,6 +193,7 @@ pacman -Syu
```
pacman -S --needed base-devel mingw-w64-x86_64-toolchain
pacman -S mingw64/mingw-w64-x86_64-ffmpeg
+pacman -S mingw64/mingw-w64-x86_64-qt5
pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5
pacman -S mingw64/mingw-w64-x86_64-swig
pacman -S mingw64/mingw-w64-x86_64-cmake
@@ -202,6 +203,11 @@ pacman -S mingw32/mingw-w64-i686-zeromq
pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq
pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze
pacman -S mingw64/mingw-w64-x86_64-ninja
+pacman -S mingw64/mingw-w64-x86_64-catch
+pacman -S mingw-w64-x86_64-python3-pyopengl
+pacman -S mingw-w64-clang-x86_64-python-pyopengl-accelerate
+pacman -S mingw-w64-x86_64-python-pyopengl-accelerate
+pacman -S mingw-w64-x86_64-python-pywin32
pacman -S git
# Install ImageMagick if needed (OPTIONAL and NOT NEEDED)
@@ -213,6 +219,7 @@ pacman -S mingw64/mingw-w64-x86_64-imagemagick
```
pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain
pacman -S mingw32/mingw-w64-i686-ffmpeg
+pacman -S mingw32/mingw-w64-i686-qt5
pacman -S mingw32/mingw-w64-i686-python3-pyqt5
pacman -S mingw32/mingw-w64-i686-swig
pacman -S mingw32/mingw-w64-i686-cmake
@@ -222,6 +229,10 @@ pacman -S mingw32/mingw-w64-i686-zeromq
pacman -S mingw32/mingw-w64-i686-python3-pyzmq
pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze
pacman -S mingw32/mingw-w64-i686-ninja
+pacman -S mingw32/mingw-w64-i686-catch
+pacman -S mingw-w64-i686-python-pyopengl
+pacman -S mingw-w64-i686-python-pyopengl-accelerate
+pacman -S mingw-w64-i686-python-pywin32
pacman -S git
# Install ImageMagick if needed (OPTIONAL and NOT NEEDED)
@@ -237,6 +248,8 @@ pip3 install tinys3
pip3 install github3.py
pip3 install requests
pip3 install meson
+pip3 install PyOpenGL
+pip3 install PyOpenGL-accelerate
```
7) Download Unittest++ (https://github.com/unittest-cpp/unittest-cpp) into /MSYS2/[USER]/unittest-cpp-master/
@@ -251,12 +264,50 @@ mingw32-make install
```
git clone https://gitlab.gnome.org/GNOME/babl.git
cd babl
-meson build --prefix=C:/msys64/mingw32
+meson build --prefix=C:/msys64/mingw64  # use --prefix=C:/msys64/mingw32 for a 32-bit build
cd build
meson install
```
-9) ZMQ++ Header (This might not be needed anymore)
+9) Install opencv (used for AI and computer vision effects)
+
+Note: Had to edit 1 header file and add a missing typedef: `typedef unsigned int uint;`
+
+```
+git clone https://github.com/opencv/opencv
+cd opencv/
+git checkout '4.3.0'
+cd ..
+git clone https://github.com/opencv/opencv_contrib
+cd opencv_contrib/
+git checkout '4.3.0'
+cd ..
+cd opencv
+mkdir build
+cd build
+cmake -D CMAKE_BUILD_TYPE=RELEASE -D WITH_TBB=OFF -D WITH_QT=ON -D WITH_OPENGL=ON -D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib/modules -D OPENCV_GENERATE_PKGCONFIG=ON -D BUILD_opencv_python2=OFF -D BUILD_opencv_python3=ON -G "MSYS Makefiles" ..
+make -j4 -i  # -i ignores sporadic errors that occur on MSYS2 builds
+make install -i
+```
+
+10) Install ReSVG (SVG rasterizing)
+
+```
+git clone https://github.com/RazrFalcon/resvg
+cd resvg/c-api
+QT_DIR="C:\\msys64\\mingw64\\" cargo build --verbose --release
+# OR (for a 32-bit build):
+QT_DIR="C:\\msys64\\mingw32\\" cargo build --verbose --release
+
+cd ../
+
+# copy all required files into the system directories
+cp target/release/resvg.dll /usr/lib/
+mkdir -p /usr/include/resvg/
+cp c-api/*.h /usr/include/resvg/
+```
+
+11) ZMQ++ Header (This might not be needed anymore)
NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder
## Manual Dependencies
diff --git a/examples/test_video_sync.mp4 b/examples/test_video_sync.mp4
new file mode 100644
index 000000000..f6f7fba73
Binary files /dev/null and b/examples/test_video_sync.mp4 differ
diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp
index 7e69f0124..d8c0c9412 100644
--- a/src/FFmpegReader.cpp
+++ b/src/FFmpegReader.cpp
@@ -74,7 +74,8 @@ FFmpegReader::FFmpegReader(const std::string &path, bool inspect_reader)
seek_audio_frame_found(0), seek_video_frame_found(0),is_duration_known(false), largest_frame_processed(0),
current_video_frame(0), packet(NULL), max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), audio_pts(0),
video_pts(0), pFormatCtx(NULL), videoStream(-1), audioStream(-1), pCodecCtx(NULL), aCodecCtx(NULL),
- pStream(NULL), aStream(NULL), pFrame(NULL), previous_packet_location{-1,0} {
+ pStream(NULL), aStream(NULL), pFrame(NULL), previous_packet_location{-1,0},
+ hold_packet(false) {
// Initialize FFMpeg, and register all formats and codecs
AV_REGISTER_ALL
@@ -213,6 +214,7 @@ void FFmpegReader::Open() {
pFormatCtx = NULL;
{
hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1);
+ ZmqLogger::Instance()->AppendDebugMethod("Decode hardware acceleration settings", "hw_de_on", hw_de_on, "HARDWARE_DECODER", openshot::Settings::Instance()->HARDWARE_DECODER);
}
// Open video file
@@ -646,6 +648,7 @@ void FFmpegReader::Close() {
// Reset some variables
last_frame = 0;
+ hold_packet = false;
largest_frame_processed = 0;
seek_audio_frame_found = 0;
seek_video_frame_found = 0;
@@ -952,11 +955,13 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) {
break;
}
- // Get the next packet
- packet_error = GetNextPacket();
- if (packet_error < 0 && !packet) {
- // No more packets to be found
- packet_status.packets_eof = true;
+ if (!hold_packet || !packet) {
+ // Get the next packet
+ packet_error = GetNextPacket();
+ if (packet_error < 0 && !packet) {
+ // No more packets to be found
+ packet_status.packets_eof = true;
+ }
}
// Debug output
@@ -977,7 +982,7 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) {
// Video packet
if ((info.has_video && packet && packet->stream_index == videoStream) ||
- (info.has_video && !packet && packet_status.video_decoded < packet_status.video_read) ||
+ (info.has_video && packet_status.video_decoded < packet_status.video_read) ||
(info.has_video && !packet && !packet_status.video_eof)) {
// Process Video Packet
ProcessVideoPacket(requested_frame);
@@ -1104,7 +1109,17 @@ bool FFmpegReader::GetAVFrame() {
AVFrame *next_frame = AV_ALLOCATE_FRAME();
#if IS_FFMPEG_3_2
- int send_packet_err = avcodec_send_packet(pCodecCtx, packet);
+ int send_packet_err = 0;
+ int64_t send_packet_pts = 0;
+ if ((packet && packet->stream_index == videoStream && !hold_packet) || !packet) {
+ send_packet_err = avcodec_send_packet(pCodecCtx, packet);
+
+ if (packet && send_packet_err >= 0) {
+ send_packet_pts = GetPacketPTS();
+ hold_packet = false;
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet succeeded)", "send_packet_err", send_packet_err, "send_packet_pts", send_packet_pts);
+ }
+ }
#if USE_HW_ACCEL
// Get the format from the variables set in get_hw_dec_format
@@ -1112,88 +1127,117 @@ bool FFmpegReader::GetAVFrame() {
hw_de_av_device_type = hw_de_av_device_type_global;
#endif // USE_HW_ACCEL
if (send_packet_err < 0 && send_packet_err != AVERROR_EOF) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)",
- "send_packet_err", send_packet_err);
- }
- else {
- int receive_frame_err = 0;
- AVFrame *next_frame2;
- #if USE_HW_ACCEL
- if (hw_de_on && hw_de_supported) {
- next_frame2 = AV_ALLOCATE_FRAME();
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: Not sent [" + av_err2string(send_packet_err) + "])", "send_packet_err", send_packet_err, "send_packet_pts", send_packet_pts);
+ if (send_packet_err == AVERROR(EAGAIN)) {
+ hold_packet = true;
+			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EAGAIN): user must read output with avcodec_receive_frame())", "send_packet_pts", send_packet_pts);
}
- else
- #endif // USE_HW_ACCEL
- {
- next_frame2 = next_frame;
+ if (send_packet_err == AVERROR(EINVAL)) {
+			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush)", "send_packet_pts", send_packet_pts);
+ }
+ if (send_packet_err == AVERROR(ENOMEM)) {
+			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(ENOMEM): failed to add packet to internal queue, or legitimate decoding errors)", "send_packet_pts", send_packet_pts);
}
- pFrame = AV_ALLOCATE_FRAME();
- while (receive_frame_err >= 0) {
- receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);
+ }
+
+ // Always try and receive a packet, if not EOF.
+ // Even if the above avcodec_send_packet failed to send,
+ // we might still need to receive a packet.
+ int receive_frame_err = 0;
+ AVFrame *next_frame2;
+#if USE_HW_ACCEL
+ if (hw_de_on && hw_de_supported) {
+ next_frame2 = AV_ALLOCATE_FRAME();
+ }
+ else
+#endif // USE_HW_ACCEL
+ {
+ next_frame2 = next_frame;
+ }
+ pFrame = AV_ALLOCATE_FRAME();
+ while (receive_frame_err >= 0) {
+ receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);
+
+ if (receive_frame_err != 0) {
+			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (receive frame: frame not ready yet from decoder [" + av_err2string(receive_frame_err) + "])", "receive_frame_err", receive_frame_err, "send_packet_pts", send_packet_pts);
if (receive_frame_err == AVERROR_EOF) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (EOF detected from decoder)");
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR_EOF: EOF detected from decoder, flushing buffers)", "send_packet_pts", send_packet_pts);
+ avcodec_flush_buffers(pCodecCtx);
packet_status.video_eof = true;
}
- if (receive_frame_err == AVERROR(EINVAL) || receive_frame_err == AVERROR_EOF) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid frame received or EOF from decoder)");
+ if (receive_frame_err == AVERROR(EINVAL)) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR(EINVAL): invalid frame received, flushing buffers)", "send_packet_pts", send_packet_pts);
avcodec_flush_buffers(pCodecCtx);
}
- if (receive_frame_err != 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (frame not ready yet from decoder)");
- break;
+ if (receive_frame_err == AVERROR(EAGAIN)) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR(EAGAIN): output is not available in this state - user must try to send new input)", "send_packet_pts", send_packet_pts);
}
-
- #if USE_HW_ACCEL
- if (hw_de_on && hw_de_supported) {
- int err;
- if (next_frame2->format == hw_de_av_pix_fmt) {
- next_frame->format = AV_PIX_FMT_YUV420P;
- if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)");
- }
- if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)");
- }
- }
- }
- else
- #endif // USE_HW_ACCEL
- { // No hardware acceleration used -> no copy from GPU memory needed
- next_frame = next_frame2;
- }
-
- // TODO also handle possible further frames
- // Use only the first frame like avcodec_decode_video2
- frameFinished = 1;
- packet_status.video_decoded++;
-
- av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
- av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
- (AVPixelFormat)(pStream->codecpar->format), info.width, info.height);
-
- // Get display PTS from video frame, often different than packet->pts.
- // Sending packets to the decoder (i.e. packet->pts) is async,
- // and retrieving packets from the decoder (frame->pts) is async. In most decoders
- // sending and retrieving are separated by multiple calls to this method.
- if (next_frame->pts != AV_NOPTS_VALUE) {
- // This is the current decoded frame (and should be the pts used) for
- // processing this data
- video_pts = next_frame->pts;
- } else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
- // Some videos only set this timestamp (fallback)
- video_pts = next_frame->pkt_dts;
+ if (receive_frame_err == AVERROR_INPUT_CHANGED) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame)", "send_packet_pts", send_packet_pts);
}
- // break out of loop after each successful image returned
+ // Break out of decoding loop
+ // Nothing ready for decoding yet
break;
}
- #if USE_HW_ACCEL
+
+#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
- AV_FREE_FRAME(&next_frame2);
+ int err;
+ if (next_frame2->format == hw_de_av_pix_fmt) {
+ next_frame->format = AV_PIX_FMT_YUV420P;
+ if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "hw_de_on", hw_de_on);
+ }
+ if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "hw_de_on", hw_de_on);
+ }
+ }
}
- #endif // USE_HW_ACCEL
+ else
+#endif // USE_HW_ACCEL
+ { // No hardware acceleration used -> no copy from GPU memory needed
+ next_frame = next_frame2;
+ }
+
+ // TODO also handle possible further frames
+ // Use only the first frame like avcodec_decode_video2
+ frameFinished = 1;
+ packet_status.video_decoded++;
+
+ av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
+ av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
+ (AVPixelFormat)(pStream->codecpar->format), info.width, info.height);
+
+ // Get display PTS from video frame, often different than packet->pts.
+ // Sending packets to the decoder (i.e. packet->pts) is async,
+ // and retrieving packets from the decoder (frame->pts) is async. In most decoders
+ // sending and retrieving are separated by multiple calls to this method.
+ if (next_frame->pts != AV_NOPTS_VALUE) {
+ // This is the current decoded frame (and should be the pts used) for
+ // processing this data
+ video_pts = next_frame->pts;
+ } else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
+ // Some videos only set this timestamp (fallback)
+ video_pts = next_frame->pkt_dts;
+ }
+
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (Successful frame received)", "video_pts", video_pts, "send_packet_pts", send_packet_pts);
+
+ // break out of loop after each successful image returned
+ break;
}
+#if USE_HW_ACCEL
+ if (hw_de_on && hw_de_supported) {
+ AV_FREE_FRAME(&next_frame2);
+ }
+ #endif // USE_HW_ACCEL
#else
avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet);
@@ -1744,6 +1788,7 @@ void FFmpegReader::Seek(int64_t requested_frame) {
video_pts_seconds = NO_PTS_OFFSET;
audio_pts = 0.0;
audio_pts_seconds = NO_PTS_OFFSET;
+ hold_packet = false;
last_frame = 0;
current_video_frame = 0;
largest_frame_processed = 0;
diff --git a/src/FFmpegReader.h b/src/FFmpegReader.h
index 83f90136e..540d46dd3 100644
--- a/src/FFmpegReader.h
+++ b/src/FFmpegReader.h
@@ -153,6 +153,7 @@ namespace openshot {
int64_t audio_pts;
int64_t video_pts;
+ bool hold_packet;
double pts_offset_seconds;
double audio_pts_seconds;
double video_pts_seconds;
diff --git a/tests/FFmpegReader.cpp b/tests/FFmpegReader.cpp
index c397fc0ae..0f272fdff 100644
--- a/tests/FFmpegReader.cpp
+++ b/tests/FFmpegReader.cpp
@@ -284,3 +284,70 @@ TEST_CASE( "DisplayInfo", "[libopenshot][ffmpegreader]" )
// Compare a [0, expected.size()) substring of output to expected
CHECK(output.str().substr(0, expected.size()) == expected);
}
+
+TEST_CASE( "Decoding AV1 Video", "[libopenshot][ffmpegreader]" )
+{
+ try {
+ // Create a reader
+ std::stringstream path;
+ path << TEST_MEDIA_PATH << "test_video_sync.mp4";
+ FFmpegReader r(path.str());
+ r.Open();
+
+ std::shared_ptr f = r.GetFrame(1);
+
+ // Get the image data
+ const unsigned char *pixels = f->GetPixels(10);
+ int pixel_index = 112 * 4;
+
+ // Check image properties on scanline 10, pixel 112
+ CHECK((int) pixels[pixel_index] == Approx(0).margin(5));
+ CHECK((int) pixels[pixel_index + 1] == Approx(0).margin(5));
+ CHECK((int) pixels[pixel_index + 2] == Approx(0).margin(5));
+ CHECK((int) pixels[pixel_index + 3] == Approx(255).margin(5));
+
+ f = r.GetFrame(90);
+
+ // Get the image data
+ pixels = f->GetPixels(820);
+ pixel_index = 930 * 4;
+
+ // Check image properties on scanline 820, pixel 930
+ CHECK((int) pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 3] == Approx(255).margin(5));
+
+ f = r.GetFrame(160);
+
+ // Get the image data
+ pixels = f->GetPixels(420);
+ pixel_index = 930 * 4;
+
+	// Check image properties on scanline 420, pixel 930
+ CHECK((int) pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 3] == Approx(255).margin(5));
+
+ f = r.GetFrame(240);
+
+ // Get the image data
+ pixels = f->GetPixels(624);
+ pixel_index = 930 * 4;
+
+	// Check image properties on scanline 624, pixel 930
+ CHECK((int) pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int) pixels[pixel_index + 3] == Approx(255).margin(5));
+
+ // Close reader
+ r.Close();
+
+ } catch (const InvalidCodec & e) {
+ // Ignore older FFmpeg versions which don't support AV1
+ } catch (const InvalidFile & e) {
+ // Ignore older FFmpeg versions which don't support AV1
+ }
+}
\ No newline at end of file