Refactor FFmpegReader GetAVFrame (for AV1 & async decoding) #872

Merged: 14 commits, Nov 16, 2022
55 changes: 53 additions & 2 deletions doc/INSTALL-WINDOWS.md
@@ -193,6 +193,7 @@ pacman -Syu
```
pacman -S --needed base-devel mingw-w64-x86_64-toolchain
pacman -S mingw64/mingw-w64-x86_64-ffmpeg
pacman -S mingw64/mingw-w64-x86_64-qt5
pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5
pacman -S mingw64/mingw-w64-x86_64-swig
pacman -S mingw64/mingw-w64-x86_64-cmake
@@ -202,6 +203,11 @@ pacman -S mingw32/mingw-w64-i686-zeromq
pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq
pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze
pacman -S mingw64/mingw-w64-x86_64-ninja
pacman -S mingw64/mingw-w64-x86_64-catch
pacman -S mingw-w64-x86_64-python3-pyopengl
pacman -S mingw-w64-clang-x86_64-python-pyopengl-accelerate
pacman -S mingw-w64-x86_64-python-pyopengl-accelerate
pacman -S mingw-w64-x86_64-python-pywin32
pacman -S git

# Install ImageMagick if needed (OPTIONAL and NOT NEEDED)
@@ -213,6 +219,7 @@ pacman -S mingw64/mingw-w64-x86_64-imagemagick
```
pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain
pacman -S mingw32/mingw-w64-i686-ffmpeg
pacman -S mingw32/mingw-w64-i686-qt5
pacman -S mingw32/mingw-w64-i686-python3-pyqt5
pacman -S mingw32/mingw-w64-i686-swig
pacman -S mingw32/mingw-w64-i686-cmake
Expand All @@ -222,6 +229,10 @@ pacman -S mingw32/mingw-w64-i686-zeromq
pacman -S mingw32/mingw-w64-i686-python3-pyzmq
pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze
pacman -S mingw32/mingw-w64-i686-ninja
pacman -S mingw32/mingw-w64-i686-catch
pacman -S mingw-w64-i686-python-pyopengl
pacman -S mingw-w64-i686-python-pyopengl-accelerate
pacman -S mingw-w64-i686-python-pywin32
pacman -S git

# Install ImageMagick if needed (OPTIONAL and NOT NEEDED)
@@ -237,6 +248,8 @@ pip3 install tinys3
pip3 install github3.py
pip3 install requests
pip3 install meson
pip3 install PyOpenGL
pip3 install PyOpenGL-accelerate
```

7) Download Unittest++ (https://github.com/unittest-cpp/unittest-cpp) into /MSYS2/[USER]/unittest-cpp-master/
@@ -251,12 +264,50 @@ mingw32-make install
```
git clone https://gitlab.gnome.org/GNOME/babl.git
cd babl
meson build --prefix=C:/msys64/mingw32
meson build --prefix=C:/msys64/mingw64 (or `--prefix=C:/msys64/mingw32` for a 32 bit build)
cd build
meson install
```

9) ZMQ++ Header (This might not be needed anymore)
9) Install opencv (used for AI and computer vision effects)

Note: one OpenCV header file had to be edited to add a missing typedef: `typedef unsigned int uint;`

```
git clone https://github.com/opencv/opencv
cd opencv/
git checkout '4.3.0'
cd ..
git clone https://github.com/opencv/opencv_contrib
cd opencv_contrib/
git checkout '4.3.0'
cd ..
cd opencv
mkdir build
cd build
cmake -D CMAKE_BUILD_TYPE=RELEASE -D WITH_TBB=OFF -D WITH_QT=ON -D WITH_OPENGL=ON -D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib/modules -D OPENCV_GENERATE_PKGCONFIG=ON -D BUILD_opencv_python2=OFF -D BUILD_opencv_python3=ON -G "MSYS Makefiles" ..
make -j4 -i   # -i ignores errors, which occur on MSYS2 for unclear reasons
make install -i
```
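
Not part of the upstream instructions, but a quick way to confirm the freshly built OpenCV is picked up by the MinGW toolchain is to compile a tiny check program. The file name below is arbitrary, and the `opencv4` pkg-config module is available because `OPENCV_GENERATE_PKGCONFIG=ON` is passed above:

```
// check_opencv.cpp - sanity check that the freshly built OpenCV is usable.
// Build from the MSYS2 MinGW shell:
//   g++ check_opencv.cpp -o check_opencv $(pkg-config --cflags --libs opencv4)
#include <opencv2/core.hpp>
#include <opencv2/core/version.hpp>
#include <iostream>

int main() {
    cv::Mat m = cv::Mat::eye(3, 3, CV_64F);  // exercises the core library
    std::cout << "OpenCV " << CV_VERSION
              << ", trace of eye(3,3) = " << cv::trace(m)[0] << std::endl;
    return 0;
}
```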

10) Install ReSVG (SVG rasterizing)

```
git clone https://github.com/RazrFalcon/resvg
cd resvg/c-api
QT_DIR="C:\\msys64\\mingw64\\" cargo build --verbose --release
# or, for a 32-bit build:
QT_DIR="C:\\msys64\\mingw32\\" cargo build --verbose --release

cd ../

# copy all required files into the system directories
cp target/release/resvg.dll /usr/lib/
mkdir -p /usr/include/resvg/
cp c-api/*.h /usr/include/resvg/
```

11) ZMQ++ Header (This might not be needed anymore)
NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder

## Manual Dependencies
Expand Down
Binary file added examples/test_video_sync.mp4
195 changes: 121 additions & 74 deletions src/FFmpegReader.cpp
@@ -74,7 +74,8 @@ FFmpegReader::FFmpegReader(const std::string &path, bool inspect_reader)
seek_audio_frame_found(0), seek_video_frame_found(0),is_duration_known(false), largest_frame_processed(0),
current_video_frame(0), packet(NULL), max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), audio_pts(0),
video_pts(0), pFormatCtx(NULL), videoStream(-1), audioStream(-1), pCodecCtx(NULL), aCodecCtx(NULL),
pStream(NULL), aStream(NULL), pFrame(NULL), previous_packet_location{-1,0} {
pStream(NULL), aStream(NULL), pFrame(NULL), previous_packet_location{-1,0},
resend_packet(false) {

// Initialize FFMpeg, and register all formats and codecs
AV_REGISTER_ALL
@@ -213,6 +214,7 @@ void FFmpegReader::Open() {
pFormatCtx = NULL;
{
hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1);
ZmqLogger::Instance()->AppendDebugMethod("Decode hardware acceleration settings", "hw_de_on", hw_de_on, "HARDWARE_DECODER", openshot::Settings::Instance()->HARDWARE_DECODER);
}

// Open video file
@@ -646,6 +648,7 @@ void FFmpegReader::Close() {

// Reset some variables
last_frame = 0;
resend_packet = false;
largest_frame_processed = 0;
seek_audio_frame_found = 0;
seek_video_frame_found = 0;
@@ -952,11 +955,13 @@ std::shared_ptr<Frame> FFmpegReader::ReadStream(int64_t requested_frame) {
break;
}

// Get the next packet
packet_error = GetNextPacket();
if (packet_error < 0 && !packet) {
// No more packets to be found
packet_status.packets_eof = true;
if (!resend_packet || !packet) {
// Get the next packet
packet_error = GetNextPacket();
if (packet_error < 0 && !packet) {
// No more packets to be found
packet_status.packets_eof = true;
}
}

// Debug output
@@ -977,7 +982,7 @@ std::shared_ptr<Frame> FFmpegReader::ReadStream(int64_t requested_frame) {

// Video packet
if ((info.has_video && packet && packet->stream_index == videoStream) ||
(info.has_video && !packet && packet_status.video_decoded < packet_status.video_read) ||
(info.has_video && packet_status.video_decoded < packet_status.video_read) ||

[Inline review comment from the PR author] Call ProcessVideoPacket() anytime video_decoded < video_read... so we can potentially resend a packet if needed, or try to retrieve a processed AVFrame from the decoder. (A standalone sketch of this send/receive pattern appears after the diffs below.)

(info.has_video && !packet && !packet_status.video_eof)) {
// Process Video Packet
ProcessVideoPacket(requested_frame);
@@ -1104,96 +1109,137 @@ bool FFmpegReader::GetAVFrame() {
AVFrame *next_frame = AV_ALLOCATE_FRAME();

#if IS_FFMPEG_3_2
int send_packet_err = avcodec_send_packet(pCodecCtx, packet);
int send_packet_err = 0;
int64_t send_packet_pts = 0;
if ((packet && packet->stream_index == videoStream && !resend_packet) || !packet) {
send_packet_err = avcodec_send_packet(pCodecCtx, packet);

if (packet && send_packet_err >= 0) {
send_packet_pts = GetPacketPTS();
resend_packet = false;
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet succeeded)",
"send_packet_err", send_packet_err, "send_packet_pts",
send_packet_pts);
}
}

#if USE_HW_ACCEL
// Get the format from the variables set in get_hw_dec_format
hw_de_av_pix_fmt = hw_de_av_pix_fmt_global;
hw_de_av_device_type = hw_de_av_device_type_global;
#endif // USE_HW_ACCEL
if (send_packet_err < 0 && send_packet_err != AVERROR_EOF) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)",
"send_packet_err", send_packet_err);
}
else {
int receive_frame_err = 0;
AVFrame *next_frame2;
#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
next_frame2 = AV_ALLOCATE_FRAME();
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: Not sent [" + av_err2string(send_packet_err) + "])", "send_packet_err", send_packet_err, "send_packet_pts", send_packet_pts);
if (send_packet_err == AVERROR(EAGAIN)) {
resend_packet = true;
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EAGAIN): user must read output with avcodec_receive_frame()", "send_packet_pts", send_packet_pts);
}
else
#endif // USE_HW_ACCEL
{
next_frame2 = next_frame;
if (send_packet_err == AVERROR(EINVAL)) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush", "send_packet_pts", send_packet_pts);
}
if (send_packet_err == AVERROR(ENOMEM)) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(ENOMEM): failed to add packet to internal queue, or legitimate decoding errors", "send_packet_pts", send_packet_pts);
}
pFrame = AV_ALLOCATE_FRAME();
while (receive_frame_err >= 0) {
receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);
}

// Always try and receive a packet, if not EOF.
// Even if the above avcodec_send_packet failed to send,
// we might still need to receive a packet.
int receive_frame_err = 0;
AVFrame *next_frame2;
#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
next_frame2 = AV_ALLOCATE_FRAME();
}
else
#endif // USE_HW_ACCEL
{
next_frame2 = next_frame;
}
pFrame = AV_ALLOCATE_FRAME();
while (receive_frame_err >= 0) {
receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);

if (receive_frame_err != 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (receive frame: frame not ready yet from decoder [\" + av_err2string(receive_frame_err) + \"])", "receive_frame_err", receive_frame_err, "send_packet_pts", send_packet_pts);

if (receive_frame_err == AVERROR_EOF) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (EOF detected from decoder)");
ZmqLogger::Instance()->AppendDebugMethod(
"FFmpegReader::GetAVFrame (receive frame: AVERROR_EOF: EOF detected from decoder, flushing buffers)", "send_packet_pts", send_packet_pts);
avcodec_flush_buffers(pCodecCtx);
packet_status.video_eof = true;
}
if (receive_frame_err == AVERROR(EINVAL) || receive_frame_err == AVERROR_EOF) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid frame received or EOF from decoder)");
if (receive_frame_err == AVERROR(EINVAL)) {
ZmqLogger::Instance()->AppendDebugMethod(
"FFmpegReader::GetAVFrame (receive frame: AVERROR(EINVAL): invalid frame received, flushing buffers)", "send_packet_pts", send_packet_pts);
avcodec_flush_buffers(pCodecCtx);
}
if (receive_frame_err != 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (frame not ready yet from decoder)");
break;
if (receive_frame_err == AVERROR(EAGAIN)) {
ZmqLogger::Instance()->AppendDebugMethod(
"FFmpegReader::GetAVFrame (receive frame: AVERROR(EAGAIN): output is not available in this state - user must try to send new input)", "send_packet_pts", send_packet_pts);
}

#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
int err;
if (next_frame2->format == hw_de_av_pix_fmt) {
next_frame->format = AV_PIX_FMT_YUV420P;
if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)");
}
if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)");
}
}
}
else
#endif // USE_HW_ACCEL
{ // No hardware acceleration used -> no copy from GPU memory needed
next_frame = next_frame2;
}

// TODO also handle possible further frames
// Use only the first frame like avcodec_decode_video2
frameFinished = 1;
packet_status.video_decoded++;

av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
(AVPixelFormat)(pStream->codecpar->format), info.width, info.height);

// Get display PTS from video frame, often different than packet->pts.
// Sending packets to the decoder (i.e. packet->pts) is async,
// and retrieving packets from the decoder (frame->pts) is async. In most decoders
// sending and retrieving are separated by multiple calls to this method.
if (next_frame->pts != AV_NOPTS_VALUE) {
// This is the current decoded frame (and should be the pts used) for
// processing this data
video_pts = next_frame->pts;
} else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
// Some videos only set this timestamp (fallback)
video_pts = next_frame->pkt_dts;
if (receive_frame_err == AVERROR_INPUT_CHANGED) {
ZmqLogger::Instance()->AppendDebugMethod(
"FFmpegReader::GetAVFrame (receive frame: AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame)", "send_packet_pts", send_packet_pts);
}

// break out of loop after each successful image returned
// Break out of decoding loop
// Nothing ready for decoding yet
break;
}
#if USE_HW_ACCEL

#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
AV_FREE_FRAME(&next_frame2);
int err;
if (next_frame2->format == hw_de_av_pix_fmt) {
next_frame->format = AV_PIX_FMT_YUV420P;
if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "hw_de_on", hw_de_on);
}
if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "hw_de_on", hw_de_on);
}
}
}
#endif // USE_HW_ACCEL
else
#endif // USE_HW_ACCEL
{ // No hardware acceleration used -> no copy from GPU memory needed
next_frame = next_frame2;
}

// TODO also handle possible further frames
// Use only the first frame like avcodec_decode_video2
frameFinished = 1;
packet_status.video_decoded++;

av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
(AVPixelFormat)(pStream->codecpar->format), info.width, info.height);

// Get display PTS from video frame, often different than packet->pts.
// Sending packets to the decoder (i.e. packet->pts) is async,
// and retrieving packets from the decoder (frame->pts) is async. In most decoders
// sending and retrieving are separated by multiple calls to this method.
if (next_frame->pts != AV_NOPTS_VALUE) {
// This is the current decoded frame (and should be the pts used) for
// processing this data
video_pts = next_frame->pts;
} else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
// Some videos only set this timestamp (fallback)
video_pts = next_frame->pkt_dts;
}

ZmqLogger::Instance()->AppendDebugMethod(
"FFmpegReader::GetAVFrame (Successful frame received)", "video_pts", video_pts, "send_packet_pts", send_packet_pts);

// break out of loop after each successful image returned
break;
}
#if USE_HW_ACCEL
if (hw_de_on && hw_de_supported) {
AV_FREE_FRAME(&next_frame2);
}
#endif // USE_HW_ACCEL
#else
avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet);

@@ -1744,6 +1790,7 @@ void FFmpegReader::Seek(int64_t requested_frame) {
video_pts_seconds = NO_PTS_OFFSET;
audio_pts = 0.0;
audio_pts_seconds = NO_PTS_OFFSET;
resend_packet = false;
last_frame = 0;
current_video_frame = 0;
largest_frame_processed = 0;
Expand Down
1 change: 1 addition & 0 deletions src/FFmpegReader.h
@@ -153,6 +153,7 @@ namespace openshot {

int64_t audio_pts;
int64_t video_pts;
bool resend_packet;
double pts_offset_seconds;
double audio_pts_seconds;
double video_pts_seconds;
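
For readers unfamiliar with the avcodec_send_packet() / avcodec_receive_frame() flow that GetAVFrame() is being moved onto, here is a minimal, self-contained sketch (not the OpenShot implementation) of the same pattern: read packets, resend a packet when the decoder reports AVERROR(EAGAIN) because its input queue is full (common with asynchronous decoders such as libdav1d for AV1), drain every frame the decoder has ready, and fall back to pkt_dts when a frame carries no pts. The resend_packet and packets_eof names deliberately echo the flags this PR adds or uses in FFmpegReader; everything else the real method does (hardware-accelerated transfer, image copies, ZmqLogger output) is omitted.

```
// decode_sketch.cpp - minimal illustration of the send/receive decode loop,
// including the AVERROR(EAGAIN) "resend packet" case handled by this PR.
// Build (FFmpeg 3.2+):
//   g++ decode_sketch.cpp -o decode_sketch $(pkg-config --cflags --libs libavformat libavcodec libavutil)
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#include <cstdio>

int main(int argc, char **argv) {
    if (argc < 2) { std::fprintf(stderr, "usage: %s <video file>\n", argv[0]); return 1; }

    AVFormatContext *fmt = nullptr;
    if (avformat_open_input(&fmt, argv[1], nullptr, nullptr) < 0) return 1;
    if (avformat_find_stream_info(fmt, nullptr) < 0) return 1;

    int vstream = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    if (vstream < 0) return 1;

    const AVCodec *dec = avcodec_find_decoder(fmt->streams[vstream]->codecpar->codec_id);
    AVCodecContext *ctx = avcodec_alloc_context3(dec);
    avcodec_parameters_to_context(ctx, fmt->streams[vstream]->codecpar);
    if (avcodec_open2(ctx, dec, nullptr) < 0) return 1;

    AVPacket *pkt = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    bool resend_packet = false;   // same role as the new FFmpegReader::resend_packet flag
    bool packets_eof = false;     // no more packets can be read from the container
    bool decoder_eof = false;     // decoder has been fully drained
    int64_t decoded = 0;

    while (!decoder_eof) {
        if (!resend_packet && !packets_eof) {
            if (av_read_frame(fmt, pkt) < 0) {
                packets_eof = true;                 // switch to draining mode below
            } else if (pkt->stream_index != vstream) {
                av_packet_unref(pkt);               // ignore non-video packets here
                continue;
            }
        }

        // A NULL packet tells the decoder to start flushing its internal queue.
        AVPacket *send_pkt = packets_eof ? nullptr : pkt;
        int send_err = avcodec_send_packet(ctx, send_pkt);
        if (send_err == AVERROR(EAGAIN)) {
            // Input queue full: drain frames below, then resend this same packet.
            resend_packet = true;
        } else {
            resend_packet = false;
            if (send_pkt) av_packet_unref(send_pkt);
        }

        // Drain every frame the decoder is ready to return right now.
        while (true) {
            int recv_err = avcodec_receive_frame(ctx, frame);
            if (recv_err == AVERROR(EAGAIN)) break;                     // needs more input
            if (recv_err == AVERROR_EOF) { decoder_eof = true; break; } // fully drained
            if (recv_err < 0) { decoder_eof = true; break; }            // real decode error
            ++decoded;
            // Prefer frame->pts; fall back to pkt_dts when a file only sets DTS.
            int64_t pts = (frame->pts != AV_NOPTS_VALUE) ? frame->pts : frame->pkt_dts;
            std::printf("frame %lld  pts %lld\n", (long long)decoded, (long long)pts);
            av_frame_unref(frame);
        }
    }

    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&ctx);
    avformat_close_input(&fmt);
    return 0;
}
```

The design point the refactor leans on is that sending packets and receiving frames are decoupled: with an asynchronous decoder, the frame for a given packet may only come back several calls later, so GetAVFrame() now remembers the packet it still owes the decoder (resend_packet) instead of assuming one packet in, one frame out.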