/// Copyright (C) 2009 - 2015 by Johns. All Rights Reserved.
/// Copyright (C) 2018 by pesintta, rofafor.
///
/// SPDX-License-Identifier: AGPL-3.0-only
///
/// This module contains all video rendering functions.
///
/// Uses Xlib where it is needed for VA-API. XCB is used for
/// everything else.
///
#ifndef AV_INFO_TIME
#define AV_INFO_TIME (50 * 60) ///< a/v info every minute
#endif
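// 50 * 60 frame intervals correspond to one minute at a 50 Hz frame/field rate, so the A/V
// info line is emitted roughly once per minute.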
#include <sys/time.h>
#include <sys/shm.h>
#include <sys/ipc.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <unistd.h>
#include <math.h>
#ifndef __USE_GNU
#define __USE_GNU
#include <pthread.h>
#include <time.h>
#include <signal.h>
#endif
#include <X11/Xlib-xcb.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/keysym.h>
#include <xcb/xcb.h>
#include <xcb/screensaver.h>
#include <xcb/dpms.h>
#include <xcb/xcb_icccm.h>
#include <xcb/xcb_ewmh.h>
#include <va/va_x11.h>
#if !VA_CHECK_VERSION(1,0,0)
#warning "libva is too old - please, upgrade!"
#define VA_FOURCC_I420 VA_FOURCC('I', '4', '2', '0')
#endif
#include <va/va_vpp.h>
#include <libavcodec/avcodec.h>
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(57,64,100)
#error "libavcodec is too old - please, upgrade!"
#endif
#include <libswscale/swscale.h>
#include <libavcodec/vaapi.h>
#include <libavutil/imgutils.h>
#include <libavutil/pixdesc.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_vaapi.h>
#include "iatomic.h" // portable atomic_t
#include "misc.h"
#include "video.h"
#include "audio.h"
#include "codec.h"
#define ARRAY_ELEMS(array) (sizeof(array)/sizeof(array[0]))
#define TO_AVHW_DEVICE_CTX(x) ((AVHWDeviceContext*)x->data)
#define TO_AVHW_FRAMES_CTX(x) ((AVHWFramesContext*)x->data)
#define TO_VAAPI_DEVICE_CTX(x) ((AVVAAPIDeviceContext*)TO_AVHW_DEVICE_CTX(x)->hwctx)
#define TO_VAAPI_FRAMES_CTX(x) ((AVVAAPIFramesContext*)TO_AVHW_FRAMES_CTX(x)->hwctx)
//----------------------------------------------------------------------------
// Declarations
//----------------------------------------------------------------------------
///
/// Video resolutions selector.
///
typedef enum _video_resolutions_
{
VideoResolution576i, ///< ...x576 interlaced
VideoResolution720p, ///< ...x720 progressive
VideoResolution1080i, ///< ...x1080 interlaced
VideoResolution1080p, ///< ...x1080 progressive
VideoResolution2160p, ///< ...x2160 progressive
VideoResolutionMax ///< number of resolution indexes
} VideoResolutions;
///
/// Video scaling modes.
///
typedef enum _video_scaling_modes_
{
VideoScalingNormal, ///< normal scaling
VideoScalingFast, ///< fastest scaling
VideoScalingHQ, ///< high quality scaling
VideoScalingAnamorphic, ///< anamorphic scaling
} VideoScalingModes;
///
/// Video zoom modes.
///
typedef enum _video_zoom_modes_
{
VideoNormal, ///< normal
VideoStretch, ///< stretch to all edges
VideoCenterCutOut, ///< center and cut out
VideoAnamorphic, ///< anamorphic scaled (unsupported)
} VideoZoomModes;
///
/// Video color space conversions.
///
typedef enum _video_color_space_
{
VideoColorSpaceNone, ///< no conversion
VideoColorSpaceBt601, ///< ITU.BT-601 Y'CbCr
VideoColorSpaceBt709, ///< ITU.BT-709 HDTV Y'CbCr
VideoColorSpaceSmpte240 ///< SMPTE-240M Y'PbPr
} VideoColorSpace;
///
/// Video output module structure and typedef.
///
typedef struct _video_module_
{
const char *Name; ///< video output module name
char Enabled; ///< flag output module enabled
/// allocate new video hw decoder
VideoHwDecoder *(*const NewHwDecoder)(VideoStream *);
void (*const DelHwDecoder) (VideoHwDecoder *);
unsigned (*const GetSurface) (VideoHwDecoder *, const AVCodecContext *);
void (*const ReleaseSurface) (VideoHwDecoder *, unsigned);
enum AVPixelFormat (*const get_format) (VideoHwDecoder *, AVCodecContext *, const enum AVPixelFormat *);
void (*const RenderFrame) (VideoHwDecoder *, const AVCodecContext *, const AVFrame *);
void (*const SetClock) (VideoHwDecoder *, int64_t);
int64_t(*const GetClock) (const VideoHwDecoder *);
void (*const SetClosing) (const VideoHwDecoder *);
void (*const ResetStart) (const VideoHwDecoder *);
void (*const SetTrickSpeed) (const VideoHwDecoder *, int);
uint8_t *(*const GrabOutput)(int *, int *, int *);
char *(*const GetStats)(VideoHwDecoder *);
char *(*const GetInfo)(VideoHwDecoder *, const char *);
void (*const SetBackground) (uint32_t);
void (*const SetVideoMode) (void);
void (*const ResetAutoCrop) (void);
/// module display handler thread
void (*const DisplayHandlerThread) (void);
void (*const OsdClear) (void); ///< clear OSD
/// draw OSD ARGB area
void (*const OsdDrawARGB) (int, int, int, int, int, const uint8_t *, int, int);
void (*const OsdInit) (int, int); ///< initialize OSD
void (*const OsdExit) (void); ///< cleanup OSD
int (*const Init) (const char *); ///< initialize video output module
void (*const Exit) (void); ///< cleanup video output module
} VideoModule;
///
/// Video configuration values typedef.
///
typedef struct _video_config_values_
{
int active;
float min_value;
float max_value;
float def_value;
float step;
float scale; // scale normalized to match the UI value range
float drv_scale; // original driver scale, needed when writing the value back to the driver
} VideoConfigValues;
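// Illustrative example (assumed usage): if a driver reports brightness as -50 .. 50 while the
// UI exposes -100 .. 100, scale would hold the UI normalization factor (2.0) and drv_scale
// the factor needed to map a UI value back to the driver range (0.5).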
//----------------------------------------------------------------------------
// Defines
//----------------------------------------------------------------------------
#define VIDEO_SURFACES_MAX 4 ///< video output surfaces for queue
#define POSTPROC_SURFACES_MAX 8 ///< video postprocessing surfaces for queue
//----------------------------------------------------------------------------
// Variables
//----------------------------------------------------------------------------
// Brightness (-100.00 - 100.00 ++ 1.00 = 0.00)
static VideoConfigValues VaapiConfigBrightness = {.active = 0,.min_value = -100.0,.max_value = 100.0,.def_value =
0.0,.step = 1.0,.scale = 1.0,.drv_scale = 1.0
};
// Contrast (0.00 - 10.00 ++ 0.10 = 1.00)
static VideoConfigValues VaapiConfigContrast = {.active = 0,.min_value = 0.0,.max_value = 10.0,.def_value = 1.0,.step =
0.1,.scale = 1.0,.drv_scale = 1.0
};
// Saturation (0.00 - 10.00 ++ 0.10 = 1.00)
static VideoConfigValues VaapiConfigSaturation = {.active = 0,.min_value = 0.0,.max_value = 10.0,.def_value =
1.0,.step = 0.1,.scale = 1.0,.drv_scale = 1.0
};
// Hue (-180.00 - 180.00 ++ 1.00 = 0.00)
static VideoConfigValues VaapiConfigHue = {.active = 0,.min_value = -180.0,.max_value = 180.0,.def_value = 0.0,.step =
1.0,.scale = 1.0,.drv_scale = 1.0
};
// Denoise (0.00 - 1.00 ++ 0.03 = 0.50)
static VideoConfigValues VaapiConfigDenoise = {.active = 0,.min_value = 0.0,.max_value = 1.0,.def_value = 0.5,.step =
0.03,.scale = 1.0,.drv_scale = 1.0
};
// Sharpen (0.00 - 1.00 ++ 0.03 = 0.50)
static VideoConfigValues VaapiConfigSharpen = {.active = 0,.min_value = 0.0,.max_value = 1.0,.def_value = 0.5,.step =
0.03,.scale = 1.0,.drv_scale = 1.0
};
static VideoConfigValues VaapiConfigStde = {.active = 1,.min_value = 0.0,.max_value = 4.0,.def_value = 0.0,.step =
1.0,.scale = 1.0,.drv_scale = 1.0
};
char VideoIgnoreRepeatPict; ///< disable repeat pict warning
static const char *VideoDriverName = "va-api"; ///< video output device - default to va-api
static Display *XlibDisplay; ///< Xlib X11 display
static xcb_connection_t *Connection; ///< xcb connection
static xcb_colormap_t VideoColormap; ///< video colormap
static xcb_window_t VideoWindow; ///< video window
static xcb_screen_t const *VideoScreen; ///< video screen
static uint32_t VideoBlankTick; ///< blank cursor timer
static xcb_pixmap_t VideoCursorPixmap; ///< blank cursor pixmap
static xcb_cursor_t VideoBlankCursor; ///< empty invisible cursor
static int VideoWindowX; ///< video output window x coordinate
static int VideoWindowY; ///< video output window y coordinate
static unsigned VideoWindowWidth; ///< video output window width
static unsigned VideoWindowHeight; ///< video output window height
static const VideoModule NoopModule; ///< forward definition of noop module
/// selected video module
static const VideoModule *VideoUsedModule = &NoopModule;
static char VideoSurfaceModesChanged; ///< flag surface modes changed
static uint32_t VideoBackground; ///< video background color
/// Default color balance filter mode.
static int VideoColorBalance = 1;
/// Default skin tone enhancement mode.
static int VideoSkinToneEnhancement = 0;
/// Default deinterlace mode.
static VAProcDeinterlacingType VideoDeinterlace[VideoResolutionMax];
/// Default amount of noise reduction to apply (0 .. 1000).
static int VideoDenoise[VideoResolutionMax];
/// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
static int VideoSharpen[VideoResolutionMax];
/// Default cut top and bottom in pixels
static int VideoCutTopBottom[VideoResolutionMax];
/// Default cut left and right in pixels
static int VideoCutLeftRight[VideoResolutionMax];
/// Color space ITU-R BT.601, ITU-R BT.709, ...
static const VideoColorSpace VideoColorSpaces[VideoResolutionMax] = {
VideoColorSpaceBt601, VideoColorSpaceBt709, VideoColorSpaceBt709,
VideoColorSpaceBt709, VideoColorSpaceBt709
};
/// Default scaling mode
static VideoScalingModes VideoScaling[VideoResolutionMax];
/// Default audio/video delay
int VideoAudioDelay;
/// Default zoom mode for 4:3
static VideoZoomModes Video4to3ZoomMode;
/// Default zoom mode for 16:9 and others
static VideoZoomModes VideoOtherZoomMode;
static char Video60HzMode; ///< handle 60hz displays
static char VideoSoftStartSync; ///< soft start sync audio/video
static const int VideoSoftStartFrames = 100; ///< soft start frames
static xcb_atom_t WmDeleteWindowAtom; ///< WM delete message atom
static xcb_atom_t NetWmState; ///< wm-state message atom
static xcb_atom_t NetWmStateFullscreen; ///< fullscreen wm-state message atom
#ifdef DEBUG
extern uint32_t VideoSwitch; ///< ticks for channel switch
#endif
extern void AudioVideoReady(int64_t); ///< tell audio video is ready
extern int IsReplay(void);
static pthread_t VideoThread; ///< video decode thread
static pthread_cond_t VideoWakeupCond; ///< wakeup condition variable
static pthread_mutex_t VideoMutex; ///< video condition mutex
static pthread_mutex_t VideoLockMutex; ///< video lock mutex
extern pthread_mutex_t PTS_mutex; ///< PTS mutex
extern pthread_mutex_t ReadAdvance_mutex; ///< read-advance mutex
static char OsdShown; ///< flag show osd
static int OsdDirtyX; ///< osd dirty area x
static int OsdDirtyY; ///< osd dirty area y
static int OsdDirtyWidth; ///< osd dirty area width
static int OsdDirtyHeight; ///< osd dirty area height
static int64_t VideoDeltaPTS; ///< FIXME: fix pts
static char DPMSDisabled; ///< flag we have disabled dpms
uint32_t mutex_start_time;
uint32_t max_mutex_delay = 1;
//----------------------------------------------------------------------------
// Common Functions
//----------------------------------------------------------------------------
static void VideoThreadLock(void); ///< lock video thread
static void VideoThreadUnlock(void); ///< unlock video thread
static void VideoThreadExit(void); ///< exit/kill video thread
static void X11SuspendScreenSaver(xcb_connection_t *, int);
static int X11HaveDPMS(xcb_connection_t *);
static void X11DPMSReenable(xcb_connection_t *);
static void X11DPMSDisable(xcb_connection_t *);
///
/// Update video pts.
///
/// @param pts_p pointer to pts
/// @param interlaced interlaced flag (the frame's own interlaced flag isn't reliable)
/// @param video_ctx ffmpeg video codec context
/// @param frame frame to display
///
/// @note frame->interlaced_frame can't be used for interlace detection
///
static void VideoSetPts(int64_t * pts_p, int interlaced, const AVCodecContext * video_ctx, const AVFrame * frame)
{
int64_t pts;
int duration;
//
// Get duration for this frame.
// FIXME: using framerate as workaround for AVFrame->pkt_duration
//
if (video_ctx->framerate.num && video_ctx->framerate.den) {
duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
} else {
duration = interlaced ? 40 : 20; // 50Hz -> 20ms default
}
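// Example: a 25 fps stream (framerate 25/1) gives duration = 1000 * 1 / 25 = 40 ms,
// which advances the 90 kHz PTS clock below by 40 * 90 = 3600 ticks per frame.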
Debug8("video: %d/%d %" PRIx64 " -> %d", video_ctx->framerate.den, video_ctx->framerate.num, frame->pkt_duration,
duration);
// update video clock
if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
*pts_p += duration * 90;
//Info("video: %s +pts", Timestamp2String(*pts_p));
}
//av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
//pts = frame->best_effort_timestamp;
pts = frame->pts;
if (pts == (int64_t) AV_NOPTS_VALUE || !pts) {
// libav: 0.8pre didn't set pts
pts = frame->pkt_dts;
}
// libav: sets only pkt_dts which can be 0
if (pts && pts != (int64_t) AV_NOPTS_VALUE) {
// build a monotonic pts
if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
int64_t delta;
delta = pts - *pts_p;
// ignore negative jumps
if (delta > -600 * 90 && delta <= -40 * 90) {
if (-delta > VideoDeltaPTS) {
VideoDeltaPTS = -delta;
Debug8("video: %#012" PRIx64 "->%#012" PRIx64 " delta%+4" PRId64 " pts", *pts_p, pts,
pts - *pts_p);
}
return;
}
} else { // first new clock value
AudioVideoReady(pts);
}
if (*pts_p != pts) {
Debug8("video: %#012" PRIx64 "->%#012" PRIx64 " delta=%4" PRId64 " pts", *pts_p, pts, pts - *pts_p);
*pts_p = pts;
}
}
}
///
/// Update output for new size or aspect ratio.
///
/// @param input_aspect_ratio video stream aspect
///
static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, int input_height,
VideoResolutions resolution, int video_x, int video_y, int video_width, int video_height, int *output_x,
int *output_y, int *output_width, int *output_height, int *crop_x, int *crop_y, int *crop_width, int *crop_height)
{
AVRational display_aspect_ratio;
AVRational tmp_ratio;
int scaled_width, scaled_height;
if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
input_aspect_ratio.num = 1;
input_aspect_ratio.den = 1;
Debug7("video: aspect defaults to %d:%d", input_aspect_ratio.num, input_aspect_ratio.den);
}
av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den, input_width * input_aspect_ratio.num,
input_height * input_aspect_ratio.den, 1024 * 1024);
// InputWidth/Height can be zero = uninitialized
if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
input_aspect_ratio.num = 1;
input_aspect_ratio.den = 1;
}
display_aspect_ratio.num = VideoScreen->width_in_pixels * VideoScreen->height_in_millimeters;
display_aspect_ratio.den = VideoScreen->height_in_pixels * VideoScreen->width_in_millimeters;
display_aspect_ratio = av_mul_q(input_aspect_ratio, display_aspect_ratio);
Debug7("video: aspect ratio %d:%d", display_aspect_ratio.num, display_aspect_ratio.den);
*crop_x = VideoCutLeftRight[resolution];
*crop_y = VideoCutTopBottom[resolution];
*crop_width = VideoWindowWidth - VideoCutLeftRight[resolution] * 2;
*crop_height = VideoWindowHeight - VideoCutTopBottom[resolution] * 2;
// FIXME: store different positions for the ratios
tmp_ratio.num = 4;
tmp_ratio.den = 3;
if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
switch (Video4to3ZoomMode) {
case VideoNormal:
goto normal;
case VideoStretch:
goto stretch;
case VideoCenterCutOut:
goto center_cut_out;
case VideoAnamorphic:
// FIXME: rest should be done by hardware
goto stretch;
}
}
switch (VideoOtherZoomMode) {
case VideoNormal:
goto normal;
case VideoStretch:
goto stretch;
case VideoCenterCutOut:
goto center_cut_out;
case VideoAnamorphic:
// FIXME: rest should be done by hardware
goto stretch;
}
normal:
*output_x = video_x;
*output_y = video_y;
*output_width =
(video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
*output_height =
(video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
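// Example (square display pixels assumed, i.e. display_aspect_ratio 16:9): a 16:9 stream in a
// 1024x768 window yields output_width = (768 * 16 + 8) / 9 = 1366 and output_height = 576;
// 1366 > 1024, so the width is clamped to 1024 and the 576-line picture is centered
// vertically (letterbox).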
if (*output_width > video_width) {
*output_width = video_width;
*output_y += (video_height - *output_height) / 2;
} else if (*output_height > video_height) {
*output_height = video_height;
*output_x += (video_width - *output_width) / 2;
}
Debug7("video: aspect output %dx%d%+d%+d", *output_width, *output_height, *output_x, *output_y);
return;
stretch:
*output_x = video_x;
*output_y = video_y;
*output_width = video_width;
*output_height = video_height;
Debug7("video: stretch output %dx%d%+d%+d", *output_width, *output_height, *output_x, *output_y);
return;
center_cut_out:
*output_x = video_x;
*output_y = video_y;
*output_height = video_height;
*output_width = video_width;
scaled_width = (video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
scaled_height = (video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
// look which side must be cut
if (scaled_width > video_width) {
// adjust scaling
int tmp = (scaled_width - video_width) / 2;
*crop_x += tmp;
*crop_width = video_width - tmp * 2;
*crop_height = video_height - VideoCutTopBottom[resolution] * 2;
} else if (scaled_height > video_height) {
// adjust scaling
int tmp = (scaled_height - video_height) / 2;
*crop_y += tmp;
*crop_width = video_width - VideoCutLeftRight[resolution] * 2;
*crop_height = video_height - tmp * 2;
} else {
*crop_width = video_width - VideoCutLeftRight[resolution] * 2;
*crop_height = video_height - VideoCutTopBottom[resolution] * 2;
}
Debug7("video: aspect crop %dx%d%+d%+d @ %dx%d%+d%+d", *crop_width, *crop_height, *crop_x, *crop_y, *output_width,
*output_height, *output_x, *output_y);
return;
}
//----------------------------------------------------------------------------
// common functions
//----------------------------------------------------------------------------
///
/// Calculate resolution group.
///
/// @param width video picture raw width
/// @param height video picture raw height
/// @param interlace flag interlaced video picture
///
static VideoResolutions VideoResolutionGroup(int width, int height, int interlace)
{
if (height <= 576) {
Debug7("video: resolution 576i selected");
return VideoResolution576i;
} else if (height <= 720) {
Debug7("video: resolution 720p selected");
return VideoResolution720p;
} else if (height <= 1080) {
if (interlace) {
Debug7("video: resolution 1080i selected");
return VideoResolution1080i;
}
Debug7("video: resolution 1080p selected");
return VideoResolution1080p;
} else if (height <= 2160) {
Debug7("video: resolution 2160p selected");
return VideoResolution2160p;
}
Debug7("video: resolution 1080i selected");
return VideoResolution1080i;
}
///
/// Clamp given value against config limits
///
/// @param config config struct
/// @param valueIn sample value
/// @return clamped value
///
static inline int VideoConfigClamp(VideoConfigValues * config, float valueIn)
{
if (valueIn < config->min_value)
return config->min_value;
else if (valueIn > config->max_value)
return config->max_value;
return valueIn;
}
//----------------------------------------------------------------------------
// auto-crop
//----------------------------------------------------------------------------
///
/// auto-crop context structure and typedef.
///
typedef struct _auto_crop_ctx_
{
int X1; ///< detected left border
int X2; ///< detected right border
int Y1; ///< detected top border
int Y2; ///< detected bottom border
int Count; ///< counter to delay switch
int State; ///< auto-crop state (0, 14, 16)
} AutoCropCtx;
#define YBLACK 0x20 ///< below is black
#define UVBLACK 0x80 ///< around is black
#define M64 UINT64_C(0x0101010101010101) ///< 64-bit multiplier to replicate a byte into all 8 bytes
/// auto-crop percent of video width to ignore logos
static const int AutoCropLogoIgnore = 24;
static int AutoCropInterval; ///< auto-crop check interval
static int AutoCropDelay; ///< auto-crop switch delay
static int AutoCropTolerance; ///< auto-crop tolerance
///
/// Detect black line Y.
///
/// @param data Y plane pixel data
/// @param length number of 8-pixel groups to check
/// @param pitch offset in bytes between consecutive groups
///
/// @note 8 pixels are checked at once; data and pitch must be 8-byte aligned
///
static int AutoCropIsBlackLineY(const uint8_t * data, int length, int pitch)
{
int n;
int o;
uint64_t r;
const uint64_t *p;
#ifdef DEBUG
if ((size_t) data & 0x7 || pitch & 0x7) {
abort();
}
#endif
p = (const uint64_t *)data;
n = length; // FIXME: can remove n
o = pitch / 8;
r = 0UL;
while (--n >= 0) {
r |= *p;
p += o;
}
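// (YBLACK - 1) * M64 replicates 0x1f into every byte of the 64-bit word; the mask
// ~((YBLACK - 1) * M64) therefore keeps only bits >= 0x20 of each pixel, so the line is
// black iff no pixel in any of the OR-ed 8-pixel groups reaches YBLACK.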
// below YBLACK(0x20) is black
return !(r & ~((YBLACK - 1) * M64));
}
///
/// Auto detect black borders and crop them.
///
/// @param autocrop auto-crop variables
/// @param width frame width in pixel
/// @param height frame height in pixel
/// @param data frame planes data (Y, U, V)
/// @param pitches frame planes pitches (Y, U, V)
///
/// @note FIXME: can reduce the checked range, left, right crop isn't
/// used yet.
///
/// @note FIXME: only Y is checked, for black.
///
static void AutoCropDetect(AutoCropCtx * autocrop, int width, int height, void *data[3], uint32_t pitches[3])
{
const void *data_y;
unsigned length_y;
int x;
int y;
int x1;
int x2;
int y1;
int y2;
int logo_skip;
//
// ignore top+bottom 6 lines and left+right 8 pixels
//
#define SKIP_X 8
#define SKIP_Y 6
x1 = width - 1;
x2 = 0;
y1 = height - 1;
y2 = 0;
logo_skip = SKIP_X + (((width * AutoCropLogoIgnore) / 100 + 8) / 8) * 8;
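// Example: for a 1920 pixel wide frame with AutoCropLogoIgnore = 24 this gives
// logo_skip = 8 + ((1920 * 24 / 100 + 8) / 8) * 8 = 8 + 464 = 472, i.e. roughly the outer
// quarter on each side is excluded from the top/bottom search so channel logos don't
// break the black-border detection.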
data_y = data[0];
length_y = pitches[0];
//
// search top
//
for (y = SKIP_Y; y < y1; ++y) {
if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) {
if (y == SKIP_Y) {
y = 0;
}
y1 = y;
break;
}
}
//
// search bottom
//
for (y = height - SKIP_Y - 1; y > y2; --y) {
if (!AutoCropIsBlackLineY(data_y + logo_skip + y * length_y, (width - 2 * logo_skip) / 8, 8)) {
if (y == height - SKIP_Y - 1) {
y = height - 1;
}
y2 = y;
break;
}
}
//
// search left
//
for (x = SKIP_X; x < x1; x += 8) {
if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y, length_y)) {
if (x == SKIP_X) {
x = 0;
}
x1 = x;
break;
}
}
//
// search right
//
for (x = width - SKIP_X - 8; x > x2; x -= 8) {
if (!AutoCropIsBlackLineY(data_y + x + SKIP_Y * length_y, height - 2 * SKIP_Y, length_y)) {
if (x == width - SKIP_X - 8) {
x = width - 1;
}
x2 = x;
break;
}
}
autocrop->X1 = x1;
autocrop->X2 = x2;
autocrop->Y1 = y1;
autocrop->Y2 = y2;
}
//----------------------------------------------------------------------------
// VA-API
//----------------------------------------------------------------------------
AVBufferRef *HwDeviceContext; ///< ffmpeg HW device context
static VADisplay *VaDisplay; ///< VA-API display
static VAImage VaOsdImage = {
.image_id = VA_INVALID_ID
}; ///< osd VA-API image
static VASubpictureID VaOsdSubpicture = VA_INVALID_ID; ///< osd VA-API subpicture
static char VaapiUnscaledOsd; ///< unscaled osd supported
static char VaapiVideoProcessing; ///< supports video processing
/// VA-API decoder typedef
typedef struct _vaapi_decoder_ VaapiDecoder;
///
/// VA-API decoder
///
struct _vaapi_decoder_
{
VADisplay *VaDisplay; ///< VA-API display
xcb_window_t Window; ///< output window
int VideoX; ///< video base x coordinate
int VideoY; ///< video base y coordinate
int VideoWidth; ///< video base width
int VideoHeight; ///< video base height
int OutputX; ///< real video output x coordinate
int OutputY; ///< real video output y coordinate
int OutputWidth; ///< real video output width
int OutputHeight; ///< real video output height
/// flags for put surface for different resolutions groups
unsigned SurfaceFlagsTable[VideoResolutionMax];
unsigned SurfaceDeintTable[VideoResolutionMax];
enum AVPixelFormat PixFmt; ///< ffmpeg frame pixfmt
int WrongInterlacedWarned; ///< warning about interlace flag issued
int Interlaced; ///< ffmpeg interlaced flag
int Deinterlaced; ///< vpp deinterlace was run / not run
int TopFieldFirst; ///< ffmpeg top field displayed first
int GetPutImage; ///< flag get/put image can be used
VAImage Image[1]; ///< image buffer to update surface
VAEntrypoint VppEntrypoint; ///< VA-API postprocessing entrypoint
VAConfigID VppConfig; ///< VPP Config
VAContextID vpp_ctx; ///< VPP Context
int InputWidth; ///< video input width
int InputHeight; ///< video input height
AVRational InputAspect; ///< video input aspect ratio
VideoResolutions Resolution; ///< resolution group
int CropX; ///< video crop x
int CropY; ///< video crop y
int CropWidth; ///< video crop width
int CropHeight; ///< video crop height
AutoCropCtx AutoCrop[1]; ///< auto-crop variables
VASurfaceID BlackSurface; ///< empty black surface
/// video surface ring buffer
VASurfaceID SurfacesRb[VIDEO_SURFACES_MAX];
VASurfaceID PostProcSurfacesRb[POSTPROC_SURFACES_MAX]; ///< Postprocessing result surfaces
VASurfaceID *ForwardRefSurfaces; ///< Forward referencing surfaces for post processing
VASurfaceID *BackwardRefSurfaces; ///< Backward referencing surfaces for post processing
unsigned int ForwardRefCount; ///< Current number of forward references
unsigned int BackwardRefCount; ///< Current number of backward references
VASurfaceID PlaybackSurface; ///< Currently playing surface
int SurfaceWrite; ///< write pointer
int SurfaceRead; ///< read pointer
atomic_t SurfacesFilled; ///< how many of the buffer is used
int PostProcSurfaceWrite; ///< postprocessing write pointer
int SurfaceField; ///< current displayed field
int TrickSpeed; ///< current trick speed
int TrickCounter; ///< current trick speed counter
struct timespec FrameTime; ///< time of last display
VideoStream *Stream; ///< video stream
int Closing; ///< flag about closing current stream
int SyncOnAudio; ///< flag sync to audio
int64_t PTS; ///< video PTS clock
int LastAVDiff; ///< last audio - video difference
int SyncCounter; ///< counter to sync frames
int StartCounter; ///< counter for video start
int FramesDuped; ///< number of frames duplicated
int FramesMissed; ///< number of frames missed
int FramesDropped; ///< number of frames dropped
int FrameCounter; ///< number of frames decoded
int FramesDisplayed; ///< number of frames displayed
VABufferID filters[VAProcFilterCount]; ///< video postprocessing filters via vpp
VABufferID gpe_filters[VAProcFilterCount]; ///< video postprocessing filters via gpe
unsigned filter_n; ///< number of postprocessing filters
unsigned gpe_filter_n; ///< number of gpe postprocessing filters
unsigned SupportedDeinterlacers[VAProcDeinterlacingCount]; ///< supported deinterlacing methods
VABufferID *vpp_deinterlace_buf; ///< video postprocessing deinterlace buffer
VABufferID *vpp_denoise_buf; ///< video postprocessing denoise buffer
VABufferID *vpp_cbal_buf; ///< video color balance filters via vpp
VABufferID *vpp_sharpen_buf; ///< video postprocessing sharpen buffer
VABufferID *vpp_stde_buf; ///< video postprocessing skin tone enhancement buffer
int vpp_brightness_idx; ///< video postprocessing brightness buffer index
int vpp_contrast_idx; ///< video postprocessing contrast buffer index
int vpp_hue_idx; ///< video postprocessing hue buffer index
int vpp_saturation_idx; ///< video postprocessing saturation buffer index
};
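// Ring buffer sketch (inferred from the field names): the decoder writes new surfaces into
// SurfacesRb at SurfaceWrite, the display thread consumes them at SurfaceRead, and the
// atomic SurfacesFilled counts the entries in use so both threads can check the fill level
// without holding the video mutex.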
static VaapiDecoder *VaapiDecoders[1]; ///< open decoder streams
static int VaapiDecoderN; ///< number of decoder streams
/// forward display back surface
static void VaapiBlackSurface(VaapiDecoder *);
/// forward definition release surface
static void VaapiReleaseSurface(VaapiDecoder *, VASurfaceID);
//----------------------------------------------------------------------------
// VA-API Functions
//----------------------------------------------------------------------------
//----------------------------------------------------------------------------
extern int SysLogLevel; ///< VDR's global log level
///
/// Output video messages.
///
/// Suppresses repeated identical messages to reduce log output.
///
/// @param level message level (Error, Info, Debug, ...)
/// @param format printf format string (NULL to flush messages)
/// @param ... printf arguments
///
/// @returns true, if message shown
///
static int VaapiMessage(int level, const char *format, ...)
{
if (SysLogLevel > level) {
static const char *last_format;
static char buf[256];
va_list ap;
va_start(ap, format);
if (format != last_format) { // don't repeat same message
if (buf[0]) { // print last repeated message
switch (level) {
case 0:
Error("%s", buf);
break;
case 1:
Info("%s", buf);
break;
default:
Debug("%s", buf);
break;
}
buf[0] = '\0';
}
if (format) {
last_format = format;
switch (level) {
case 0:
Error(format, ap);
break;
case 1:
Info(format, ap);
break;
default:
Debug(format, ap);
break;
}
}
va_end(ap);
return 1;
}
vsnprintf(buf, sizeof(buf), format, ap);
va_end(ap);
}
return 0;
}
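// Note: VaapiMessage() collapses runs of the same format string: the first occurrence is
// printed, later repeats only overwrite buf, and the buffered repeat is flushed once a
// different message (or a NULL format) arrives.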
// Surfaces -------------------------------------------------------------
///
/// Associate OSD with surface.
///
/// @param decoder VA-API decoder
///
static void VaapiAssociate(VaapiDecoder * decoder)
{
}
///
/// Deassociate OSD with surface.
///
/// @param decoder VA-API decoder
///
static void VaapiDeassociate(VaapiDecoder * decoder)
{
if (VaOsdSubpicture != VA_INVALID_ID) {
vaDeassociateSubpicture(decoder->VaDisplay, VaOsdSubpicture, decoder->PostProcSurfacesRb,
POSTPROC_SURFACES_MAX);
}
}
///
/// Create surfaces for VA-API decoder.
///
/// @param decoder VA-API decoder
/// @param width surface source/video width
/// @param height surface source/video height
///
static void VaapiCreateSurfaces(VaapiDecoder * decoder, int width, int height)
{
if (vaCreateSurfaces(decoder->VaDisplay, VA_RT_FORMAT_YUV420, width, height, decoder->PostProcSurfacesRb,
POSTPROC_SURFACES_MAX, NULL, 0) != VA_STATUS_SUCCESS) {
Fatal("video/vaapi: can't create %d postproc surfaces", POSTPROC_SURFACES_MAX);
}
}
///
/// Destroy surfaces of VA-API decoder.
///
/// @param decoder VA-API decoder
///
static void VaapiDestroySurfaces(VaapiDecoder * decoder)
{
Debug7("video/vaapi: %s:", __FUNCTION__);
//
// update OSD associate
//
VaapiDeassociate(decoder);
if (vaDestroySurfaces(decoder->VaDisplay, decoder->PostProcSurfacesRb, POSTPROC_SURFACES_MAX) != VA_STATUS_SUCCESS) {
Error("video/vaapi: can't destroy %d surfaces", POSTPROC_SURFACES_MAX);
}
}
///
/// Check whether an array of surfaces is valid and ready
///
/// @param va_display VADisplay to use
/// @param surfaces array of surfaces to check
/// @param num_surfaces number of surfaces
///
/// @returns status of the check: VA_STATUS_SUCCESS if the surfaces were valid and ready
///
static VAStatus VaapiCheckSurfaces(VADisplay va_display, VASurfaceID * surfaces, unsigned int num_surfaces)