diff --git a/docs/pages/component/events.md b/docs/pages/component/events.md
index f45a447769..aa7aa0c2a7 100644
--- a/docs/pages/component/events.md
+++ b/docs/pages/component/events.md
@@ -493,7 +493,25 @@ Example:
 }
 ```
 
 Platforms: Android
 
+### `onTextTrackDataChanged`
+Callback function that is called when new subtitle data is available. It provides the actual subtitle content of the currently selected text track, when available (mainly WebVTT).
+
+Payload:
+
+Property | Type | Description
+--- | --- | ---
+`subtitleTracks` | `string` | The subtitle text content in a compatible format.
+
+Example:
+```javascript
+{
+  subtitleTracks: "This blade has a dark past.",
+}
+```
+
+Platforms: iOS
+
 ### `onVideoTracks`
 Callback function that is called when video tracks change
diff --git a/examples/basic/src/VideoPlayer.tsx b/examples/basic/src/VideoPlayer.tsx
index c38b68e062..ed9f06f511 100644
--- a/examples/basic/src/VideoPlayer.tsx
+++ b/examples/basic/src/VideoPlayer.tsx
@@ -33,6 +33,8 @@ import Video, {
   ResizeMode,
   SelectedTrack,
   DRMType,
+  OnTextTrackDataChangedData,
+  SelectedTrackType,
 } from 'react-native-video';
 import ToggleControl from './ToggleControl';
 import MultiValueControl, {
@@ -120,7 +122,12 @@ class VideoPlayer extends Component {
     },
   ];
 
-  srcIosList = [];
+  srcIosList = [
+    {
+      description: 'sintel with subtitles',
+      uri: 'https://bitmovin-a.akamaihd.net/content/sintel/hls/playlist.m3u8',
+    },
+  ];
 
   srcAndroidList = [
     {
@@ -231,7 +238,7 @@ class VideoPlayer extends Component {
 
   onTextTracks = (data: OnTextTracksData) => {
     const selectedTrack = data.textTracks?.find((x: TextTrack) => {
-      return x.selected;
+      return x?.selected;
     });
 
     this.setState({
@@ -248,6 +255,10 @@ class VideoPlayer extends Component {
     }
   };
 
+  onTextTrackDataChanged = (data: OnTextTrackDataChangedData) => {
+    console.log(`Subtitles: ${JSON.stringify(data, null, 2)}`);
+  };
+
   onAspectRatio = (data: OnVideoAspectRatioData) => {
     console.log('onAspectRadio called ' + JSON.stringify(data));
     this.setState({
@@ -749,6 +760,7 @@ class VideoPlayer extends Component {
            onLoad={this.onLoad}
            onAudioTracks={this.onAudioTracks}
            onTextTracks={this.onTextTracks}
+           onTextTrackDataChanged={this.onTextTrackDataChanged}
            onProgress={this.onProgress}
            onEnd={this.onEnd}
            progressUpdateInterval={1000}
diff --git a/ios/Video/Features/RCTPlayerObserver.swift b/ios/Video/Features/RCTPlayerObserver.swift
index dfdd832032..a11dcfec2c 100644
--- a/ios/Video/Features/RCTPlayerObserver.swift
+++ b/ios/Video/Features/RCTPlayerObserver.swift
@@ -26,11 +26,12 @@ protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc {
     func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange<Bool>)
     func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange<CGRect>)
     func handleTracksChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVPlayerItemTrack]>)
+    func handleLegibleOutput(strings: [NSAttributedString])
 }
 
 // MARK: - RCTPlayerObserver
 
-class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
+class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate, AVPlayerItemLegibleOutputPushDelegate {
     weak var _handlers: RCTPlayerObserverHandler?
 
     var player: AVPlayer? {
@@ -57,8 +58,11 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
 
             // handle timedMetadata
             let metadataOutput = AVPlayerItemMetadataOutput()
+            let legibleOutput = AVPlayerItemLegibleOutput()
             playerItem.add(metadataOutput)
+            playerItem.add(legibleOutput)
             metadataOutput.setDelegate(self, queue: .main)
+            legibleOutput.setDelegate(self, queue: .main)
         }
     }
 
@@ -113,6 +117,14 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
         }
     }
 
+    func legibleOutput(_: AVPlayerItemLegibleOutput,
+                       didOutputAttributedStrings strings: [NSAttributedString],
+                       nativeSampleBuffers _: [Any],
+                       forItemTime _: CMTime) {
+        guard let _handlers else { return }
+        _handlers.handleLegibleOutput(strings: strings)
+    }
+
     func addPlayerObservers() {
         guard let player, let _handlers else {
             return
diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift
index 49fe170534..fb465b67a6 100644
--- a/ios/Video/RCTVideo.swift
+++ b/ios/Video/RCTVideo.swift
@@ -118,6 +118,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     @objc var onReceiveAdEvent: RCTDirectEventBlock?
     @objc var onTextTracks: RCTDirectEventBlock?
     @objc var onAudioTracks: RCTDirectEventBlock?
+    @objc var onTextTrackDataChanged: RCTDirectEventBlock?
 
     @objc
     func _onPictureInPictureStatusChanged() {
@@ -1388,4 +1389,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
             self.onAudioTracks?(["audioTracks": audioTracks])
         }
     }
+
+    func handleLegibleOutput(strings: [NSAttributedString]) {
+        if let subtitles = strings.first {
+            self.onTextTrackDataChanged?(["subtitleTracks": subtitles.string])
+        }
+    }
 }
diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m
index f7b5bcb51a..e30f6c9cb5 100644
--- a/ios/Video/RCTVideoManager.m
+++ b/ios/Video/RCTVideoManager.m
@@ -66,6 +66,7 @@ @interface RCT_EXTERN_MODULE (RCTVideoManager, RCTViewManager)
 RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onTextTracks, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onAudioTracks, RCTDirectEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onTextTrackDataChanged, RCTDirectEventBlock);
 
 RCT_EXTERN_METHOD(save
                   : (NSDictionary*)options reactTag
diff --git a/src/Video.tsx b/src/Video.tsx
index 41d2aa6497..e0abfbbb8e 100644
--- a/src/Video.tsx
+++ b/src/Video.tsx
@@ -29,6 +29,7 @@ import type {
   OnProgressData,
   OnReceiveAdEventData,
   OnSeekData,
+  OnTextTrackDataChangedData,
   OnTextTracksData,
   OnTimedMetadataData,
   OnVideoAspectRatioData,
@@ -93,6 +94,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
       onTimedMetadata,
       onAudioTracks,
       onTextTracks,
+      onTextTrackDataChanged,
       onVideoTracks,
       onAspectRatio,
       ...rest
@@ -333,6 +335,17 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
       [onTextTracks],
     );
 
+    const _onTextTrackDataChanged = useCallback(
+      (
+        e: NativeSyntheticEvent<OnTextTrackDataChangedData>,
+      ) => {
+        const {...eventData} = e.nativeEvent;
+        delete eventData.target;
+        onTextTrackDataChanged?.(eventData as OnTextTrackDataChangedData);
+      },
+      [onTextTrackDataChanged],
+    );
+
     const _onVideoTracks = useCallback(
       (e: NativeSyntheticEvent<OnVideoTracksData>) => {
         onVideoTracks?.(e.nativeEvent);
@@ -509,6 +522,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
         onTimedMetadata={_onTimedMetadata}
         onAudioTracks={_onAudioTracks}
         onTextTracks={_onTextTracks}
+        onTextTrackDataChanged={_onTextTrackDataChanged}
         onVideoTracks={_onVideoTracks}
         onVideoFullscreenPlayerDidDismiss={onFullscreenPlayerDidDismiss}
         onVideoFullscreenPlayerDidPresent={onFullscreenPlayerDidPresent}
diff --git a/src/VideoNativeComponent.ts b/src/VideoNativeComponent.ts
index 9692356129..f173a761e6 100644
--- a/src/VideoNativeComponent.ts
+++ b/src/VideoNativeComponent.ts
@@ -7,7 +7,12 @@ import {NativeModules, requireNativeComponent} from 'react-native';
 import type ResizeMode from './types/ResizeMode';
 import type FilterType from './types/FilterType';
 import type Orientation from './types/Orientation';
-import type {AdEvent, EnumValues, OnTextTracksTypeData} from './types';
+import type {
+  AdEvent,
+  EnumValues,
+  OnTextTrackDataChangedData,
+  OnTextTracksTypeData,
+} from './types';
 
 // -------- There are types for native component (future codegen) --------
 // if you are looking for types for react component, see src/types/video.ts
@@ -366,6 +371,9 @@ export interface VideoNativeProps extends ViewProps {
   onTimedMetadata?: (event: NativeSyntheticEvent<OnTimedMetadataData>) => void; // ios, android
   onAudioTracks?: (event: NativeSyntheticEvent<OnAudioTracksData>) => void; // android
   onTextTracks?: (event: NativeSyntheticEvent<OnTextTracksData>) => void; // android
+  onTextTrackDataChanged?: (
+    event: NativeSyntheticEvent<OnTextTrackDataChangedData>,
+  ) => void; // iOS
   onVideoTracks?: (event: NativeSyntheticEvent<OnVideoTracksData>) => void; // android
 }
 
diff --git a/src/types/events.ts b/src/types/events.ts
index bfde7a4b70..9982eda535 100644
--- a/src/types/events.ts
+++ b/src/types/events.ts
@@ -79,6 +79,10 @@ export type OnTextTracksData = Readonly<{
   textTracks: ReadonlyArray<TextTrack>;
 }>;
 
+export type OnTextTrackDataChangedData = Readonly<{
+  subtitleTracks: string;
+}>;
+
 export type OnVideoTracksData = Readonly<{
   videoTracks: ReadonlyArray<
     Readonly<{
@@ -181,6 +185,7 @@ export interface ReactVideoEvents {
   onTimedMetadata?: (e: OnTimedMetadataData) => void; //Android, iOS
   onAudioTracks?: (e: OnAudioTracksData) => void; // Android
   onTextTracks?: (e: OnTextTracksData) => void; //Android
+  onTextTrackDataChanged?: (e: OnTextTrackDataChangedData) => void; // iOS
   onVideoTracks?: (e: OnVideoTracksData) => void; //Android
   onAspectRatio?: (e: OnVideoAspectRatioData) => void;
 }
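
For reviewers, a minimal sketch of how an app could consume the new event once this lands (iOS only). The component name, styling, and state handling are illustrative; the `onTextTrackDataChanged` prop, the `OnTextTrackDataChangedData` type, and the Sintel HLS URL are taken from the diff above.

```tsx
import React, {useState} from 'react';
import {Text, View} from 'react-native';
import Video, {OnTextTrackDataChangedData} from 'react-native-video';

const SubtitleOverlay = () => {
  // Holds the latest subtitle cue delivered by onTextTrackDataChanged.
  const [currentCue, setCurrentCue] = useState('');

  return (
    <View style={{flex: 1}}>
      <Video
        style={{flex: 1}}
        // Sintel HLS stream with embedded subtitles, as added to the example app above.
        source={{
          uri: 'https://bitmovin-a.akamaihd.net/content/sintel/hls/playlist.m3u8',
        }}
        // Fired on iOS whenever new subtitle text is available for the selected track.
        onTextTrackDataChanged={(e: OnTextTrackDataChangedData) =>
          setCurrentCue(e.subtitleTracks)
        }
      />
      <Text>{currentCue}</Text>
    </View>
  );
};

export default SubtitleOverlay;
```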