From c807053b52b2f66862a1c5a3ac69b4681845f907 Mon Sep 17 00:00:00 2001 From: Nicolas Gonzalez Date: Tue, 4 Dec 2018 20:49:11 -0600 Subject: [PATCH 1/4] only set filter if given --- ios/Video/RCTVideo.m | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m index 741ac6719e..39d9905ad5 100644 --- a/ios/Video/RCTVideo.m +++ b/ios/Video/RCTVideo.m @@ -1266,33 +1266,38 @@ - (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerView - (void)setFilter:(NSString *)filterName { - _filterName = filterName; + _filterName = filterName; - AVAsset *asset = _playerItem.asset; + AVAsset *asset = _playerItem.asset; - if (asset != nil) { + if (asset != nil && filterName != nil) { - CIFilter *filter = [CIFilter filterWithName:filterName]; + CIFilter *filter = [CIFilter filterWithName:filterName]; - if (filter != nil) { + _playerItem.videoComposition = [AVVideoComposition + videoCompositionWithAsset:asset + applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *_Nonnull request) { - _playerItem.videoComposition = [AVVideoComposition - videoCompositionWithAsset:asset - applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *_Nonnull request) { + if (filter == nil) { - CIImage *image = request.sourceImage.imageByClampingToExtent; + [request finishWithImage:request.sourceImage context:nil]; - [filter setValue:image forKey:kCIInputImageKey]; + } else { - CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent]; + CIImage *image = request.sourceImage.imageByClampingToExtent; - [request finishWithImage:output context:nil]; + [filter setValue:image forKey:kCIInputImageKey]; + CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent]; - }]; - } + [request finishWithImage:output context:nil]; - } + } + + + }]; + + } } @@ -1456,4 +1461,4 @@ - (NSString *)cacheDirectoryPath { return array[0]; } -@end \ No newline at end of file +@end From 67a963328a9f64af304ab833a47e5f24b4bff422 Mon Sep 17 00:00:00 2001 From: Nicolas Gonzalez Date: Thu, 13 Dec 2018 21:30:38 -0600 Subject: [PATCH 2/4] updated basic example, added filterEnabled flag, check for HLS playlist before applying filter --- README.md | 10 +++ Video.js | 1 + examples/basic/index.ios.js | 71 +++++++++++++++- ios/Video/RCTVideo.m | 160 +++++++++++++++++++----------------- ios/Video/RCTVideoManager.m | 1 + package.json | 2 +- 6 files changed, 169 insertions(+), 76 deletions(-) diff --git a/README.md b/README.md index a7c8430e50..5dc8affc6a 100644 --- a/README.md +++ b/README.md @@ -260,6 +260,7 @@ var styles = StyleSheet.create({ * [bufferConfig](#bufferconfig) * [controls](#controls) * [filter](#filter) +* [filterEnabled](#filterEnabled) * [fullscreen](#fullscreen) * [fullscreenAutorotate](#fullscreenautorotate) * [fullscreenOrientation](#fullscreenorientation) @@ -379,6 +380,15 @@ For more details on these filters refer to the [iOS docs](https://developer.appl Notes: 1. Using a filter can impact CPU usage. A workaround is to save the video with the filter and then load the saved video. 2. Video filter is currently not supported on HLS playlists. +3. `filterEnabled` must be set to `true` + +Platforms: iOS + +#### filterEnabled +Enable video filter. 
+ +* **false (default)** - Don't enable filter +* **true** - Enable filter Platforms: iOS diff --git a/Video.js b/Video.js index 77b02f0a9e..a0ca95749c 100644 --- a/Video.js +++ b/Video.js @@ -300,6 +300,7 @@ Video.propTypes = { FilterType.TRANSFER, FilterType.SEPIA ]), + filterEnabled: PropTypes.bool, /* Native only */ src: PropTypes.object, seek: PropTypes.oneOfType([ diff --git a/examples/basic/index.ios.js b/examples/basic/index.ios.js index 2abd3b9ff3..1bc0ac4739 100644 --- a/examples/basic/index.ios.js +++ b/examples/basic/index.ios.js @@ -13,7 +13,26 @@ import { View, } from 'react-native'; -import Video from 'react-native-video'; +import Video,{FilterType} from 'react-native-video'; + +const filterTypes = [ + FilterType.NONE, + FilterType.INVERT, + FilterType.MONOCHROME, + FilterType.POSTERIZE, + FilterType.FALSE, + FilterType.MAXIMUMCOMPONENT, + FilterType.MINIMUMCOMPONENT, + FilterType.CHROME, + FilterType.FADE, + FilterType.INSTANT, + FilterType.MONO, + FilterType.NOIR, + FilterType.PROCESS, + FilterType.TONAL, + FilterType.TRANSFER, + FilterType.SEPIA +]; class VideoPlayer extends Component { constructor(props) { @@ -34,6 +53,8 @@ class VideoPlayer extends Component { skin: 'custom', ignoreSilentSwitch: null, isBuffering: false, + filter: FilterType.NONE, + filterEnabled: true }; onLoad(data) { @@ -57,6 +78,20 @@ class VideoPlayer extends Component { } } + setFilter(step) { + let index = filterTypes.indexOf(this.state.filter) + step; + + if (index === filterTypes.length) { + index = 0; + } else if (index === -1) { + index = filterTypes.length - 1; + } + + this.setState({ + filter: filterTypes[index] + }) + } + renderSkinControl(skin) { const isSelected = this.state.skin == skin; const selectControls = skin == 'native' || skin == 'embed'; @@ -141,6 +176,8 @@ class VideoPlayer extends Component { onProgress={this.onProgress} onEnd={() => { AlertIOS.alert('Done!') }} repeat={true} + filter={this.state.filter} + filterEnabled={this.state.filterEnabled} /> @@ -151,6 +188,21 @@ class VideoPlayer extends Component { {this.renderSkinControl('native')} {this.renderSkinControl('embed')} + { + (this.state.filterEnabled) ? + + { + this.setFilter(-1) + }}> + Previous Filter + + { + this.setFilter(1) + }}> + Next Filter + + : null + } @@ -212,6 +264,8 @@ class VideoPlayer extends Component { onEnd={() => { AlertIOS.alert('Done!') }} repeat={true} controls={this.state.controls} + filter={this.state.filter} + filterEnabled={this.state.filterEnabled} /> @@ -221,6 +275,21 @@ class VideoPlayer extends Component { {this.renderSkinControl('native')} {this.renderSkinControl('embed')} + { + (this.state.filterEnabled) ? 
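+              // Filter controls: these buttons are rendered only while
+              // this.state.filterEnabled is true. setFilter(step) walks the
+              // filterTypes array defined at the top of this file, wrapping
+              // around at either end, and the selected value is passed to the
+              // native side through the Video component's filter prop.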
+ + { + this.setFilter(-1) + }}> + Previous Filter + + { + this.setFilter(1) + }}> + Next Filter + + : null + } diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m index 5c2477950c..2c8190ca6d 100644 --- a/ios/Video/RCTVideo.m +++ b/ios/Video/RCTVideo.m @@ -26,28 +26,29 @@ @implementation RCTVideo { AVPlayer *_player; AVPlayerItem *_playerItem; + NSDictionary *_source; BOOL _playerItemObserversSet; BOOL _playerBufferEmpty; AVPlayerLayer *_playerLayer; BOOL _playerLayerObserverSet; RCTVideoPlayerViewController *_playerViewController; NSURL *_videoURL; - + /* Required to publish events */ RCTEventDispatcher *_eventDispatcher; BOOL _playbackRateObserverRegistered; BOOL _isExternalPlaybackActiveObserverRegistered; BOOL _videoLoadStarted; - + bool _pendingSeek; float _pendingSeekTime; float _lastSeekTime; - + /* For sending videoProgress events */ Float64 _progressUpdateInterval; BOOL _controls; id _timeObserver; - + /* Keep track of any modifiers, need to be applied after each play */ float _volume; float _rate; @@ -68,6 +69,7 @@ @implementation RCTVideo NSString * _fullscreenOrientation; BOOL _fullscreenPlayerPresented; NSString *_filterName; + BOOL _filterEnabled; UIViewController * _presentingViewController; #if __has_include() RCTVideoCache * _videoCache; @@ -78,7 +80,7 @@ - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher { if ((self = [super init])) { _eventDispatcher = eventDispatcher; - + _playbackRateObserverRegistered = NO; _isExternalPlaybackActiveObserverRegistered = NO; _playbackStalled = NO; @@ -104,23 +106,23 @@ - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher selector:@selector(applicationWillResignActive:) name:UIApplicationWillResignActiveNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(audioRouteChanged:) name:AVAudioSessionRouteChangeNotification object:nil]; } - + return self; } @@ -130,7 +132,7 @@ - (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player viewController.showsPlaybackControls = YES; viewController.rctDelegate = self; viewController.preferredOrientation = _fullscreenOrientation; - + viewController.view.frame = self.bounds; viewController.player = player; viewController.view.frame = self.bounds; @@ -148,7 +150,7 @@ - (CMTime)playerItemDuration { return([playerItem duration]); } - + return(kCMTimeInvalid); } @@ -159,7 +161,7 @@ - (CMTimeRange)playerItemSeekableTimeRange { return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; } - + return (kCMTimeRangeZero); } @@ -200,7 +202,7 @@ - (void)dealloc - (void)applicationWillResignActive:(NSNotification *)notification { if (_playInBackground || _playWhenInactive || _paused) return; - + [_player pause]; [_player setRate:0.0]; } @@ -240,18 +242,18 @@ - (void)sendProgressUpdate if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { return; } - + CMTime playerDuration = [self playerItemDuration]; if (CMTIME_IS_INVALID(playerDuration)) { return; } - + CMTime currentTime = _player.currentTime; const Float64 duration = CMTimeGetSeconds(playerDuration); const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); - + 
[[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}]; - + if( currentTimeSecs >= 0 && self.onVideoProgress) { self.onVideoProgress(@{ @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], @@ -326,6 +328,7 @@ - (void)removePlayerItemObservers - (void)setSrc:(NSDictionary *)source { + _source = source; [self removePlayerLayer]; [self removePlayerTimeObserver]; [self removePlayerItemObservers]; @@ -341,7 +344,7 @@ - (void)setSrc:(NSDictionary *)source [_player pause]; [_playerViewController.view removeFromSuperview]; _playerViewController = nil; - + if (_playbackRateObserverRegistered) { [_player removeObserver:self forKeyPath:playbackRate context:nil]; _playbackRateObserverRegistered = NO; @@ -350,16 +353,16 @@ - (void)setSrc:(NSDictionary *)source [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; _isExternalPlaybackActiveObserverRegistered = NO; } - + _player = [AVPlayer playerWithPlayerItem:_playerItem]; _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; - + [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; _playbackRateObserverRegistered = YES; - + [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; _isExternalPlaybackActiveObserverRegistered = YES; - + [self addPlayerTimeObserver]; //Perform on next run loop, otherwise onVideoLoadStart is nil @@ -382,7 +385,7 @@ - (NSURL*) urlFilePath:(NSString*) filepath { if ([filepath containsString:@"file://"]) { return [NSURL URLWithString:filepath]; } - + // if no file found, check if the file exists in the Document directory NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); NSString* relativeFilePath = [filepath lastPathComponent]; @@ -391,7 +394,7 @@ - (NSURL*) urlFilePath:(NSString*) filepath { if (fileComponents.count > 1) { relativeFilePath = [fileComponents objectAtIndex:1]; } - + NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { return [NSURL fileURLWithPath:path]; @@ -408,21 +411,21 @@ - (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nu // sideload text tracks AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; - + AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) ofTrack:videoAsset atTime:kCMTimeZero error:nil]; - + AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) ofTrack:audioAsset atTime:kCMTimeZero error:nil]; - + NSMutableArray* validTextTracks = [NSMutableArray array]; for (int i = 0; i < _textTracks.count; ++i) { AVURLAsset *textURLAsset; @@ -461,7 +464,7 @@ - (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlaye ? 
[NSURL URLWithString:uri] : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; - + if (isNetwork) { /* Per #1091, this is not a public API. * We need to either get approval from Apple to use this or use a different approach. @@ -527,7 +530,7 @@ - (void)playerItemForSourceUsingCache:(NSString *)uri assetOptions:(NSDictionary DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000]; asset.loaderDelegate = self; - + /* More granular code to have control over the DVURLAsset DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url]; resourceLoaderDelegate.delegate = self; @@ -564,40 +567,40 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N for (AVMetadataItem *item in items) { NSString *value = (NSString *)item.value; NSString *identifier = item.identifier; - + if (![value isEqual: [NSNull null]]) { NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; - + [array addObject:dictionary]; } } - + self.onTimedMetadata(@{ @"target": self.reactTag, @"metadata": array }); } } - + if ([keyPath isEqualToString:statusKeyPath]) { // Handle player item status change. if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { float duration = CMTimeGetSeconds(_playerItem.asset.duration); - + if (isnan(duration)) { duration = 0.0; } - + NSObject *width = @"undefined"; NSObject *height = @"undefined"; NSString *orientation = @"undefined"; - + if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; CGAffineTransform preferredTransform = [videoTrack preferredTransform]; - + if ((videoTrack.naturalSize.width == preferredTransform.tx && videoTrack.naturalSize.height == preferredTransform.ty) || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) @@ -607,7 +610,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N orientation = @"portrait"; } } - + if (self.onVideoLoad && _videoLoadStarted) { self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration], @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], @@ -627,7 +630,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N @"target": self.reactTag}); } _videoLoadStarted = NO; - + [self attachListeners]; [self applyModifiers]; } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { @@ -687,7 +690,7 @@ - (void)attachListeners selector:@selector(playerItemDidReachEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:[_player currentItem]]; - + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:nil]; @@ -710,7 +713,7 @@ - (void)playerItemDidReachEnd:(NSNotification *)notification if(self.onVideoEnd) { self.onVideoEnd(@{@"target": self.reactTag}); } - + if (_repeat) { AVPlayerItem *item = [notification object]; [item seekToTime:kCMTimeZero]; @@ -771,7 +774,7 @@ - (void)setPaused:(BOOL)paused [_player play]; [_player setRate:_rate]; } - + _paused = paused; } @@ -793,19 +796,19 @@ - (void)setSeek:(NSDictionary *)info { NSNumber *seekTime 
= info[@"time"]; NSNumber *seekTolerance = info[@"tolerance"]; - + int timeScale = 1000; - + AVPlayerItem *item = _player.currentItem; if (item && item.status == AVPlayerItemStatusReadyToPlay) { // TODO check loadedTimeRanges - + CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); CMTime current = item.currentTime; // TODO figure out a good tolerance level CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); BOOL wasPaused = _paused; - + if (CMTimeCompare(current, cmSeekTime) != 0) { if (!wasPaused) [_player pause]; [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { @@ -821,10 +824,10 @@ - (void)setSeek:(NSDictionary *)info @"target": self.reactTag}); } }]; - + _pendingSeek = false; } - + } else { // TODO: See if this makes sense and if so, actually implement it _pendingSeek = true; @@ -859,7 +862,7 @@ - (void)applyModifiers [_player setVolume:_volume]; [_player setMuted:NO]; } - + [self setSelectedAudioTrack:_selectedAudioTrack]; [self setSelectedTextTrack:_selectedTextTrack]; [self setResizeMode:_resizeMode]; @@ -880,7 +883,7 @@ - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)character AVMediaSelectionGroup *group = [_player.currentItem.asset mediaSelectionGroupForMediaCharacteristic:characteristic]; AVMediaSelectionOption *mediaOption; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { @@ -913,7 +916,7 @@ - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)character [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; return; } - + // If a match isn't found, option will be nil and text tracks will be disabled [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } @@ -937,7 +940,7 @@ - (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack { - (void) setSideloadedText { NSString *type = _selectedTextTrack[@"type"]; NSArray *textTracks = [self getTextTrackInfo]; - + // The first few tracks will be audio & video track int firstTextIndex = 0; for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { @@ -945,9 +948,9 @@ - (void) setSideloadedText { break; } } - + int selectedTrackIndex = RCTVideoUnset; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"]) { @@ -976,7 +979,7 @@ - (void) setSideloadedText { } } } - + // in the situation that a selected text track is not available (eg. specifies a textTrack not available) if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); @@ -993,7 +996,7 @@ - (void) setSideloadedText { } } } - + for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { BOOL isEnabled = NO; if (selectedTrackIndex != RCTVideoUnset) { @@ -1008,7 +1011,7 @@ -(void) setStreamingText { AVMediaSelectionGroup *group = [_player.currentItem.asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; AVMediaSelectionOption *mediaOption; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. 
We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { @@ -1041,7 +1044,7 @@ -(void) setStreamingText { [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; return; } - + // If a match isn't found, option will be nil and text tracks will be disabled [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } @@ -1049,7 +1052,7 @@ -(void) setStreamingText { - (void)setTextTracks:(NSArray*) textTracks; { _textTracks = textTracks; - + // in case textTracks was set after selectedTextTrack if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; } @@ -1081,7 +1084,7 @@ - (NSArray *)getTextTrackInfo { // if sideloaded, textTracks will already be set if (_textTracks) return _textTracks; - + // if streaming video, we extract the text tracks NSMutableArray *textTracks = [[NSMutableArray alloc] init]; AVMediaSelectionGroup *group = [_player.currentItem.asset @@ -1119,7 +1122,7 @@ - (void)setFullscreen:(BOOL) fullscreen { } // Set presentation style to fullscreen [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; - + // Find the nearest view controller UIViewController *viewController = [self firstAvailableUIViewController]; if( !viewController ) @@ -1189,13 +1192,13 @@ - (void)usePlayerLayer _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; _playerLayer.frame = self.bounds; _playerLayer.needsDisplayOnBoundsChange = YES; - + // to prevent video from being animated when resizeMode is 'cover' // resize mode must be set before layer is added [self setResizeMode:_resizeMode]; [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; _playerLayerObserverSet = YES; - + [self.layer addSublayer:_playerLayer]; self.layer.needsDisplayOnBoundsChange = YES; } @@ -1223,7 +1226,7 @@ - (void)setControls:(BOOL)controls - (void)setProgressUpdateInterval:(float)progressUpdateInterval { _progressUpdateInterval = progressUpdateInterval; - + if (_timeObserver) { [self removePlayerTimeObserver]; [self addPlayerTimeObserver]; @@ -1266,12 +1269,17 @@ - (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerView - (void)setFilter:(NSString *)filterName { _filterName = filterName; + + if (!_filterEnabled) { + return; + } else if ([[_source objectForKey:@"uri"] rangeOfString:@"m3u8"].location != NSNotFound) { + return; // filters don't work for HLS... 
return
+  }
+
   AVAsset *asset = _playerItem.asset;
-  
+
   if (!asset) {
     return;
-  } else if (!_playerItem.videoComposition && (filterName == nil || [filterName isEqualToString:@""])) {
-    return; // Setting up an empty filter has a cost so avoid whenever possible
   }
   // TODO: filters don't work for HLS, check & return
@@ -1290,6 +1298,10 @@ - (void)setFilter:(NSString *)filterName {
   }];
 }
 
+- (void)setFilterEnabled:(BOOL)filterEnabled {
+  _filterEnabled = filterEnabled;
+}
+
 #pragma mark - React View Management
 
 - (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
@@ -1300,7 +1312,7 @@ - (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
   {
     [self setControls:true];
   }
-  
+
   if( _controls )
   {
     view.frame = self.bounds;
@@ -1332,7 +1344,7 @@ - (void)layoutSubviews
   if( _controls )
   {
     _playerViewController.view.frame = self.bounds;
-    
+
     // also adjust all subviews of contentOverlayView
     for (UIView* subview in _playerViewController.contentOverlayView.subviews) {
       subview.frame = self.bounds;
@@ -1361,18 +1373,18 @@ - (void)removeFromSuperview
     _isExternalPlaybackActiveObserverRegistered = NO;
   }
   _player = nil;
-  
+
   [self removePlayerLayer];
-  
+
   [_playerViewController.view removeFromSuperview];
   _playerViewController = nil;
-  
+
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
-  
+
   _eventDispatcher = nil;
   [[NSNotificationCenter defaultCenter] removeObserver:self];
-  
+
   [super removeFromSuperview];
 }
 
diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m
index 9823dcfb94..d9578dc3dc 100644
--- a/ios/Video/RCTVideoManager.m
+++ b/ios/Video/RCTVideoManager.m
@@ -39,6 +39,7 @@ - (dispatch_queue_t)methodQueue
 RCT_EXPORT_VIEW_PROPERTY(fullscreenAutorotate, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(fullscreenOrientation, NSString);
 RCT_EXPORT_VIEW_PROPERTY(filter, NSString);
+RCT_EXPORT_VIEW_PROPERTY(filterEnabled, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);
 /* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */
 RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock);
diff --git a/package.json b/package.json
index 3392e4796e..8419ce14e7 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "react-native-video",
-  "version": "4.0.1",
+  "version": "4.0.2",
   "description": "A