From 88a3a561f16c289beecb86ddc31c854a82efb684 Mon Sep 17 00:00:00 2001
From: stuartmorgan
Date: Fri, 19 Apr 2024 09:22:18 -0400
Subject: [PATCH] [camera] Convert iOS Obj-C->Dart calls to Pigeon (#6568)

Converts all of the Obj-C -> Dart calls to Pigeon, using the new
suffix-based Pigeon API instantiation feature.

This required decentralizing some threading code slightly: since method
channel calls only involve one method (due to the lack of strong types),
a wrapper that automatically did thread bouncing was feasible, but with
Pigeon it's not, since a wrapper would have to duplicate the entire API
surface, and that's more work than just doing the dispatches at the
call site.

Part of https://github.com/flutter/flutter/issues/117905
---
 .../camera/camera_avfoundation/CHANGELOG.md | 4 +
 .../ios/Runner.xcodeproj/project.pbxproj | 4 -
 .../ios/RunnerTests/CameraFocusTests.m | 10 +-
 .../ios/RunnerTests/CameraOrientationTests.m | 139 +++++----
 .../ios/RunnerTests/CameraPropertiesTests.m | 45 +--
 .../ThreadSafeMethodChannelTests.m | 55 ----
 .../ios/Classes/CameraPlugin.m | 72 ++---
 .../ios/Classes/CameraPlugin.modulemap | 1 -
 .../ios/Classes/CameraPlugin_Test.h | 12 +-
 .../ios/Classes/CameraProperties.h | 43 +--
 .../ios/Classes/CameraProperties.m | 59 ++--
 .../camera_avfoundation/ios/Classes/FLTCam.h | 15 +-
 .../camera_avfoundation/ios/Classes/FLTCam.m | 123 ++++----
 .../ios/Classes/FLTThreadSafeMethodChannel.h | 22 --
 .../ios/Classes/FLTThreadSafeMethodChannel.m | 29 --
 .../ios/Classes/messages.g.h | 98 ++++++
 .../ios/Classes/messages.g.m | 291 ++++++++++++++++++
 .../lib/src/avfoundation_camera.dart | 206 +++++++------
 .../lib/src/messages.g.dart | 259 ++++++++++++++++
 .../camera_avfoundation/lib/src/utils.dart | 40 ++-
 .../camera_avfoundation/pigeons/messages.dart | 78 +++++
 .../camera/camera_avfoundation/pubspec.yaml | 2 +-
 .../test/avfoundation_camera_test.dart | 119 +------
 .../camera_avfoundation/test/utils_test.dart | 16 +-
 24 files changed, 1133 insertions(+), 609 deletions(-)
 delete mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m
 delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h
 delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m

diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index 265cad31e810..584c95f6adb1 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.15+4
+
+* Converts host-to-Dart communication to Pigeon.
+
 ## 0.9.15+3
 
 * Moves `pigeon` to `dev_dependencies`.
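To make the two ideas in the commit message concrete — suffix-based Pigeon API instantiation and doing the main-queue dispatch at each call site — here is a minimal Objective-C sketch of the pattern, separate from the patch itself. The FCPExampleErrorReporter class and its initWithMessenger:cameraId: initializer are hypothetical names used only for illustration; FCPCameraEventApi and reportError:completion: correspond to the generated APIs added in messages.g.h below.

#import <Flutter/Flutter.h>
#import "messages.g.h"

@interface FCPExampleErrorReporter : NSObject
/// The Pigeon Flutter API; after creation it is only touched on the main queue.
@property(nonatomic) FCPCameraEventApi *dartAPI;
- (instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                         cameraId:(NSInteger)cameraId;
- (void)reportErrorMessage:(NSString *)errorMessage;
@end

@implementation FCPExampleErrorReporter

- (instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                         cameraId:(NSInteger)cameraId {
  self = [super init];
  if (self) {
    // Suffix-based instantiation: one strongly typed API instance per camera,
    // distinguished by using the camera ID as the message channel suffix.
    _dartAPI = [[FCPCameraEventApi alloc]
        initWithBinaryMessenger:messenger
           messageChannelSuffix:[NSString stringWithFormat:@"%ld", (long)cameraId]];
  }
  return self;
}

- (void)reportErrorMessage:(NSString *)errorMessage {
  // With no channel wrapper to centralize thread bouncing, each call site hops
  // to the main queue itself before invoking the Dart-side API.
  __weak typeof(self) weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    [weakSelf.dartAPI reportError:errorMessage
                       completion:^(FlutterError *error){
                         // Event-style broadcast; delivery errors are ignored.
                       }];
  });
}

@end

This mirrors what the patch does: CameraPlugin.m creates the per-camera FCPCameraEventApi using the camera ID as the suffix, and FLTCam.m's reportErrorMessage: performs the same main-queue hop via FLTEnsureToRunOnMainQueue.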
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 86296ea6bc50..dc00e49c0422 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -29,7 +29,6 @@ E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; }; E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; }; E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; }; - E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */; }; E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */; }; E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; }; E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; }; @@ -96,7 +95,6 @@ E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = ""; }; E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = ""; }; E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = ""; }; - E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeMethodChannelTests.m; sourceTree = ""; }; E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeTextureRegistryTests.m; sourceTree = ""; }; E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = ""; }; E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = ""; }; @@ -134,7 +132,6 @@ 03BB766C2665316900CE5A93 /* Info.plist */, 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */, E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */, - E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */, E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */, E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */, E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */, @@ -454,7 +451,6 @@ E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */, E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in 
Sources */, - E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m index 3575cd20e120..577304018dee 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m @@ -37,7 +37,7 @@ - (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus { [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; // Run test - [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice]; + [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]); @@ -54,7 +54,7 @@ - (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus { [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; // Run test - [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice]; + [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeAutoFocus OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); @@ -72,7 +72,7 @@ - (void)testAutoFocusWithNoModeSupported_ShouldSetNothing { [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; // Run test - [_camera applyFocusMode:FLTFocusModeAuto onDevice:_mockDevice]; + [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; } - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { @@ -85,7 +85,7 @@ - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; // Run test - [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice]; + [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeAutoFocus OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); @@ -102,7 +102,7 @@ - (void)testLockedFocusWithModeNotSupported_ShouldSetNothing { [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; // Run test - [_camera applyFocusMode:FLTFocusModeLocked onDevice:_mockDevice]; + [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; } - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index f63c3fb89576..57787a968a2d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -9,44 +9,87 @@ #import +@interface StubGlobalEventApi : FCPCameraGlobalEventApi +@property(nonatomic) BOOL called; +@property(nonatomic) FCPPlatformDeviceOrientation lastOrientation; +@end + +@implementation StubGlobalEventApi +- (void)deviceOrientationChangedOrientation:(FCPPlatformDeviceOrientation)orientation + completion:(void (^)(FlutterError *_Nullable))completion { + self.called = YES; + self.lastOrientation = orientation; + completion(nil); +} + +- 
(FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(nonnull NSString *)channel + binaryMessageHandler: + (nullable FlutterBinaryMessageHandler)handler { + return 0; +} + +@end + +#pragma mark - + @interface CameraOrientationTests : XCTestCase @end @implementation CameraOrientationTests +// Ensure that the given queue and then the main queue have both cycled, to wait for any pending +// async events that may have been bounced between them. +- (void)waitForRoundTripWithQueue:(dispatch_queue_t)queue { + XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"Queue flush"]; + dispatch_async(queue, ^{ + dispatch_async(dispatch_get_main_queue(), ^{ + [expectation fulfill]; + }); + }); + [self waitForExpectations:@[ expectation ]]; +} + +- (void)sendOrientation:(UIDeviceOrientation)orientation toCamera:(CameraPlugin *)cameraPlugin { + [cameraPlugin orientationChanged:[self createMockNotificationForOrientation:orientation]]; + [self waitForRoundTripWithQueue:cameraPlugin.captureSessionQueue]; +} + - (void)testOrientationNotifications { - id mockMessenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:mockMessenger]; - - [mockMessenger setExpectationOrderMatters:YES]; - - [self rotate:UIDeviceOrientationPortraitUpsideDown - expectedChannelOrientation:@"portraitDown" - cameraPlugin:cameraPlugin - messenger:mockMessenger]; - [self rotate:UIDeviceOrientationPortrait - expectedChannelOrientation:@"portraitUp" - cameraPlugin:cameraPlugin - messenger:mockMessenger]; - [self rotate:UIDeviceOrientationLandscapeLeft - expectedChannelOrientation:@"landscapeLeft" - cameraPlugin:cameraPlugin - messenger:mockMessenger]; - [self rotate:UIDeviceOrientationLandscapeRight - expectedChannelOrientation:@"landscapeRight" - cameraPlugin:cameraPlugin - messenger:mockMessenger]; - - OCMReject([mockMessenger sendOnChannel:[OCMArg any] message:[OCMArg any]]); - - // No notification when flat. - [cameraPlugin - orientationChanged:[self createMockNotificationForOrientation:UIDeviceOrientationFaceUp]]; - // No notification when facedown. 
- [cameraPlugin - orientationChanged:[self createMockNotificationForOrientation:UIDeviceOrientationFaceDown]]; - - OCMVerifyAll(mockMessenger); + StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; + CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:eventAPI]; + + [self sendOrientation:UIDeviceOrientationPortraitUpsideDown toCamera:cameraPlugin]; + XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitDown); + [self sendOrientation:UIDeviceOrientationPortrait toCamera:cameraPlugin]; + XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitUp); + [self sendOrientation:UIDeviceOrientationLandscapeLeft toCamera:cameraPlugin]; + XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeLeft); + [self sendOrientation:UIDeviceOrientationLandscapeRight toCamera:cameraPlugin]; + XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeRight); +} + +- (void)testOrientationNotificationsNotCalledForFaceUp { + StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; + CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:eventAPI]; + + [self sendOrientation:UIDeviceOrientationFaceUp toCamera:cameraPlugin]; + + XCTAssertFalse(eventAPI.called); +} + +- (void)testOrientationNotificationsNotCalledForFaceDown { + StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; + CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:eventAPI]; + + [self sendOrientation:UIDeviceOrientationFaceDown toCamera:cameraPlugin]; + + XCTAssertFalse(eventAPI.called); } - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { @@ -71,40 +114,20 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { [self waitForExpectationsWithTimeout:1 handler:nil]; } -- (void)rotate:(UIDeviceOrientation)deviceOrientation - expectedChannelOrientation:(NSString *)channelOrientation - cameraPlugin:(CameraPlugin *)cameraPlugin - messenger:(NSObject *)messenger { - XCTestExpectation *orientationExpectation = [self expectationWithDescription:channelOrientation]; - - OCMExpect([messenger - sendOnChannel:[OCMArg any] - message:[OCMArg checkWithBlock:^BOOL(NSData *data) { - NSObject *codec = [FlutterStandardMethodCodec sharedInstance]; - FlutterMethodCall *methodCall = [codec decodeMethodCall:data]; - [orientationExpectation fulfill]; - return - [methodCall.method isEqualToString:@"orientation_changed"] && - [methodCall.arguments isEqualToDictionary:@{@"orientation" : channelOrientation}]; - }]]); - - [cameraPlugin orientationChanged:[self createMockNotificationForOrientation:deviceOrientation]]; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; -} - - (void)testOrientationChanged_noRetainCycle { dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); FLTCam *mockCam = OCMClassMock([FLTCam class]); - FLTThreadSafeMethodChannel *mockChannel = OCMClassMock([FLTThreadSafeMethodChannel class]); + StubGlobalEventApi *stubAPI = [[StubGlobalEventApi alloc] init]; __weak CameraPlugin *weakCamera; @autoreleasepool { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:stubAPI]; weakCamera = camera; camera.captureSessionQueue = captureSessionQueue; camera.camera = mockCam; - camera.deviceEventMethodChannel = mockChannel; [camera 
orientationChanged: [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; @@ -118,11 +141,11 @@ - (void)testOrientationChanged_noRetainCycle { [self expectationWithDescription:@"Dispatched to capture session queue"]; dispatch_async(captureSessionQueue, ^{ OCMVerify(never(), [mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]); - OCMVerify(never(), [mockChannel invokeMethod:@"orientation_changed" arguments:OCMOCK_ANY]); + XCTAssertFalse(stubAPI.called); [expectation fulfill]; }); - [self waitForExpectationsWithTimeout:1 handler:nil]; + [self waitForExpectationsWithTimeout:30 handler:nil]; } - (NSNotification *)createMockNotificationForOrientation:(UIDeviceOrientation)deviceOrientation { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m index 70db6448a6dc..14ced24bfc1d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +@import camera_avfoundation; @import camera_avfoundation.Test; @import AVFoundation; @import XCTest; @@ -31,30 +32,16 @@ - (void)testFLTGetAVCaptureFlashModeForFLTFlashMode { #pragma mark - exposure mode tests -- (void)testFLTGetStringForFLTExposureMode { - XCTAssertEqualObjects(@"auto", FLTGetStringForFLTExposureMode(FLTExposureModeAuto)); - XCTAssertEqualObjects(@"locked", FLTGetStringForFLTExposureMode(FLTExposureModeLocked)); - XCTAssertNil(FLTGetStringForFLTExposureMode(-1)); -} - -- (void)testFLTGetFLTExposureModeForString { - XCTAssertEqual(FLTExposureModeAuto, FLTGetFLTExposureModeForString(@"auto")); - XCTAssertEqual(FLTExposureModeLocked, FLTGetFLTExposureModeForString(@"locked")); - XCTAssertEqual(FLTExposureModeInvalid, FLTGetFLTExposureModeForString(@"unknown")); +- (void)testFCPGetExposureModeForString { + XCTAssertEqual(FCPPlatformExposureModeAuto, FCPGetExposureModeForString(@"auto")); + XCTAssertEqual(FCPPlatformExposureModeLocked, FCPGetExposureModeForString(@"locked")); } #pragma mark - focus mode tests -- (void)testFLTGetStringForFLTFocusMode { - XCTAssertEqualObjects(@"auto", FLTGetStringForFLTFocusMode(FLTFocusModeAuto)); - XCTAssertEqualObjects(@"locked", FLTGetStringForFLTFocusMode(FLTFocusModeLocked)); - XCTAssertNil(FLTGetStringForFLTFocusMode(-1)); -} - - (void)testFLTGetFLTFocusModeForString { - XCTAssertEqual(FLTFocusModeAuto, FLTGetFLTFocusModeForString(@"auto")); - XCTAssertEqual(FLTFocusModeLocked, FLTGetFLTFocusModeForString(@"locked")); - XCTAssertEqual(FLTFocusModeInvalid, FLTGetFLTFocusModeForString(@"unknown")); + XCTAssertEqual(FCPPlatformFocusModeAuto, FCPGetFocusModeForString(@"auto")); + XCTAssertEqual(FCPPlatformFocusModeLocked, FCPGetFocusModeForString(@"locked")); } #pragma mark - resolution preset tests @@ -93,15 +80,17 @@ - (void)testFLTGetUIDeviceOrientationForString { } - (void)testFLTGetStringForUIDeviceOrientation { - XCTAssertEqualObjects(@"portraitDown", - FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortraitUpsideDown)); - XCTAssertEqualObjects(@"landscapeLeft", - FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeLeft)); - XCTAssertEqualObjects(@"landscapeRight", - FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeRight)); - XCTAssertEqualObjects(@"portraitUp", - 
FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortrait)); - XCTAssertEqualObjects(@"portraitUp", FLTGetStringForUIDeviceOrientation(-1)); + XCTAssertEqual( + FCPPlatformDeviceOrientationPortraitDown, + FCPGetPigeonDeviceOrientationForOrientation(UIDeviceOrientationPortraitUpsideDown)); + XCTAssertEqual(FCPPlatformDeviceOrientationLandscapeLeft, + FCPGetPigeonDeviceOrientationForOrientation(UIDeviceOrientationLandscapeLeft)); + XCTAssertEqual(FCPPlatformDeviceOrientationLandscapeRight, + FCPGetPigeonDeviceOrientationForOrientation(UIDeviceOrientationLandscapeRight)); + XCTAssertEqual(FCPPlatformDeviceOrientationPortraitUp, + FCPGetPigeonDeviceOrientationForOrientation(UIDeviceOrientationPortrait)); + XCTAssertEqual(FCPPlatformDeviceOrientationPortraitUp, + FCPGetPigeonDeviceOrientationForOrientation(-1)); } #pragma mark - file format tests diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m deleted file mode 100644 index 36e87db74411..000000000000 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeMethodChannelTests.m +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -@import camera_avfoundation; -@import camera_avfoundation.Test; -@import XCTest; -#import - -@interface ThreadSafeMethodChannelTests : XCTestCase -@end - -@implementation ThreadSafeMethodChannelTests - -- (void)testInvokeMethod_shouldStayOnMainThreadIfCalledFromMainThread { - FlutterMethodChannel *mockMethodChannel = OCMClassMock([FlutterMethodChannel class]); - FLTThreadSafeMethodChannel *threadSafeMethodChannel = - [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:mockMethodChannel]; - - XCTestExpectation *mainThreadExpectation = - [self expectationWithDescription:@"invokeMethod must be called on the main thread"]; - - OCMStub([mockMethodChannel invokeMethod:[OCMArg any] arguments:[OCMArg any]]) - .andDo(^(NSInvocation *invocation) { - if (NSThread.isMainThread) { - [mainThreadExpectation fulfill]; - } - }); - - [threadSafeMethodChannel invokeMethod:@"foo" arguments:nil]; - [self waitForExpectationsWithTimeout:1 handler:nil]; -} - -- (void)testInvokeMethod__shouldDispatchToMainThreadIfCalledFromBackgroundThread { - FlutterMethodChannel *mockMethodChannel = OCMClassMock([FlutterMethodChannel class]); - FLTThreadSafeMethodChannel *threadSafeMethodChannel = - [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:mockMethodChannel]; - - XCTestExpectation *mainThreadExpectation = - [self expectationWithDescription:@"invokeMethod must be called on the main thread"]; - - OCMStub([mockMethodChannel invokeMethod:[OCMArg any] arguments:[OCMArg any]]) - .andDo(^(NSInvocation *invocation) { - if (NSThread.isMainThread) { - [mainThreadExpectation fulfill]; - } - }); - - dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ - [threadSafeMethodChannel invokeMethod:@"foo" arguments:nil]; - }); - [self waitForExpectationsWithTimeout:1 handler:nil]; -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m index 5aa8087ab4c4..90a124ebafd4 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m @@ -11,7 
+11,6 @@ #import "CameraProperties.h" #import "FLTCam.h" #import "FLTThreadSafeEventChannel.h" -#import "FLTThreadSafeMethodChannel.h" #import "FLTThreadSafeTextureRegistry.h" #import "QueueUtils.h" #import "messages.g.h" @@ -25,6 +24,7 @@ @interface CameraPlugin () @property(readonly, nonatomic) FLTThreadSafeTextureRegistry *registry; @property(readonly, nonatomic) NSObject *messenger; +@property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI; @end @implementation CameraPlugin @@ -41,37 +41,34 @@ + (void)registerWithRegistrar:(NSObject *)registrar { - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger { + return + [self initWithRegistry:registry + messenger:messenger + globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger]]; +} + +- (instancetype)initWithRegistry:(NSObject *)registry + messenger:(NSObject *)messenger + globalAPI:(FCPCameraGlobalEventApi *)globalAPI { self = [super init]; NSAssert(self, @"super init cannot be nil"); _registry = [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:registry]; _messenger = messenger; + _globalEventAPI = globalAPI; _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL); dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); - [self initDeviceEventMethodChannel]; - [self startOrientationListener]; - return self; -} - -- (void)initDeviceEventMethodChannel { - FlutterMethodChannel *methodChannel = [FlutterMethodChannel - methodChannelWithName:@"plugins.flutter.io/camera_avfoundation/fromPlatform" - binaryMessenger:_messenger]; - _deviceEventMethodChannel = - [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel]; -} - -- (void)detachFromEngineForRegistrar:(NSObject *)registrar { - [UIDevice.currentDevice endGeneratingDeviceOrientationNotifications]; -} - -- (void)startOrientationListener { [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(orientationChanged:) name:UIDeviceOrientationDidChangeNotification object:[UIDevice currentDevice]]; + return self; +} + +- (void)detachFromEngineForRegistrar:(NSObject *)registrar { + [UIDevice.currentDevice endGeneratingDeviceOrientationNotifications]; } - (void)orientationChanged:(NSNotification *)note { @@ -93,9 +90,17 @@ - (void)orientationChanged:(NSNotification *)note { } - (void)sendDeviceOrientation:(UIDeviceOrientation)orientation { - [_deviceEventMethodChannel - invokeMethod:@"orientation_changed" - arguments:@{@"orientation" : FLTGetStringForUIDeviceOrientation(orientation)}]; + __weak typeof(self) weakSelf = self; + dispatch_async(dispatch_get_main_queue(), ^{ + [weakSelf.globalEventAPI + deviceOrientationChangedOrientation:FCPGetPigeonDeviceOrientationForOrientation(orientation) + completion:^(FlutterError *error){ + // Ignore errors; this is essentially a broadcast stream, and + // it's fine if the other end + // doesn't receive the message (e.g., if it doesn't currently + // have a listener set up). 
+ }]; + }); } - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result { @@ -169,25 +174,10 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re [weakSelf.registry textureFrameAvailable:cameraId]; } }; - FlutterMethodChannel *methodChannel = [FlutterMethodChannel - methodChannelWithName: - [NSString stringWithFormat:@"plugins.flutter.io/camera_avfoundation/camera%lu", - (unsigned long)cameraId] - binaryMessenger:_messenger]; - FLTThreadSafeMethodChannel *threadSafeMethodChannel = - [[FLTThreadSafeMethodChannel alloc] initWithMethodChannel:methodChannel]; - _camera.methodChannel = threadSafeMethodChannel; - [threadSafeMethodChannel - invokeMethod:@"initialized" - arguments:@{ - @"previewWidth" : @(_camera.previewSize.width), - @"previewHeight" : @(_camera.previewSize.height), - @"exposureMode" : FLTGetStringForFLTExposureMode([_camera exposureMode]), - @"focusMode" : FLTGetStringForFLTFocusMode([_camera focusMode]), - @"exposurePointSupported" : - @([_camera.captureDevice isExposurePointOfInterestSupported]), - @"focusPointSupported" : @([_camera.captureDevice isFocusPointOfInterestSupported]), - }]; + _camera.dartAPI = [[FCPCameraEventApi alloc] + initWithBinaryMessenger:_messenger + messageChannelSuffix:[NSString stringWithFormat:@"%ld", cameraId]]; + [_camera reportInitializationState]; [self sendDeviceOrientation:[UIDevice currentDevice].orientation]; [_camera start]; result(nil); diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap index 8de3cde2a536..65a82b70bc28 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap @@ -12,7 +12,6 @@ framework module camera_avfoundation { header "FLTCam_Test.h" header "FLTSavePhotoDelegate_Test.h" header "FLTThreadSafeEventChannel.h" - header "FLTThreadSafeMethodChannel.h" header "FLTThreadSafeTextureRegistry.h" header "QueueUtils.h" } diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h index 97ccd4810945..ab6fb186ad78 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h @@ -6,6 +6,7 @@ #import "CameraPlugin.h" #import "FLTCam.h" +#import "messages.g.h" /// APIs exposed for unit testing. @interface CameraPlugin () @@ -16,14 +17,15 @@ /// An internal camera object that manages camera's state and performs camera operations. @property(nonatomic, strong) FLTCam *camera; -/// A thread safe wrapper of the method channel used to send device events such as orientation -/// changes. -@property(nonatomic, strong) FLTThreadSafeMethodChannel *deviceEventMethodChannel; - /// Inject @p FlutterTextureRegistry and @p FlutterBinaryMessenger for unit testing. +- (instancetype)initWithRegistry:(NSObject *)registry + messenger:(NSObject *)messenger; + +/// Inject @p FlutterTextureRegistry, @p FlutterBinaryMessenger, and Pigeon callback handler for +/// unit testing. - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger - NS_DESIGNATED_INITIALIZER; + globalAPI:(FCPCameraGlobalEventApi *)globalAPI NS_DESIGNATED_INITIALIZER; /// Hide the default public constructor. 
- (instancetype)init NS_UNAVAILABLE; diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h index 406f36af501c..e19f98faa2a2 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h @@ -5,6 +5,8 @@ @import AVFoundation; @import Foundation; +#import "messages.g.h" + NS_ASSUME_NONNULL_BEGIN #pragma mark - flash mode @@ -30,49 +32,24 @@ extern AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode m #pragma mark - exposure mode -/// Represents camera's exposure mode. Mirrors ExposureMode in camera.dart. -typedef NS_ENUM(NSInteger, FLTExposureMode) { - FLTExposureModeAuto, - FLTExposureModeLocked, - // This should never occur; it indicates an unknown value was received over - // the platform channel. - FLTExposureModeInvalid, -}; - -/// Gets a string representation of exposure mode. -/// @param mode exposure mode -extern NSString *FLTGetStringForFLTExposureMode(FLTExposureMode mode); - -/// Gets FLTExposureMode from its string representation. -/// @param mode a string representation of the FLTExposureMode. -extern FLTExposureMode FLTGetFLTExposureModeForString(NSString *mode); +/// Gets FCPPlatformExposureMode from its string representation. +/// @param mode a string representation of the exposure mode. +extern FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode); #pragma mark - focus mode -/// Represents camera's focus mode. Mirrors FocusMode in camera.dart. -typedef NS_ENUM(NSInteger, FLTFocusMode) { - FLTFocusModeAuto, - FLTFocusModeLocked, - // This should never occur; it indicates an unknown value was received over - // the platform channel. - FLTFocusModeInvalid, -}; - -/// Gets a string representation from FLTFocusMode. -/// @param mode focus mode -extern NSString *FLTGetStringForFLTFocusMode(FLTFocusMode mode); - -/// Gets FLTFocusMode from its string representation. +/// Gets FCPPlatformFocusMode from its string representation. /// @param mode a string representation of focus mode. -extern FLTFocusMode FLTGetFLTFocusModeForString(NSString *mode); +extern FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode); #pragma mark - device orientation /// Gets UIDeviceOrientation from its string representation. extern UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation); -/// Gets a string representation of UIDeviceOrientation. -extern NSString *FLTGetStringForUIDeviceOrientation(UIDeviceOrientation orientation); +/// Gets a Pigeon representation of UIDeviceOrientation. +extern FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation( + UIDeviceOrientation orientation); #pragma mark - resolution preset diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m index e61db402cfd9..e068c186474d 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m +++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m @@ -36,51 +36,29 @@ AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode) { #pragma mark - exposure mode -NSString *FLTGetStringForFLTExposureMode(FLTExposureMode mode) { - switch (mode) { - case FLTExposureModeAuto: - return @"auto"; - case FLTExposureModeLocked: - return @"locked"; - case FLTExposureModeInvalid: - // This value should never actually be used. 
- return nil; - } - return nil; -} - -FLTExposureMode FLTGetFLTExposureModeForString(NSString *mode) { +FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode) { if ([mode isEqualToString:@"auto"]) { - return FLTExposureModeAuto; + return FCPPlatformExposureModeAuto; } else if ([mode isEqualToString:@"locked"]) { - return FLTExposureModeLocked; + return FCPPlatformExposureModeLocked; } else { - return FLTExposureModeInvalid; + // This should be unreachable; see _serializeExposureMode in avfoundation_camera.dart. + NSCAssert(false, @"Unsupported exposure mode"); + return FCPPlatformExposureModeAuto; } } #pragma mark - focus mode -NSString *FLTGetStringForFLTFocusMode(FLTFocusMode mode) { - switch (mode) { - case FLTFocusModeAuto: - return @"auto"; - case FLTFocusModeLocked: - return @"locked"; - case FLTFocusModeInvalid: - // This value should never actually be used. - return nil; - } - return nil; -} - -FLTFocusMode FLTGetFLTFocusModeForString(NSString *mode) { +FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode) { if ([mode isEqualToString:@"auto"]) { - return FLTFocusModeAuto; + return FCPPlatformFocusModeAuto; } else if ([mode isEqualToString:@"locked"]) { - return FLTFocusModeLocked; + return FCPPlatformFocusModeLocked; } else { - return FLTFocusModeInvalid; + // This should be unreachable; see _serializeFocusMode in avfoundation_camera.dart. + NSCAssert(false, @"Unsupported focus mode"); + return FCPPlatformFocusModeAuto; } } @@ -100,17 +78,18 @@ UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation) { } } -NSString *FLTGetStringForUIDeviceOrientation(UIDeviceOrientation orientation) { +FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation( + UIDeviceOrientation orientation) { switch (orientation) { case UIDeviceOrientationPortraitUpsideDown: - return @"portraitDown"; + return FCPPlatformDeviceOrientationPortraitDown; case UIDeviceOrientationLandscapeLeft: - return @"landscapeLeft"; + return FCPPlatformDeviceOrientationLandscapeLeft; case UIDeviceOrientationLandscapeRight: - return @"landscapeRight"; + return FCPPlatformDeviceOrientationLandscapeRight; case UIDeviceOrientationPortrait: default: - return @"portraitUp"; + return FCPPlatformDeviceOrientationPortraitUp; }; } @@ -159,4 +138,4 @@ FCPFileFormat FCPGetFileFormatFromString(NSString *fileFormatString) { } else { return FCPFileFormatInvalid; } -} \ No newline at end of file +} diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h index 281ff143bcb0..ddc1e25ded15 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h @@ -9,9 +9,8 @@ #import "CameraProperties.h" #import "FLTCamMediaSettings.h" #import "FLTCamMediaSettingsAVWrapper.h" -#import "FLTThreadSafeEventChannel.h" -#import "FLTThreadSafeMethodChannel.h" #import "FLTThreadSafeTextureRegistry.h" +#import "messages.g.h" NS_ASSUME_NONNULL_BEGIN @@ -22,10 +21,12 @@ NS_ASSUME_NONNULL_BEGIN @property(readonly, nonatomic) CGSize previewSize; @property(assign, nonatomic) BOOL isPreviewPaused; @property(nonatomic, copy) void (^onFrameAvailable)(void); -@property(nonatomic) FLTThreadSafeMethodChannel *methodChannel; +/// The API instance used to communicate with the Dart side of the plugin. Once initially set, this +/// should only ever be accessed on the main thread. 
+@property(nonatomic) FCPCameraEventApi *dartAPI; @property(assign, nonatomic) FLTResolutionPreset resolutionPreset; -@property(assign, nonatomic) FLTExposureMode exposureMode; -@property(assign, nonatomic) FLTFocusMode focusMode; +@property(assign, nonatomic) FCPPlatformExposureMode exposureMode; +@property(assign, nonatomic) FCPPlatformFocusMode focusMode; @property(assign, nonatomic) FLTFlashMode flashMode; // Format used for video and image streaming. @property(assign, nonatomic) FourCharCode videoFormat; @@ -48,6 +49,8 @@ NS_ASSUME_NONNULL_BEGIN captureSessionQueue:(dispatch_queue_t)captureSessionQueue error:(NSError **)error; +/// Informs the Dart side of the plugin of the current camera state and capabilities. +- (void)reportInitializationState; - (void)start; - (void)stop; - (void)setDeviceOrientation:(UIDeviceOrientation)orientation; @@ -91,7 +94,7 @@ NS_ASSUME_NONNULL_BEGIN /// /// @param focusMode The focus mode that should be applied to the @captureDevice instance. /// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied. -- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; - (void)pausePreviewWithResult:(FlutterResult)result; - (void)resumePreviewWithResult:(FlutterResult)result; - (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResult)result; diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m index 9544dbc7b8d7..f65af0794187 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m @@ -4,12 +4,16 @@ #import "FLTCam.h" #import "FLTCam_Test.h" -#import "FLTSavePhotoDelegate.h" -#import "QueueUtils.h" @import CoreMotion; +@import Flutter; #import +#import "FLTSavePhotoDelegate.h" +#import "FLTThreadSafeEventChannel.h" +#import "QueueUtils.h" +#import "messages.g.h" + static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] message:error.localizedDescription @@ -98,6 +102,11 @@ @interface FLTCam () *)messen }); }]; } else { - [_methodChannel invokeMethod:errorMethod - arguments:@"Images from camera are already streaming!"]; + [self reportErrorMessage:@"Images from camera are already streaming!"]; } } @@ -1217,7 +1206,7 @@ - (void)stopImageStream { _isStreamingImages = NO; _imageStreamHandler = nil; } else { - [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are not streaming!"]; + [self reportErrorMessage:@"Images from camera are not streaming!"]; } } @@ -1286,7 +1275,7 @@ - (BOOL)setupWriterForPath:(NSString *)path { error:&error]; NSParameterAssert(_videoWriter); if (error) { - [_methodChannel invokeMethod:errorMethod arguments:error.description]; + [self reportErrorMessage:error.description]; return NO; } @@ -1372,7 +1361,7 @@ - (void)setUpCaptureSessionForAudio { AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error]; if (error) { - [_methodChannel invokeMethod:errorMethod arguments:error.description]; + [self reportErrorMessage:error.description]; } // Setup the audio output. 
_audioOutput = [[AVCaptureAudioDataOutput alloc] init]; @@ -1384,10 +1373,20 @@ - (void)setUpCaptureSessionForAudio { [_audioCaptureSession addOutput:_audioOutput]; _isAudioSetup = YES; } else { - [_methodChannel invokeMethod:errorMethod - arguments:@"Unable to add Audio input/output to session capture"]; + [self reportErrorMessage:@"Unable to add Audio input/output to session capture"]; _isAudioSetup = NO; } } } + +- (void)reportErrorMessage:(NSString *)errorMessage { + __weak typeof(self) weakSelf = self; + FLTEnsureToRunOnMainQueue(^{ + [weakSelf.dartAPI reportError:errorMessage + completion:^(FlutterError *error){ + // Ignore any errors, as this is just an event broadcast. + }]; + }); +} + @end diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h deleted file mode 100644 index 1ca0a7312e45..000000000000 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.h +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#import - -NS_ASSUME_NONNULL_BEGIN - -/// A thread safe wrapper for FlutterMethodChannel that can be called from any thread, by -/// dispatching its underlying engine calls to the main thread. -@interface FLTThreadSafeMethodChannel : NSObject - -/// Creates a FLTThreadSafeMethodChannel by wrapping a FlutterMethodChannel object. -/// @param channel The FlutterMethodChannel object to be wrapped. -- (instancetype)initWithMethodChannel:(FlutterMethodChannel *)channel; - -/// Invokes the specified flutter method on the main thread with the specified arguments. -- (void)invokeMethod:(NSString *)method arguments:(nullable id)arguments; - -@end - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m deleted file mode 100644 index df7c169bd43f..000000000000 --- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeMethodChannel.m +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#import "FLTThreadSafeMethodChannel.h" -#import "QueueUtils.h" - -@interface FLTThreadSafeMethodChannel () -@property(nonatomic, strong) FlutterMethodChannel *channel; -@end - -@implementation FLTThreadSafeMethodChannel - -- (instancetype)initWithMethodChannel:(FlutterMethodChannel *)channel { - self = [super init]; - if (self) { - _channel = channel; - } - return self; -} - -- (void)invokeMethod:(NSString *)method arguments:(id)arguments { - __weak typeof(self) weakSelf = self; - FLTEnsureToRunOnMainQueue(^{ - [weakSelf.channel invokeMethod:method arguments:arguments]; - }); -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h index 219aef19a665..4f17971bf7ca 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h +++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h @@ -28,7 +28,44 @@ typedef NS_ENUM(NSUInteger, FCPPlatformCameraLensDirection) { - (instancetype)initWithValue:(FCPPlatformCameraLensDirection)value; @end +typedef NS_ENUM(NSUInteger, FCPPlatformDeviceOrientation) { + FCPPlatformDeviceOrientationPortraitUp = 0, + FCPPlatformDeviceOrientationLandscapeLeft = 1, + FCPPlatformDeviceOrientationPortraitDown = 2, + FCPPlatformDeviceOrientationLandscapeRight = 3, +}; + +/// Wrapper for FCPPlatformDeviceOrientation to allow for nullability. +@interface FCPPlatformDeviceOrientationBox : NSObject +@property(nonatomic, assign) FCPPlatformDeviceOrientation value; +- (instancetype)initWithValue:(FCPPlatformDeviceOrientation)value; +@end + +typedef NS_ENUM(NSUInteger, FCPPlatformExposureMode) { + FCPPlatformExposureModeAuto = 0, + FCPPlatformExposureModeLocked = 1, +}; + +/// Wrapper for FCPPlatformExposureMode to allow for nullability. +@interface FCPPlatformExposureModeBox : NSObject +@property(nonatomic, assign) FCPPlatformExposureMode value; +- (instancetype)initWithValue:(FCPPlatformExposureMode)value; +@end + +typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) { + FCPPlatformFocusModeAuto = 0, + FCPPlatformFocusModeLocked = 1, +}; + +/// Wrapper for FCPPlatformFocusMode to allow for nullability. +@interface FCPPlatformFocusModeBox : NSObject +@property(nonatomic, assign) FCPPlatformFocusMode value; +- (instancetype)initWithValue:(FCPPlatformFocusMode)value; +@end + @class FCPPlatformCameraDescription; +@class FCPPlatformCameraState; +@class FCPPlatformSize; @interface FCPPlatformCameraDescription : NSObject /// `init` unavailable to enforce nonnull fields, see the `make` class method. @@ -41,6 +78,34 @@ typedef NS_ENUM(NSUInteger, FCPPlatformCameraLensDirection) { @property(nonatomic, assign) FCPPlatformCameraLensDirection lensDirection; @end +@interface FCPPlatformCameraState : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithPreviewSize:(FCPPlatformSize *)previewSize + exposureMode:(FCPPlatformExposureMode)exposureMode + focusMode:(FCPPlatformFocusMode)focusMode + exposurePointSupported:(BOOL)exposurePointSupported + focusPointSupported:(BOOL)focusPointSupported; +/// The size of the preview, in pixels. +@property(nonatomic, strong) FCPPlatformSize *previewSize; +/// The default exposure mode +@property(nonatomic, assign) FCPPlatformExposureMode exposureMode; +/// The default focus mode +@property(nonatomic, assign) FCPPlatformFocusMode focusMode; +/// Whether setting exposure points is supported. 
+@property(nonatomic, assign) BOOL exposurePointSupported; +/// Whether setting focus points is supported. +@property(nonatomic, assign) BOOL focusPointSupported; +@end + +@interface FCPPlatformSize : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithWidth:(double)width height:(double)height; +@property(nonatomic, assign) double width; +@property(nonatomic, assign) double height; +@end + /// The codec used by FCPCameraApi. NSObject *FCPCameraApiGetCodec(void); @@ -57,4 +122,37 @@ extern void SetUpFCPCameraApiWithSuffix(id binaryMesseng NSObject *_Nullable api, NSString *messageChannelSuffix); +/// The codec used by FCPCameraGlobalEventApi. +NSObject *FCPCameraGlobalEventApiGetCodec(void); + +/// Handler for native callbacks that are not tied to a specific camera ID. +@interface FCPCameraGlobalEventApi : NSObject +- (instancetype)initWithBinaryMessenger:(id)binaryMessenger; +- (instancetype)initWithBinaryMessenger:(id)binaryMessenger + messageChannelSuffix:(nullable NSString *)messageChannelSuffix; +/// Called when the device's physical orientation changes. +- (void)deviceOrientationChangedOrientation:(FCPPlatformDeviceOrientation)orientation + completion:(void (^)(FlutterError *_Nullable))completion; +@end + +/// The codec used by FCPCameraEventApi. +NSObject *FCPCameraEventApiGetCodec(void); + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +@interface FCPCameraEventApi : NSObject +- (instancetype)initWithBinaryMessenger:(id)binaryMessenger; +- (instancetype)initWithBinaryMessenger:(id)binaryMessenger + messageChannelSuffix:(nullable NSString *)messageChannelSuffix; +/// Called when the camera is initialized for use. +- (void)initializedWithState:(FCPPlatformCameraState *)initialState + completion:(void (^)(FlutterError *_Nullable))completion; +/// Called when an error occurs in the camera. +/// +/// This should be used for errors that occur outside of the context of +/// handling a specific HostApi call, such as during streaming. +- (void)reportError:(NSString *)message completion:(void (^)(FlutterError *_Nullable))completion; +@end + NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m index 2e5063dcfc72..fd1100c0b778 100644 --- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m +++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m @@ -25,6 +25,15 @@ return @[ result ?: [NSNull null] ]; } +static FlutterError *createConnectionError(NSString *channelName) { + return [FlutterError + errorWithCode:@"channel-error" + message:[NSString stringWithFormat:@"%@/%@/%@", + @"Unable to establish connection on channel: '", + channelName, @"'."] + details:@""]; +} + static id GetNullableObjectAtIndex(NSArray *array, NSInteger key) { id result = array[key]; return (result == [NSNull null]) ?
nil : result; @@ -40,12 +49,54 @@ - (instancetype)initWithValue:(FCPPlatformCameraLensDirection)value { } @end +@implementation FCPPlatformDeviceOrientationBox +- (instancetype)initWithValue:(FCPPlatformDeviceOrientation)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} +@end + +@implementation FCPPlatformExposureModeBox +- (instancetype)initWithValue:(FCPPlatformExposureMode)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} +@end + +@implementation FCPPlatformFocusModeBox +- (instancetype)initWithValue:(FCPPlatformFocusMode)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} +@end + @interface FCPPlatformCameraDescription () + (FCPPlatformCameraDescription *)fromList:(NSArray *)list; + (nullable FCPPlatformCameraDescription *)nullableFromList:(NSArray *)list; - (NSArray *)toList; @end +@interface FCPPlatformCameraState () ++ (FCPPlatformCameraState *)fromList:(NSArray *)list; ++ (nullable FCPPlatformCameraState *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FCPPlatformSize () ++ (FCPPlatformSize *)fromList:(NSArray *)list; ++ (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + @implementation FCPPlatformCameraDescription + (instancetype)makeWithName:(NSString *)name lensDirection:(FCPPlatformCameraLensDirection)lensDirection { @@ -71,6 +122,67 @@ - (NSArray *)toList { } @end +@implementation FCPPlatformCameraState ++ (instancetype)makeWithPreviewSize:(FCPPlatformSize *)previewSize + exposureMode:(FCPPlatformExposureMode)exposureMode + focusMode:(FCPPlatformFocusMode)focusMode + exposurePointSupported:(BOOL)exposurePointSupported + focusPointSupported:(BOOL)focusPointSupported { + FCPPlatformCameraState *pigeonResult = [[FCPPlatformCameraState alloc] init]; + pigeonResult.previewSize = previewSize; + pigeonResult.exposureMode = exposureMode; + pigeonResult.focusMode = focusMode; + pigeonResult.exposurePointSupported = exposurePointSupported; + pigeonResult.focusPointSupported = focusPointSupported; + return pigeonResult; +} ++ (FCPPlatformCameraState *)fromList:(NSArray *)list { + FCPPlatformCameraState *pigeonResult = [[FCPPlatformCameraState alloc] init]; + pigeonResult.previewSize = [FCPPlatformSize nullableFromList:(GetNullableObjectAtIndex(list, 0))]; + pigeonResult.exposureMode = [GetNullableObjectAtIndex(list, 1) integerValue]; + pigeonResult.focusMode = [GetNullableObjectAtIndex(list, 2) integerValue]; + pigeonResult.exposurePointSupported = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.focusPointSupported = [GetNullableObjectAtIndex(list, 4) boolValue]; + return pigeonResult; +} ++ (nullable FCPPlatformCameraState *)nullableFromList:(NSArray *)list { + return (list) ? [FCPPlatformCameraState fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + (self.previewSize ? 
[self.previewSize toList] : [NSNull null]), + @(self.exposureMode), + @(self.focusMode), + @(self.exposurePointSupported), + @(self.focusPointSupported), + ]; +} +@end + +@implementation FCPPlatformSize ++ (instancetype)makeWithWidth:(double)width height:(double)height { + FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init]; + pigeonResult.width = width; + pigeonResult.height = height; + return pigeonResult; +} ++ (FCPPlatformSize *)fromList:(NSArray *)list { + FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init]; + pigeonResult.width = [GetNullableObjectAtIndex(list, 0) doubleValue]; + pigeonResult.height = [GetNullableObjectAtIndex(list, 1) doubleValue]; + return pigeonResult; +} ++ (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list { + return (list) ? [FCPPlatformSize fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.width), + @(self.height), + ]; +} +@end + @interface FCPCameraApiCodecReader : FlutterStandardReader @end @implementation FCPCameraApiCodecReader @@ -153,3 +265,182 @@ void SetUpFCPCameraApiWithSuffix(id binaryMessenger, } } } +NSObject *FCPCameraGlobalEventApiGetCodec(void) { + static FlutterStandardMessageCodec *sSharedObject = nil; + sSharedObject = [FlutterStandardMessageCodec sharedInstance]; + return sSharedObject; +} + +@interface FCPCameraGlobalEventApi () +@property(nonatomic, strong) NSObject *binaryMessenger; +@property(nonatomic, strong) NSString *messageChannelSuffix; +@end + +@implementation FCPCameraGlobalEventApi + +- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger { + return [self initWithBinaryMessenger:binaryMessenger messageChannelSuffix:@""]; +} +- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger + messageChannelSuffix:(nullable NSString *)messageChannelSuffix { + self = [self init]; + if (self) { + _binaryMessenger = binaryMessenger; + _messageChannelSuffix = [messageChannelSuffix length] == 0 + ? 
@"" + : [NSString stringWithFormat:@".%@", messageChannelSuffix]; + } + return self; +} +- (void)deviceOrientationChangedOrientation:(FCPPlatformDeviceOrientation)arg_orientation + completion:(void (^)(FlutterError *_Nullable))completion { + NSString *channelName = [NSString + stringWithFormat: + @"%@%@", + @"dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged", + _messageChannelSuffix]; + FlutterBasicMessageChannel *channel = + [FlutterBasicMessageChannel messageChannelWithName:channelName + binaryMessenger:self.binaryMessenger + codec:FCPCameraGlobalEventApiGetCodec()]; + [channel sendMessage:@[ [NSNumber numberWithInteger:arg_orientation] ] + reply:^(NSArray *reply) { + if (reply != nil) { + if (reply.count > 1) { + completion([FlutterError errorWithCode:reply[0] + message:reply[1] + details:reply[2]]); + } else { + completion(nil); + } + } else { + completion(createConnectionError(channelName)); + } + }]; +} +@end + +@interface FCPCameraEventApiCodecReader : FlutterStandardReader +@end +@implementation FCPCameraEventApiCodecReader +- (nullable id)readValueOfType:(UInt8)type { + switch (type) { + case 128: + return [FCPPlatformCameraState fromList:[self readValue]]; + case 129: + return [FCPPlatformSize fromList:[self readValue]]; + default: + return [super readValueOfType:type]; + } +} +@end + +@interface FCPCameraEventApiCodecWriter : FlutterStandardWriter +@end +@implementation FCPCameraEventApiCodecWriter +- (void)writeValue:(id)value { + if ([value isKindOfClass:[FCPPlatformCameraState class]]) { + [self writeByte:128]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FCPPlatformSize class]]) { + [self writeByte:129]; + [self writeValue:[value toList]]; + } else { + [super writeValue:value]; + } +} +@end + +@interface FCPCameraEventApiCodecReaderWriter : FlutterStandardReaderWriter +@end +@implementation FCPCameraEventApiCodecReaderWriter +- (FlutterStandardWriter *)writerWithData:(NSMutableData *)data { + return [[FCPCameraEventApiCodecWriter alloc] initWithData:data]; +} +- (FlutterStandardReader *)readerWithData:(NSData *)data { + return [[FCPCameraEventApiCodecReader alloc] initWithData:data]; +} +@end + +NSObject *FCPCameraEventApiGetCodec(void) { + static FlutterStandardMessageCodec *sSharedObject = nil; + static dispatch_once_t sPred = 0; + dispatch_once(&sPred, ^{ + FCPCameraEventApiCodecReaderWriter *readerWriter = + [[FCPCameraEventApiCodecReaderWriter alloc] init]; + sSharedObject = [FlutterStandardMessageCodec codecWithReaderWriter:readerWriter]; + }); + return sSharedObject; +} + +@interface FCPCameraEventApi () +@property(nonatomic, strong) NSObject *binaryMessenger; +@property(nonatomic, strong) NSString *messageChannelSuffix; +@end + +@implementation FCPCameraEventApi + +- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger { + return [self initWithBinaryMessenger:binaryMessenger messageChannelSuffix:@""]; +} +- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger + messageChannelSuffix:(nullable NSString *)messageChannelSuffix { + self = [self init]; + if (self) { + _binaryMessenger = binaryMessenger; + _messageChannelSuffix = [messageChannelSuffix length] == 0 + ? 
@"" + : [NSString stringWithFormat:@".%@", messageChannelSuffix]; + } + return self; +} +- (void)initializedWithState:(FCPPlatformCameraState *)arg_initialState + completion:(void (^)(FlutterError *_Nullable))completion { + NSString *channelName = [NSString + stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized", + _messageChannelSuffix]; + FlutterBasicMessageChannel *channel = + [FlutterBasicMessageChannel messageChannelWithName:channelName + binaryMessenger:self.binaryMessenger + codec:FCPCameraEventApiGetCodec()]; + [channel sendMessage:@[ arg_initialState ?: [NSNull null] ] + reply:^(NSArray *reply) { + if (reply != nil) { + if (reply.count > 1) { + completion([FlutterError errorWithCode:reply[0] + message:reply[1] + details:reply[2]]); + } else { + completion(nil); + } + } else { + completion(createConnectionError(channelName)); + } + }]; +} +- (void)reportError:(NSString *)arg_message + completion:(void (^)(FlutterError *_Nullable))completion { + NSString *channelName = [NSString + stringWithFormat:@"%@%@", @"dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error", + _messageChannelSuffix]; + FlutterBasicMessageChannel *channel = + [FlutterBasicMessageChannel messageChannelWithName:channelName + binaryMessenger:self.binaryMessenger + codec:FCPCameraEventApiGetCodec()]; + [channel sendMessage:@[ arg_message ?: [NSNull null] ] + reply:^(NSArray *reply) { + if (reply != nil) { + if (reply.count > 1) { + completion([FlutterError errorWithCode:reply[0] + message:reply[1] + details:reply[2]]); + } else { + completion(nil); + } + } else { + completion(createConnectionError(channelName)); + } + }]; +} +@end diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart index 383f195507e3..dc9f3c74d82c 100644 --- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart +++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart @@ -33,14 +33,6 @@ class AVFoundationCamera extends CameraPlatform { /// Interface for calling host-side code. final CameraApi _hostApi; - final Map _channels = {}; - - /// The name of the channel that device events from the platform side are - /// sent on. - @visibleForTesting - static const String deviceEventChannelName = - 'plugins.flutter.io/camera_avfoundation/fromPlatform'; - /// The controller we need to broadcast the different events coming /// from handleMethodCall, specific to camera events. /// @@ -52,20 +44,19 @@ class AVFoundationCamera extends CameraPlatform { final StreamController cameraEventStreamController = StreamController.broadcast(); - /// The controller we need to broadcast the different events coming - /// from handleMethodCall, specific to general device events. - /// - /// It is a `broadcast` because multiple controllers will connect to - /// different stream views of this Controller. - late final StreamController _deviceEventStreamController = - _createDeviceEventStreamController(); - - StreamController _createDeviceEventStreamController() { + /// The handler for device-level messages that should be rebroadcast to + /// clients as [DeviceEvent]s. + @visibleForTesting + late final HostDeviceMessageHandler hostHandler = () { // Set up the method handler lazily. 
- const MethodChannel channel = MethodChannel(deviceEventChannelName); - channel.setMethodCallHandler(_handleDeviceMethodCall); - return StreamController.broadcast(); - } + return HostDeviceMessageHandler(); + }(); + + /// The per-camera handlers for messages that should be rebroadcast to + /// clients as [CameraEvent]s. + @visibleForTesting + final Map hostCameraHandlers = + {}; // The stream to receive frames from the native code. StreamSubscription? _platformImageStreamSubscription; @@ -132,13 +123,8 @@ class AVFoundationCamera extends CameraPlatform { int cameraId, { ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown, }) { - _channels.putIfAbsent(cameraId, () { - final MethodChannel channel = MethodChannel( - 'plugins.flutter.io/camera_avfoundation/camera$cameraId'); - channel.setMethodCallHandler( - (MethodCall call) => handleCameraMethodCall(call, cameraId)); - return channel; - }); + hostCameraHandlers.putIfAbsent(cameraId, + () => HostCameraMessageHandler(cameraId, cameraEventStreamController)); final Completer completer = Completer(); @@ -174,11 +160,9 @@ class AVFoundationCamera extends CameraPlatform { @override Future dispose(int cameraId) async { - if (_channels.containsKey(cameraId)) { - final MethodChannel? cameraChannel = _channels[cameraId]; - cameraChannel?.setMethodCallHandler(null); - _channels.remove(cameraId); - } + final HostCameraMessageHandler? handler = + hostCameraHandlers.remove(cameraId); + handler?.dispose(); await _channel.invokeMethod( 'dispose', @@ -213,7 +197,7 @@ class AVFoundationCamera extends CameraPlatform { @override Stream onDeviceOrientationChanged() { - return _deviceEventStreamController.stream + return hostHandler.deviceEventStreamController.stream .whereType(); } @@ -385,7 +369,7 @@ class AVFoundationCamera extends CameraPlatform { 'setExposureMode', { 'cameraId': cameraId, - 'mode': serializeExposureMode(mode), + 'mode': _serializeExposureMode(mode), }, ); @@ -454,7 +438,7 @@ class AVFoundationCamera extends CameraPlatform { 'setFocusMode', { 'cameraId': cameraId, - 'mode': serializeFocusMode(mode), + 'mode': _serializeFocusMode(mode), }, ); @@ -552,6 +536,38 @@ class AVFoundationCamera extends CameraPlatform { return Texture(textureId: cameraId); } + String _serializeFocusMode(FocusMode mode) { + switch (mode) { + case FocusMode.locked: + return 'locked'; + case FocusMode.auto: + return 'auto'; + } + // The enum comes from a different package, which could get a new value at + // any time, so provide a fallback that ensures this won't break when used + // with a version that contains new values. This is deliberately outside + // the switch rather than a `default` so that the linter will flag the + // switch as needing an update. + // ignore: dead_code + return 'auto'; + } + + String _serializeExposureMode(ExposureMode mode) { + switch (mode) { + case ExposureMode.locked: + return 'locked'; + case ExposureMode.auto: + return 'auto'; + } + // The enum comes from a different package, which could get a new value at + // any time, so provide a fallback that ensures this won't break when used + // with a version that contains new values. This is deliberately outside + // the switch rather than a `default` so that the linter will flag the + // switch as needing an update. + // ignore: dead_code + return 'auto'; + } + /// Returns the flash mode as a String. 
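
Together with the `createCamera` and `dispose` hunks above, each camera now gets its own `HostCameraMessageHandler`: one is inserted with `putIfAbsent` when the camera is created and removed and disposed when the camera is disposed, mirroring the old per-camera `MethodChannel` lifecycle. A simplified, self-contained sketch of that map-based lifecycle (the Example* types are placeholders, not plugin classes):

// A simplified sketch of the per-camera handler lifecycle; ExampleCameraHandler
// is a stand-in for HostCameraMessageHandler, not a real plugin class.
class ExampleCameraHandler {
  ExampleCameraHandler(this.cameraId);
  final int cameraId;
  bool disposed = false;

  // The real handler's dispose() calls CameraEventApi.setUp(null, ...) to
  // unregister its message channel.
  void dispose() => disposed = true;
}

final Map<int, ExampleCameraHandler> handlers = <int, ExampleCameraHandler>{};

void onCreate(int cameraId) {
  // putIfAbsent keeps a repeated create call for the same ID from replacing
  // (and leaking) an existing handler.
  handlers.putIfAbsent(cameraId, () => ExampleCameraHandler(cameraId));
}

void onDispose(int cameraId) {
  // remove() hands back the handler (or null), so teardown and map cleanup
  // happen in one step.
  handlers.remove(cameraId)?.dispose();
}

void main() {
  onCreate(1);
  onCreate(1); // No effect: the existing handler for camera 1 is kept.
  onDispose(1);
  print(handlers.isEmpty); // true
}
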
String _serializeFlashMode(FlashMode flashMode) { switch (flashMode) { @@ -597,73 +613,67 @@ class AVFoundationCamera extends CameraPlatform { // ignore: dead_code return 'max'; } +} - /// Converts messages received from the native platform into device events. - Future _handleDeviceMethodCall(MethodCall call) async { - switch (call.method) { - case 'orientation_changed': - final Map arguments = _getArgumentDictionary(call); - _deviceEventStreamController.add(DeviceOrientationChangedEvent( - deserializeDeviceOrientation(arguments['orientation']! as String))); - default: - throw MissingPluginException(); - } +/// Callback handler for device-level events from the platform host. +@visibleForTesting +class HostDeviceMessageHandler implements CameraGlobalEventApi { + /// Creates a new handler and registers it to listen to its platform channel. + HostDeviceMessageHandler() { + CameraGlobalEventApi.setUp(this); } - /// Converts messages received from the native platform into camera events. + /// The controller used to broadcast general device events coming from the + /// host platform. /// - /// This is only exposed for test purposes. It shouldn't be used by clients of - /// the plugin as it may break or change at any time. - @visibleForTesting - Future handleCameraMethodCall(MethodCall call, int cameraId) async { - switch (call.method) { - case 'initialized': - final Map arguments = _getArgumentDictionary(call); - cameraEventStreamController.add(CameraInitializedEvent( - cameraId, - arguments['previewWidth']! as double, - arguments['previewHeight']! as double, - deserializeExposureMode(arguments['exposureMode']! as String), - arguments['exposurePointSupported']! as bool, - deserializeFocusMode(arguments['focusMode']! as String), - arguments['focusPointSupported']! as bool, - )); - case 'resolution_changed': - final Map arguments = _getArgumentDictionary(call); - cameraEventStreamController.add(CameraResolutionChangedEvent( - cameraId, - arguments['captureWidth']! as double, - arguments['captureHeight']! as double, - )); - case 'camera_closing': - cameraEventStreamController.add(CameraClosingEvent( - cameraId, - )); - case 'video_recorded': - final Map arguments = _getArgumentDictionary(call); - cameraEventStreamController.add(VideoRecordedEvent( - cameraId, - XFile(arguments['path']! as String), - arguments['maxVideoDuration'] != null - ? Duration(milliseconds: arguments['maxVideoDuration']! as int) - : null, - )); - case 'error': - final Map arguments = _getArgumentDictionary(call); - cameraEventStreamController.add(CameraErrorEvent( - cameraId, - arguments['description']! as String, - )); - default: - throw MissingPluginException(); - } + /// It is a `broadcast` because multiple controllers will connect to + /// different stream views of this Controller. + final StreamController deviceEventStreamController = + StreamController.broadcast(); + + @override + void deviceOrientationChanged(PlatformDeviceOrientation orientation) { + deviceEventStreamController.add(DeviceOrientationChangedEvent( + deviceOrientationFromPlatform(orientation))); } +} - /// Returns the arguments of [call] as typed string-keyed Map. - /// - /// This does not do any type validation, so is only safe to call if the - /// arguments are known to be a map. - Map _getArgumentDictionary(MethodCall call) { - return (call.arguments as Map).cast(); +/// Callback handler for camera-level events from the platform host. 
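
The new `HostDeviceMessageHandler` above is the Pigeon replacement for `_handleDeviceMethodCall`: the host calls the typed `deviceOrientationChanged` override, which pushes a `DeviceOrientationChangedEvent` into a broadcast controller, and `onDeviceOrientationChanged` exposes a `whereType`-filtered view of that stream. A minimal sketch of the same fan-out pattern with placeholder types (plain `where` is used here to stay dependency-free):

import 'dart:async';

// A sketch, not plugin code: placeholder event types stand in for
// DeviceEvent and DeviceOrientationChangedEvent.
class ExampleEvent {}

class ExampleOrientationEvent implements ExampleEvent {
  ExampleOrientationEvent(this.orientation);
  final String orientation;
}

class ExampleDeviceHandler {
  // Broadcast, because several listeners may subscribe to different views.
  final StreamController<ExampleEvent> controller =
      StreamController<ExampleEvent>.broadcast();

  // Stands in for the CameraGlobalEventApi override that the host invokes.
  void deviceOrientationChanged(String orientation) {
    controller.add(ExampleOrientationEvent(orientation));
  }
}

Future<void> main() async {
  final ExampleDeviceHandler handler = ExampleDeviceHandler();
  // Equivalent in spirit to onDeviceOrientationChanged(): a filtered view of
  // the shared event stream.
  handler.controller.stream
      .where((ExampleEvent e) => e is ExampleOrientationEvent)
      .listen((ExampleEvent e) =>
          print((e as ExampleOrientationEvent).orientation));
  handler.deviceOrientationChanged('portraitUp'); // prints: portraitUp
  await handler.controller.close();
}
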
+@visibleForTesting +class HostCameraMessageHandler implements CameraEventApi { + /// Creates a new handler that listens for events from camera [cameraId], and + /// broadcasts them to [streamController]. + HostCameraMessageHandler(this.cameraId, this.streamController) { + CameraEventApi.setUp(this, messageChannelSuffix: cameraId.toString()); + } + + /// Removes the handler for native messages. + void dispose() { + CameraEventApi.setUp(null, messageChannelSuffix: cameraId.toString()); + } + + /// The camera ID this handler listens for events from. + final int cameraId; + + /// The controller used to broadcast camera events coming from the + /// host platform. + final StreamController streamController; + + @override + void error(String message) { + streamController.add(CameraErrorEvent(cameraId, message)); + } + + @override + void initialized(PlatformCameraState initialState) { + streamController.add(CameraInitializedEvent( + cameraId, + initialState.previewSize.width, + initialState.previewSize.height, + exposureModeFromPlatform(initialState.exposureMode), + initialState.exposurePointSupported, + focusModeFromPlatform(initialState.focusMode), + initialState.focusPointSupported, + )); } } diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart index 34b9004976a0..a4b399217eb6 100644 --- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart +++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart @@ -18,6 +18,17 @@ PlatformException _createConnectionError(String channelName) { ); } +List wrapResponse( + {Object? result, PlatformException? error, bool empty = false}) { + if (empty) { + return []; + } + if (error == null) { + return [result]; + } + return [error.code, error.message, error.details]; +} + enum PlatformCameraLensDirection { /// Front facing camera (a user looking at the screen is seen by the camera). front, @@ -29,6 +40,23 @@ enum PlatformCameraLensDirection { external, } +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +enum PlatformExposureMode { + auto, + locked, +} + +enum PlatformFocusMode { + auto, + locked, +} + class PlatformCameraDescription { PlatformCameraDescription({ required this.name, @@ -57,6 +85,78 @@ class PlatformCameraDescription { } } +class PlatformCameraState { + PlatformCameraState({ + required this.previewSize, + required this.exposureMode, + required this.focusMode, + required this.exposurePointSupported, + required this.focusPointSupported, + }); + + /// The size of the preview, in pixels. + PlatformSize previewSize; + + /// The default exposure mode + PlatformExposureMode exposureMode; + + /// The default focus mode + PlatformFocusMode focusMode; + + /// Whether setting exposure points is supported. + bool exposurePointSupported; + + /// Whether setting focus points is supported. + bool focusPointSupported; + + Object encode() { + return [ + previewSize.encode(), + exposureMode.index, + focusMode.index, + exposurePointSupported, + focusPointSupported, + ]; + } + + static PlatformCameraState decode(Object result) { + result as List; + return PlatformCameraState( + previewSize: PlatformSize.decode(result[0]! as List), + exposureMode: PlatformExposureMode.values[result[1]! as int], + focusMode: PlatformFocusMode.values[result[2]! as int], + exposurePointSupported: result[3]! as bool, + focusPointSupported: result[4]! 
as bool, + ); + } +} + +class PlatformSize { + PlatformSize({ + required this.width, + required this.height, + }); + + double width; + + double height; + + Object encode() { + return [ + width, + height, + ]; + } + + static PlatformSize decode(Object result) { + result as List; + return PlatformSize( + width: result[0]! as double, + height: result[1]! as double, + ); + } +} + class _CameraApiCodec extends StandardMessageCodec { const _CameraApiCodec(); @override @@ -126,3 +226,162 @@ class CameraApi { } } } + +/// Handler for native callbacks that are not tied to a specific camera ID. +abstract class CameraGlobalEventApi { + static const MessageCodec pigeonChannelCodec = + StandardMessageCodec(); + + /// Called when the device's physical orientation changes. + void deviceOrientationChanged(PlatformDeviceOrientation orientation); + + static void setUp( + CameraGlobalEventApi? api, { + BinaryMessenger? binaryMessenger, + String messageChannelSuffix = '', + }) { + messageChannelSuffix = + messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + { + final BasicMessageChannel __pigeon_channel = BasicMessageChannel< + Object?>( + 'dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged$messageChannelSuffix', + pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + __pigeon_channel.setMessageHandler(null); + } else { + __pigeon_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null.'); + final List args = (message as List?)!; + final PlatformDeviceOrientation? arg_orientation = args[0] == null + ? null + : PlatformDeviceOrientation.values[args[0]! as int]; + assert(arg_orientation != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged was null, expected non-null PlatformDeviceOrientation.'); + try { + api.deviceOrientationChanged(arg_orientation!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse( + error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + } +} + +class _CameraEventApiCodec extends StandardMessageCodec { + const _CameraEventApiCodec(); + @override + void writeValue(WriteBuffer buffer, Object? value) { + if (value is PlatformCameraState) { + buffer.putUint8(128); + writeValue(buffer, value.encode()); + } else if (value is PlatformSize) { + buffer.putUint8(129); + writeValue(buffer, value.encode()); + } else { + super.writeValue(buffer, value); + } + } + + @override + Object? readValueOfType(int type, ReadBuffer buffer) { + switch (type) { + case 128: + return PlatformCameraState.decode(readValue(buffer)!); + case 129: + return PlatformSize.decode(readValue(buffer)!); + default: + return super.readValueOfType(type, buffer); + } + } +} + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +abstract class CameraEventApi { + static const MessageCodec pigeonChannelCodec = + _CameraEventApiCodec(); + + /// Called when the camera is inialitized for use. + void initialized(PlatformCameraState initialState); + + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. 
+ void error(String message); + + static void setUp( + CameraEventApi? api, { + BinaryMessenger? binaryMessenger, + String messageChannelSuffix = '', + }) { + messageChannelSuffix = + messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : ''; + { + final BasicMessageChannel __pigeon_channel = BasicMessageChannel< + Object?>( + 'dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized$messageChannelSuffix', + pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + __pigeon_channel.setMessageHandler(null); + } else { + __pigeon_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null.'); + final List args = (message as List?)!; + final PlatformCameraState? arg_initialState = + (args[0] as PlatformCameraState?); + assert(arg_initialState != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized was null, expected non-null PlatformCameraState.'); + try { + api.initialized(arg_initialState!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse( + error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + { + final BasicMessageChannel __pigeon_channel = BasicMessageChannel< + Object?>( + 'dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error$messageChannelSuffix', + pigeonChannelCodec, + binaryMessenger: binaryMessenger); + if (api == null) { + __pigeon_channel.setMessageHandler(null); + } else { + __pigeon_channel.setMessageHandler((Object? message) async { + assert(message != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null.'); + final List args = (message as List?)!; + final String? arg_message = (args[0] as String?); + assert(arg_message != null, + 'Argument for dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error was null, expected non-null String.'); + try { + api.error(arg_message!); + return wrapResponse(empty: true); + } on PlatformException catch (e) { + return wrapResponse(error: e); + } catch (e) { + return wrapResponse( + error: PlatformException(code: 'error', message: e.toString())); + } + }); + } + } + } +} diff --git a/packages/camera/camera_avfoundation/lib/src/utils.dart b/packages/camera/camera_avfoundation/lib/src/utils.dart index 7ebd84cb0da6..5c38f809ebaf 100644 --- a/packages/camera/camera_avfoundation/lib/src/utils.dart +++ b/packages/camera/camera_avfoundation/lib/src/utils.dart @@ -47,18 +47,30 @@ String serializeDeviceOrientation(DeviceOrientation orientation) { return 'portraitUp'; } -/// Returns the device orientation for a given String. -DeviceOrientation deserializeDeviceOrientation(String str) { - switch (str) { - case 'portraitUp': - return DeviceOrientation.portraitUp; - case 'portraitDown': - return DeviceOrientation.portraitDown; - case 'landscapeRight': - return DeviceOrientation.landscapeRight; - case 'landscapeLeft': - return DeviceOrientation.landscapeLeft; - default: - throw ArgumentError('"$str" is not a valid DeviceOrientation value'); - } +/// Converts a Pigeon [PlatformDeviceOrientation] to a [DeviceOrientation]. 
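
The generated `setUp` methods above show how the suffix-based instantiation works on the Dart side: a non-empty `messageChannelSuffix` is prefixed with a dot and appended to each channel name, which is how one `CameraEventApi` handler per camera ID can coexist. A small sketch of that composition, where `channelNameFor` is a hypothetical helper but the base string is the real channel name from the generated code:

// Sketch of how the generated setUp composes per-instance channel names.
String channelNameFor(String base, String messageChannelSuffix) {
  final String suffix =
      messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : '';
  return '$base$suffix';
}

void main() {
  const String base =
      'dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized';
  // Each HostCameraMessageHandler registers with its camera ID as the suffix,
  // so camera 1 and camera 2 listen on distinct channels.
  print(channelNameFor(base, '1')); // ...CameraEventApi.initialized.1
  print(channelNameFor(base, '2')); // ...CameraEventApi.initialized.2
}
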
+DeviceOrientation deviceOrientationFromPlatform( + PlatformDeviceOrientation orientation) { + return switch (orientation) { + PlatformDeviceOrientation.portraitUp => DeviceOrientation.portraitUp, + PlatformDeviceOrientation.portraitDown => DeviceOrientation.portraitDown, + PlatformDeviceOrientation.landscapeLeft => DeviceOrientation.landscapeLeft, + PlatformDeviceOrientation.landscapeRight => + DeviceOrientation.landscapeRight, + }; +} + +/// Converts a Pigeon [PlatformExposureMode] to an [ExposureMode]. +ExposureMode exposureModeFromPlatform(PlatformExposureMode mode) { + return switch (mode) { + PlatformExposureMode.auto => ExposureMode.auto, + PlatformExposureMode.locked => ExposureMode.locked, + }; +} + +/// Converts a Pigeon [PlatformFocusMode] to an [FocusMode]. +FocusMode focusModeFromPlatform(PlatformFocusMode mode) { + return switch (mode) { + PlatformFocusMode.auto => FocusMode.auto, + PlatformFocusMode.locked => FocusMode.locked, + }; } diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index f973e037576b..e88b9cc7aefb 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -24,6 +24,26 @@ enum PlatformCameraLensDirection { external, } +// Pigeon version of DeviceOrientation. +enum PlatformDeviceOrientation { + portraitUp, + landscapeLeft, + portraitDown, + landscapeRight, +} + +// Pigeon version of ExposureMode. +enum PlatformExposureMode { + auto, + locked, +} + +// Pigeon version of FocusMode. +enum PlatformFocusMode { + auto, + locked, +} + // Pigeon version of CameraDescription. class PlatformCameraDescription { PlatformCameraDescription({ @@ -38,6 +58,40 @@ class PlatformCameraDescription { final PlatformCameraLensDirection lensDirection; } +// Pigeon version of the data needed for a CameraInitializedEvent. +class PlatformCameraState { + PlatformCameraState({ + required this.previewSize, + required this.exposureMode, + required this.focusMode, + required this.exposurePointSupported, + required this.focusPointSupported, + }); + + /// The size of the preview, in pixels. + final PlatformSize previewSize; + + /// The default exposure mode + final PlatformExposureMode exposureMode; + + /// The default focus mode + final PlatformFocusMode focusMode; + + /// Whether setting exposure points is supported. + final bool exposurePointSupported; + + /// Whether setting focus points is supported. + final bool focusPointSupported; +} + +// Pigeon equivalent of CGSize. +class PlatformSize { + PlatformSize({required this.width, required this.height}); + + final double width; + final double height; +} + @HostApi() abstract class CameraApi { /// Returns the list of available cameras. @@ -48,3 +102,27 @@ abstract class CameraApi { @ObjCSelector('availableCamerasWithCompletion') List getAvailableCameras(); } + +/// Handler for native callbacks that are not tied to a specific camera ID. +@FlutterApi() +abstract class CameraGlobalEventApi { + /// Called when the device's physical orientation changes. + void deviceOrientationChanged(PlatformDeviceOrientation orientation); +} + +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +@FlutterApi() +abstract class CameraEventApi { + /// Called when the camera is inialitized for use. 
+ @ObjCSelector('initializedWithState:') + void initialized(PlatformCameraState initialState); + + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. + @ObjCSelector('reportError:') + void error(String message); +} diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 8dd2b6663a3e..7cec9cd06777 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.15+3 +version: 0.9.15+4 environment: sdk: ^3.2.3 diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart index 9b8379eb08eb..5ee2dabb02ae 100644 --- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart +++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart @@ -30,24 +30,6 @@ void main() { expect(CameraPlatform.instance, isA()); }); - test('registration does not set message handlers', () async { - AVFoundationCamera.registerWith(); - - // Setting up a handler requires bindings to be initialized, and since - // registerWith is called very early in initialization the bindings won't - // have been initialized. While registerWith could initialize them, that - // could slow down startup, so instead the handler should be set up lazily. - final ByteData? response = await TestDefaultBinaryMessengerBinding - .instance.defaultBinaryMessenger - .handlePlatformMessage( - AVFoundationCamera.deviceEventChannelName, - const StandardMethodCodec().encodeMethodCall(const MethodCall( - 'orientation_changed', - {'orientation': 'portraitDown'})), - (ByteData? 
data) {}); - expect(response, null); - }); - group('Creation, Initialization & Disposal Tests', () { test('Should send creation data and receive back a camera id', () async { // Arrange @@ -361,77 +343,27 @@ void main() { final StreamQueue streamQueue = StreamQueue(eventStream); + final PlatformSize previewSize = PlatformSize(width: 3840, height: 2160); // Emit test events final CameraInitializedEvent event = CameraInitializedEvent( cameraId, - 3840, - 2160, + previewSize.width, + previewSize.height, ExposureMode.auto, true, FocusMode.auto, true, ); - await camera.handleCameraMethodCall( - MethodCall('initialized', event.toJson()), cameraId); - - // Assert - expect(await streamQueue.next, event); - - // Clean up - await streamQueue.cancel(); - }); - - test('Should receive resolution changes', () async { - // Act - final Stream resolutionStream = - camera.onCameraResolutionChanged(cameraId); - final StreamQueue streamQueue = - StreamQueue(resolutionStream); - - // Emit test events - final CameraResolutionChangedEvent fhdEvent = - CameraResolutionChangedEvent(cameraId, 1920, 1080); - final CameraResolutionChangedEvent uhdEvent = - CameraResolutionChangedEvent(cameraId, 3840, 2160); - await camera.handleCameraMethodCall( - MethodCall('resolution_changed', fhdEvent.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('resolution_changed', uhdEvent.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('resolution_changed', fhdEvent.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('resolution_changed', uhdEvent.toJson()), cameraId); - - // Assert - expect(await streamQueue.next, fhdEvent); - expect(await streamQueue.next, uhdEvent); - expect(await streamQueue.next, fhdEvent); - expect(await streamQueue.next, uhdEvent); - - // Clean up - await streamQueue.cancel(); - }); - - test('Should receive camera closing events', () async { - // Act - final Stream eventStream = - camera.onCameraClosing(cameraId); - final StreamQueue streamQueue = - StreamQueue(eventStream); - - // Emit test events - final CameraClosingEvent event = CameraClosingEvent(cameraId); - await camera.handleCameraMethodCall( - MethodCall('camera_closing', event.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('camera_closing', event.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('camera_closing', event.toJson()), cameraId); + camera.hostCameraHandlers[cameraId]!.initialized(PlatformCameraState( + previewSize: previewSize, + exposureMode: PlatformExposureMode.auto, + focusMode: PlatformFocusMode.auto, + exposurePointSupported: true, + focusPointSupported: true, + )); // Assert expect(await streamQueue.next, event); - expect(await streamQueue.next, event); - expect(await streamQueue.next, event); // Clean up await streamQueue.cancel(); @@ -445,14 +377,11 @@ void main() { StreamQueue(errorStream); // Emit test events - final CameraErrorEvent event = - CameraErrorEvent(cameraId, 'Error Description'); - await camera.handleCameraMethodCall( - MethodCall('error', event.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('error', event.toJson()), cameraId); - await camera.handleCameraMethodCall( - MethodCall('error', event.toJson()), cameraId); + const String errorMessage = 'Error Description'; + final CameraErrorEvent event = CameraErrorEvent(cameraId, errorMessage); + camera.hostCameraHandlers[cameraId]!.error(errorMessage); + camera.hostCameraHandlers[cameraId]!.error(errorMessage); + 
camera.hostCameraHandlers[cameraId]!.error(errorMessage); // Assert expect(await streamQueue.next, event); @@ -474,12 +403,8 @@ void main() { const DeviceOrientationChangedEvent event = DeviceOrientationChangedEvent(DeviceOrientation.portraitUp); for (int i = 0; i < 3; i++) { - await TestDefaultBinaryMessengerBinding.instance.defaultBinaryMessenger - .handlePlatformMessage( - AVFoundationCamera.deviceEventChannelName, - const StandardMethodCodec().encodeMethodCall( - MethodCall('orientation_changed', event.toJson())), - null); + camera.hostHandler + .deviceOrientationChanged(PlatformDeviceOrientation.portraitUp); } // Assert @@ -969,16 +894,6 @@ void main() { expect((widget as Texture).textureId, cameraId); }); - test('Should throw MissingPluginException when handling unknown method', - () { - final AVFoundationCamera camera = AVFoundationCamera(); - - expect( - () => camera.handleCameraMethodCall( - const MethodCall('unknown_method'), 1), - throwsA(isA())); - }); - test('Should get the max zoom level', () async { // Arrange final MethodChannelMock channel = MethodChannelMock( diff --git a/packages/camera/camera_avfoundation/test/utils_test.dart b/packages/camera/camera_avfoundation/test/utils_test.dart index 06babfb53b51..53fc72b43dcc 100644 --- a/packages/camera/camera_avfoundation/test/utils_test.dart +++ b/packages/camera/camera_avfoundation/test/utils_test.dart @@ -36,14 +36,20 @@ void main() { 'landscapeLeft'); }); - test('deserializeDeviceOrientation() should deserialize correctly', () { - expect(deserializeDeviceOrientation('portraitUp'), + test('deviceOrientationFromPlatform() should convert correctly', () { + expect( + deviceOrientationFromPlatform(PlatformDeviceOrientation.portraitUp), DeviceOrientation.portraitUp); - expect(deserializeDeviceOrientation('portraitDown'), + expect( + deviceOrientationFromPlatform(PlatformDeviceOrientation.portraitDown), DeviceOrientation.portraitDown); - expect(deserializeDeviceOrientation('landscapeRight'), + expect( + deviceOrientationFromPlatform( + PlatformDeviceOrientation.landscapeRight), DeviceOrientation.landscapeRight); - expect(deserializeDeviceOrientation('landscapeLeft'), + expect( + deviceOrientationFromPlatform( + PlatformDeviceOrientation.landscapeLeft), DeviceOrientation.landscapeLeft); }); });
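
The new conversion helpers in `utils.dart` (exercised by the updated `utils_test.dart` above) replace string parsing with exhaustive switch expressions over the Pigeon enums, so an unmapped value becomes an analyzer error instead of a runtime `ArgumentError`. A small sketch of the same idea with a hypothetical enum pair, not the package's types:

// Hypothetical enums standing in for a Pigeon enum and its platform-interface
// counterpart; the names are illustrative only.
enum WireMode { auto, locked }

enum ApiMode { auto, locked }

// A switch *expression* must cover every WireMode value, so a newly added
// wire value fails analysis here instead of throwing at runtime.
ApiMode apiModeFromWire(WireMode mode) {
  return switch (mode) {
    WireMode.auto => ApiMode.auto,
    WireMode.locked => ApiMode.locked,
  };
}

void main() {
  print(apiModeFromWire(WireMode.locked)); // ApiMode.locked
}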
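
One convention worth calling out across both generated files: FlutterApi replies travel in a small list envelope. `wrapResponse` in `messages.g.dart` returns an empty list for a successful void call, a one-element list for a result, and a `[code, message, details]` triple for an error, which is exactly what the Obj-C reply blocks in `messages.g.m` test with `reply.count > 1`. A standalone sketch of decoding that envelope (the names here are illustrative, not part of the generated API):

// Sketch of the reply-envelope convention used by the generated code.
class EnvelopeError implements Exception {
  EnvelopeError(this.code, this.message, this.details);
  final Object? code;
  final Object? message;
  final Object? details;
  @override
  String toString() => 'EnvelopeError($code, $message, $details)';
}

/// Decodes a reply list: empty = void success, one element = a result,
/// three elements = an error triple.
Object? decodeEnvelope(List<Object?> reply) {
  if (reply.length > 1) {
    throw EnvelopeError(reply[0], reply[1], reply[2]);
  }
  return reply.isEmpty ? null : reply[0];
}

void main() {
  print(decodeEnvelope(<Object?>[])); // null (void success)
  print(decodeEnvelope(<Object?>['ok'])); // ok
  try {
    decodeEnvelope(<Object?>['error', 'boom', null]);
  } on EnvelopeError catch (e) {
    print(e); // EnvelopeError(error, boom, null)
  }
}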