From c4f140ad5e98f7691a45a72a9164f7d89585abde Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Sun, 8 Feb 2026 20:20:30 +0100 Subject: [PATCH] Migrate to Swift-based pigeon interface --- .../camera/camera_avfoundation/CHANGELOG.md | 4 + .../RunnerTests/AvailableCamerasTests.swift | 36 +- .../CameraInitRaceConditionsTests.swift | 14 +- .../CameraMethodChannelTests.swift | 23 +- .../RunnerTests/CameraPermissionTests.swift | 50 +- .../CameraPluginCreateCameraTests.swift | 30 +- .../CameraPluginDelegatingMethodTests.swift | 293 +++- .../CameraPluginInitializeCameraTests.swift | 44 +- .../RunnerTests/CameraPropertiesTests.swift | 10 +- .../CameraSessionPresetsTests.swift | 6 +- .../ios/RunnerTests/CameraSettingsTests.swift | 75 +- .../ios/RunnerTests/CameraTestUtils.swift | 10 +- .../ios/RunnerTests/FLTCamExposureTests.swift | 22 +- .../ios/RunnerTests/FLTCamFocusTests.swift | 22 +- .../FLTCamSetDeviceOrientationTests.swift | 2 +- .../RunnerTests/FLTCamSetFlashModeTests.swift | 68 +- .../ios/RunnerTests/FLTCamZoomTests.swift | 30 +- .../ios/RunnerTests/Mocks/MockCamera.swift | 62 +- .../MockFLTCameraPermissionManager.swift | 4 +- .../Mocks/MockGlobalEventApi.swift | 17 +- .../ios/RunnerTests/PhotoCaptureTests.swift | 60 +- .../ios/RunnerTests/SampleBufferTests.swift | 25 +- .../ios/camera_avfoundation.podspec | 5 +- .../ios/camera_avfoundation/Package.swift | 16 +- .../Sources/camera_avfoundation/Camera.swift | 34 +- .../CameraConfiguration.swift | 4 +- .../CameraPermissionManager.swift | 14 +- .../camera_avfoundation/CameraPlugin.swift | 200 ++- .../CameraProperties.swift | 10 +- .../camera_avfoundation/DefaultCamera.swift | 260 ++-- .../camera_avfoundation/FormatUtils.swift | 4 +- .../camera_avfoundation/Messages.swift | 1323 ++++++++++++++++ .../Resources/PrivacyInfo.xcprivacy | 14 - .../camera_avfoundation/camera_avfoundation.h | 5 - .../include/camera_avfoundation/messages.g.h | 334 ---- .../camera_avfoundation_objc/messages.g.m | 1367 ----------------- .../camera_avfoundation/pigeons/messages.dart | 10 +- .../camera/camera_avfoundation/pubspec.yaml | 2 +- 38 files changed, 2180 insertions(+), 2329 deletions(-) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/Resources/PrivacyInfo.xcprivacy delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/messages.g.h delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/messages.g.m diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index f66df6476e7d..2f27875cfea7 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.9.23+3 + +* Migrates to Swift-based pigeon interface. + ## 0.9.23+2 * Code refactor related to Swift pigeon's generated struct MediaSettings being immutable. 
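Note (illustrative, not part of the patch): the migration below replaces the ObjC pigeon call shape, where completions received an optional value plus an optional FlutterError, with the Swift pigeon call shape, where completions receive a single Result. A minimal sketch using the availableCameras/getAvailableCameras API names taken from this diff:

// Before (ObjC pigeon): completion passes two optionals.
cameraPlugin.availableCameras { cameras, error in
  // inspect `error`, then use `cameras`
}

// After (Swift pigeon): completion passes one Result.
cameraPlugin.getAvailableCameras { result in
  switch result {
  case .success(let cameras):
    // use `cameras`
  case .failure(let error):
    // handle `error`
  }
}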
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift index 13038b6bf48b..d03bcd99d7cf 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTests.swift @@ -62,10 +62,14 @@ final class AvailableCamerasTest: XCTestCase { return cameras } - var resultValue: [FCPPlatformCameraDescription]? - cameraPlugin.availableCameras { result, error in - XCTAssertNil(error) - resultValue = result + var resultValue: [PlatformCameraDescription]? + cameraPlugin.getAvailableCameras { result in + switch result { + case .success(let result): + resultValue = result + case .failure(_): + XCTFail("Unexpected failure") + } expectation.fulfill() } waitForExpectations(timeout: 30, handler: nil) @@ -100,10 +104,14 @@ final class AvailableCamerasTest: XCTestCase { return cameras } - var resultValue: [FCPPlatformCameraDescription]? - cameraPlugin.availableCameras { result, error in - XCTAssertNil(error) - resultValue = result + var resultValue: [PlatformCameraDescription]? + cameraPlugin.getAvailableCameras { result in + switch result { + case .success(let result): + resultValue = result + case .failure(_): + XCTFail("Unexpected failure") + } expectation.fulfill() } waitForExpectations(timeout: 30, handler: nil) @@ -133,10 +141,14 @@ final class AvailableCamerasTest: XCTestCase { return cameras } - var resultValue: [FCPPlatformCameraDescription]? - cameraPlugin.availableCameras { result, error in - XCTAssertNil(error) - resultValue = result + var resultValue: [PlatformCameraDescription]? + cameraPlugin.getAvailableCameras { result in + switch result { + case .success(let result): + resultValue = result + case .failure(_): + XCTFail("Unexpected failure") + } expectation.fulfill() } waitForExpectations(timeout: 30, handler: nil) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraInitRaceConditionsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraInitRaceConditionsTests.swift index 533b385755db..790d8d57c266 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraInitRaceConditionsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraInitRaceConditionsTests.swift @@ -37,20 +37,20 @@ final class CameraInitRaceConditionsTests: XCTestCase { // Mimic a dispose call followed by a create call, which can be triggered by slightly dragging the // home bar, causing the app to be inactive, and immediately regain active. 
- cameraPlugin.disposeCamera(0) { error in + cameraPlugin.dispose(cameraId: 0) { error in disposeExpectation.fulfill() } cameraPlugin.createCameraOnSessionQueue( withName: "acamera", - settings: FCPPlatformMediaSettings.make( - with: .medium, + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true ) - ) { result, error in + ) { result in createExpectation.fulfill() } @@ -69,14 +69,14 @@ final class CameraInitRaceConditionsTests: XCTestCase { cameraPlugin.createCameraOnSessionQueue( withName: "acamera", - settings: FCPPlatformMediaSettings.make( - with: .medium, + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true ) - ) { result, error in + ) { result in createExpectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift index d55429a7c423..f8120383d2a3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.swift @@ -33,18 +33,23 @@ final class CameraMethodChannelTests: XCTestCase { let camera = createCameraPlugin(with: avCaptureSessionMock) let expectation = self.expectation(description: "Result finished") - var resultValue: NSNumber? + var resultValue: Int64? camera.createCameraOnSessionQueue( withName: "acamera", - settings: FCPPlatformMediaSettings.make( - with: FCPPlatformResolutionPreset.medium, + settings: PlatformMediaSettings( + resolutionPreset: PlatformResolutionPreset.medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true ) - ) { result, error in - resultValue = result + ) { result in + switch result { + case .success(let result): + resultValue = result + case .failure(_): + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -60,14 +65,14 @@ final class CameraMethodChannelTests: XCTestCase { camera.createCameraOnSessionQueue( withName: "acamera", - settings: FCPPlatformMediaSettings.make( - with: .medium, + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true ) - ) { result, error in + ) { result in createExpectation.fulfill() } @@ -75,7 +80,7 @@ final class CameraMethodChannelTests: XCTestCase { XCTAssertNotNil(camera.camera) let disposeExpectation = self.expectation(description: "dispose's result block must be called") - camera.disposeCamera(0) { error in + camera.dispose(cameraId: 0) { error in disposeExpectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift index 6d21fd3792f7..33f56feee05f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.swift @@ -57,18 +57,17 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if camera access was previously denied.") - let expectedError = FlutterError( - code: "CameraAccessDeniedWithoutPrompt", - message: - 
"User has previously denied the camera access request. Go to Settings to enable camera access.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .video) return .denied } permissionManager.requestCameraPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "CameraAccessDeniedWithoutPrompt") + XCTAssertEqual( + error?.message, + "User has previously denied the camera access request. Go to Settings to enable camera access." + ) expectation.fulfill() } @@ -79,17 +78,14 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if camera access is restricted.") - let expectedError = FlutterError( - code: "CameraAccessRestricted", - message: "Camera access is restricted.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .video) return .restricted } permissionManager.requestCameraPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "CameraAccessRestricted") + XCTAssertEqual(error?.message, "Camera access is restricted.") expectation.fulfill() } @@ -122,10 +118,6 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if user denied access.") - let expectedError = FlutterError( - code: "CameraAccessDenied", - message: "User denied the camera access request.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .video) @@ -137,7 +129,8 @@ final class CameraPermissionManagerTests: XCTestCase { handler(false) } permissionManager.requestCameraPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "CameraAccessDenied") + XCTAssertEqual(error?.message, "User denied the camera access request.") expectation.fulfill() } @@ -167,18 +160,17 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if audio access was previously denied.") - let expectedError = FlutterError( - code: "AudioAccessDeniedWithoutPrompt", - message: - "User has previously denied the audio access request. Go to Settings to enable audio access.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .audio) return .denied } permissionManager.requestAudioPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "AudioAccessDeniedWithoutPrompt") + XCTAssertEqual( + error?.message, + "User has previously denied the audio access request. Go to Settings to enable audio access." 
+ ) expectation.fulfill() } @@ -189,17 +181,14 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if audio access is restricted.") - let expectedError = FlutterError( - code: "AudioAccessRestricted", - message: "Audio access is restricted.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .audio) return .restricted } permissionManager.requestAudioPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "AudioAccessRestricted") + XCTAssertEqual(error?.message, "Audio access is restricted.") expectation.fulfill() } @@ -232,10 +221,6 @@ final class CameraPermissionManagerTests: XCTestCase { let (permissionManager, mockPermissionService) = createSutAndMocks() let expectation = self.expectation( description: "Must complete with error if user denied access") - let expectedError = FlutterError( - code: "AudioAccessDenied", - message: "User denied the audio access request.", - details: nil) mockPermissionService.authorizationStatusStub = { mediaType in XCTAssertEqual(mediaType, .audio) @@ -247,7 +232,8 @@ final class CameraPermissionManagerTests: XCTestCase { handler(false) } permissionManager.requestAudioPermission { error in - XCTAssertEqual(error, expectedError) + XCTAssertEqual(error?.code, "AudioAccessDenied") + XCTAssertEqual(error?.message, "User denied the audio access request.") expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift index 79abd5ccdbc1..006b84f84a9c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginCreateCameraTests.swift @@ -50,15 +50,15 @@ final class CameraPluginCreateCameraTests: XCTestCase { completion(nil) } - cameraPlugin.createCamera( - withName: "camera_name", - settings: FCPPlatformMediaSettings.make( - with: .medium, + cameraPlugin.create( + cameraName: "camera_name", + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: false) - ) { result, error in + ) { result in expectation.fulfill() } @@ -85,15 +85,15 @@ final class CameraPluginCreateCameraTests: XCTestCase { completion(nil) } - cameraPlugin.createCamera( - withName: "camera_name", - settings: FCPPlatformMediaSettings.make( - with: .medium, + cameraPlugin.create( + cameraName: "camera_name", + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true) - ) { result, error in + ) { result in expectation.fulfill() } @@ -117,15 +117,15 @@ final class CameraPluginCreateCameraTests: XCTestCase { } mockCaptureSession.canSetSessionPresetStub = { _ in true } - cameraPlugin.createCamera( - withName: "camera_name", - settings: FCPPlatformMediaSettings.make( - with: .medium, + cameraPlugin.create( + cameraName: "camera_name", + settings: PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, enableAudio: true) - ) { result, error in + ) { result in expectation.fulfill() } diff --git 
a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift index 14db9a7d478b..1b302cb0f842 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginDelegatingMethodTests.swift @@ -36,7 +36,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let targetOrientation = FCPPlatformDeviceOrientation.landscapeLeft + let targetOrientation = PlatformDeviceOrientation.landscapeLeft var lockCaptureCalled = false mockCamera.lockCaptureOrientationStub = { orientation in @@ -44,8 +44,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { lockCaptureCalled = true } - cameraPlugin.lockCapture(targetOrientation) { error in - XCTAssertNil(error) + cameraPlugin.lockCaptureOrientation(orientation: targetOrientation) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -63,8 +68,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { pausePreviewCalled = true } - cameraPlugin.pausePreview { error in - XCTAssertNil(error) + cameraPlugin.pausePreview { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -82,8 +92,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { pauseVideoRecordingCalled = true } - cameraPlugin.pauseVideoRecording { error in - XCTAssertNil(error) + cameraPlugin.pauseVideoRecording { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -101,8 +116,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { setUpCaptureSessionForAudioIfNeededCalled = true } - cameraPlugin.prepareForVideoRecording { error in - XCTAssertNil(error) + cameraPlugin.prepareForVideoRecording { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -120,8 +140,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { receivedImageStreamDataCalled = true } - cameraPlugin.receivedImageStreamData { error in - XCTAssertNil(error) + cameraPlugin.receivedImageStreamData { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -139,8 +164,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { resumeVideoRecordingCalled = true } - cameraPlugin.resumeVideoRecording { error in - XCTAssertNil(error) + cameraPlugin.resumeVideoRecording { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -158,8 +188,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { resumePreviewCalled = true } - cameraPlugin.resumePreview { error in - XCTAssertNil(error) + cameraPlugin.resumePreview { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -172,7 +207,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let 
targetExposureMode = FCPPlatformExposureMode.locked + let targetExposureMode = PlatformExposureMode.locked var setExposureModeCalled = false mockCamera.setExposureModeStub = { mode in @@ -180,8 +215,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { setExposureModeCalled = true } - cameraPlugin.setExposureMode(targetExposureMode) { error in - XCTAssertNil(error) + cameraPlugin.setExposureMode(mode: targetExposureMode) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -202,8 +242,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { setExposureOffsetCalled = true } - cameraPlugin.setExposureOffset(targetExposureOffset) { error in - XCTAssertNil(error) + cameraPlugin.setExposureOffset(offset: targetExposureOffset) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -216,7 +261,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let targetFocusMode = FCPPlatformFocusMode.locked + let targetFocusMode = PlatformFocusMode.locked var setFocusModeCalled = false mockCamera.setFocusModeStub = { mode in @@ -224,8 +269,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { setFocusModeCalled = true } - cameraPlugin.setFocusMode(targetFocusMode) { error in - XCTAssertNil(error) + cameraPlugin.setFocusMode(mode: targetFocusMode) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -238,7 +288,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let targetFileFormat = FCPPlatformImageFileFormat.heif + let targetFileFormat = PlatformImageFileFormat.heif var setImageFileFormatCalled = false mockCamera.setImageFileFormatStub = { fileFormat in @@ -246,8 +296,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { setImageFileFormatCalled = true } - cameraPlugin.setImageFileFormat(targetFileFormat) { error in - XCTAssertNil(error) + cameraPlugin.setImageFileFormat(format: targetFileFormat) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -263,11 +318,16 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var startImageStreamCalled = false mockCamera.startImageStreamStub = { messenger, completion in startImageStreamCalled = true - completion(nil) + completion(.success(())) } - cameraPlugin.startImageStream { error in - XCTAssertNil(error) + cameraPlugin.startImageStream { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -285,8 +345,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { stopImageStreamCalled = true } - cameraPlugin.stopImageStream { error in - XCTAssertNil(error) + cameraPlugin.stopImageStream { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -302,12 +367,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var startVideoRecordingCalled = false mockCamera.startVideoRecordingStub = { completion, messenger in XCTAssertNotNil(messenger) - completion(nil) + 
completion(.success(())) startVideoRecordingCalled = true } - cameraPlugin.startVideoRecording(withStreaming: true) { error in - XCTAssertNil(error) + cameraPlugin.startVideoRecording(enableStream: true) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -323,12 +393,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var startVideoRecordingCalled = false mockCamera.startVideoRecordingStub = { completion, messenger in XCTAssertNil(messenger) - completion(nil) + completion(.success(())) startVideoRecordingCalled = true } - cameraPlugin.startVideoRecording(withStreaming: false) { error in - XCTAssertNil(error) + cameraPlugin.startVideoRecording(enableStream: false) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -345,13 +420,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var stopVideoRecordingCalled = false mockCamera.stopVideoRecordingStub = { completion in - completion?(targetPath, nil) + completion(.success(targetPath)) stopVideoRecordingCalled = true } - cameraPlugin.stopVideoRecording { path, error in - XCTAssertEqual(path, targetPath) - XCTAssertNil(error) + cameraPlugin.stopVideoRecording { result in + switch result { + case .success(let path): + XCTAssertEqual(path, targetPath) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -369,8 +448,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { unlockCaptureOrientationCalled = true } - cameraPlugin.unlockCaptureOrientation { error in - XCTAssertNil(error) + cameraPlugin.unlockCaptureOrientation { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -383,17 +467,22 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let targetExposurePoint = FCPPlatformPoint.makeWith(x: 1.0, y: 1.0) + let targetExposurePoint = PlatformPoint(x: 1.0, y: 1.0) var setExposurePointCalled = false mockCamera.setExposurePointStub = { point, completion in XCTAssertEqual(point, targetExposurePoint) - completion?(nil) + completion(.success(())) setExposurePointCalled = true } - cameraPlugin.setExposurePoint(targetExposurePoint) { error in - XCTAssertNil(error) + cameraPlugin.setExposurePoint(point: targetExposurePoint) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -406,17 +495,22 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") - let targetFlashMode = FCPPlatformFlashMode.auto + let targetFlashMode = PlatformFlashMode.auto var setFlashModeCalled = false mockCamera.setFlashModeStub = { mode, completion in XCTAssertEqual(mode, targetFlashMode) - completion?(nil) + completion(.success(())) setFlashModeCalled = true } - cameraPlugin.setFlashMode(targetFlashMode) { error in - XCTAssertNil(error) + cameraPlugin.setFlashMode(mode: targetFlashMode) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -429,17 +523,22 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { let (cameraPlugin, mockCamera) = createCameraPlugin() let 
expectation = expectation(description: "Call completed") - let targetFocusPoint = FCPPlatformPoint.makeWith(x: 1.0, y: 1.0) + let targetFocusPoint = PlatformPoint(x: 1.0, y: 1.0) var setFocusPointCalled = false mockCamera.setFocusPointStub = { point, completion in XCTAssertEqual(point, targetFocusPoint) - completion?(nil) + completion(.success(())) setFocusPointCalled = true } - cameraPlugin.setFocus(targetFocusPoint) { error in - XCTAssertNil(error) + cameraPlugin.setFocusPoint(point: targetFocusPoint) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -457,12 +556,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var setZoomLevelCalled = false mockCamera.setZoomLevelStub = { zoom, completion in XCTAssertEqual(zoom, targetZoomLevel) - completion?(nil) + completion(.success(())) setZoomLevelCalled = true } - cameraPlugin.setZoomLevel(targetZoomLevel) { error in - XCTAssertNil(error) + cameraPlugin.setZoomLevel(zoom: targetZoomLevel) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -479,13 +583,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var captureToFileCalled = false mockCamera.captureToFileStub = { completion in - completion?(targetPath, nil) + completion(.success(targetPath)) captureToFileCalled = true } - cameraPlugin.takePicture { path, error in - XCTAssertEqual(path, targetPath) - XCTAssertNil(error) + cameraPlugin.takePicture { result in + switch result { + case .success(let path): + XCTAssertEqual(path, targetPath) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -494,7 +602,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { XCTAssertTrue(captureToFileCalled) } - func testUpdateDescriptionWhileRecordingCameraName_callsCameraSetDescriptionWhileRecording() { + func testUpdateDescriptionWhileRecording_callsCameraSetDescriptionWhileRecording() { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") @@ -503,12 +611,17 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { var setDescriptionWhileRecordingCalled = false mockCamera.setDescriptionWhileRecordingStub = { cameraName, completion in XCTAssertEqual(cameraName, targetCameraName) - completion?(nil) + completion(.success(())) setDescriptionWhileRecordingCalled = true } - cameraPlugin.updateDescriptionWhileRecordingCameraName(targetCameraName) { error in - XCTAssertNil(error) + cameraPlugin.updateDescriptionWhileRecording(cameraName: targetCameraName) { result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -517,7 +630,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { XCTAssertTrue(setDescriptionWhileRecordingCalled) } - func testGetMaximumZoomLevel_returnsValueFromCameraGetMaximumAvailableZoomFactor() { + func testGetMaxZoomLevel_returnsValueFromCameraGetMaximumAvailableZoomFactor() { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") @@ -529,9 +642,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { return targetMaximumZoomLevel } - cameraPlugin.getMaximumZoomLevel { zoom, error in - XCTAssertEqual(zoom?.doubleValue, targetMaximumZoomLevel) - XCTAssertNil(error) + cameraPlugin.getMaxZoomLevel { result in + switch result { + case .success(let zoom): + 
XCTAssertEqual(zoom, targetMaximumZoomLevel) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -540,7 +657,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { XCTAssertTrue(getMaximumAvailableZoomFactorCalled) } - func testGetMinimumZoomLevel_returnsValueFromCameraGetMinimumAvailableZoomFactor() { + func testGetMinZoomLevel_returnsValueFromCameraGetMinimumAvailableZoomFactor() { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") @@ -552,9 +669,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { return targetMinimumZoomLevel } - cameraPlugin.getMinimumZoomLevel { zoom, error in - XCTAssertEqual(zoom?.doubleValue, targetMinimumZoomLevel) - XCTAssertNil(error) + cameraPlugin.getMinZoomLevel { result in + switch result { + case .success(let zoom): + XCTAssertEqual(zoom, targetMinimumZoomLevel) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -563,7 +684,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { XCTAssertTrue(getMinimumAvailableZoomFactorCalled) } - func testGetMaximumExposureOffset_returnsValueFromCameraGetMaximumExposureOffset() { + func testGetMaxExposureOffset_returnsValueFromCameraGetMaximumExposureOffset() { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") @@ -575,9 +696,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { return targetMaximumExposureOffset } - cameraPlugin.getMaximumExposureOffset { offset, error in - XCTAssertEqual(offset?.doubleValue, targetMaximumExposureOffset) - XCTAssertNil(error) + cameraPlugin.getMaxExposureOffset { result in + switch result { + case .success(let offset): + XCTAssertEqual(offset, targetMaximumExposureOffset) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } @@ -586,7 +711,7 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { XCTAssertTrue(getMaximumExposureOffsetCalled) } - func testGetMinimumExposureOffset_returnsValueFromCameraGetMinimumExposureOffset() { + func testGetMinExposureOffset_returnsValueFromCameraGetMinimumExposureOffset() { let (cameraPlugin, mockCamera) = createCameraPlugin() let expectation = expectation(description: "Call completed") @@ -598,9 +723,13 @@ final class CameraPluginDelegatingMethodTests: XCTestCase { return targetMinimumExposureOffset } - cameraPlugin.getMinimumExposureOffset { offset, error in - XCTAssertEqual(offset?.doubleValue, targetMinimumExposureOffset) - XCTAssertNil(error) + cameraPlugin.getMinExposureOffset { result in + switch result { + case .success(let offset): + XCTAssertEqual(offset, targetMinimumExposureOffset) + case .failure: + XCTFail("Unexpected error") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift index 3006d0375e13..9b0232ed9e2e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPluginInitializeCameraTests.swift @@ -44,9 +44,14 @@ final class CameraPluginInitializeCameraTests: XCTestCase { onFrameAvailableSet = true } - cameraPlugin.initializeCamera(0, withImageFormat: FCPPlatformImageFormatGroup.bgra8888) { - error in - XCTAssertNil(error) + cameraPlugin.initialize(cameraId: 0, 
imageFormat: PlatformImageFormatGroup.bgra8888) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -64,9 +69,14 @@ final class CameraPluginInitializeCameraTests: XCTestCase { dartAPISet = true } - cameraPlugin.initializeCamera(0, withImageFormat: FCPPlatformImageFormatGroup.bgra8888) { - error in - XCTAssertNil(error) + cameraPlugin.initialize(cameraId: 0, imageFormat: PlatformImageFormatGroup.bgra8888) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -78,9 +88,14 @@ final class CameraPluginInitializeCameraTests: XCTestCase { func testInitializeCamera_sendsDeviceOrientation() { let (cameraPlugin, _, mockGlobalEventApi, captureSessionQueue) = createCameraPlugin() - cameraPlugin.initializeCamera(0, withImageFormat: FCPPlatformImageFormatGroup.bgra8888) { - error in - XCTAssertNil(error) + cameraPlugin.initialize(cameraId: 0, imageFormat: PlatformImageFormatGroup.bgra8888) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } } waitForQueueRoundTrip(with: captureSessionQueue) @@ -97,9 +112,14 @@ final class CameraPluginInitializeCameraTests: XCTestCase { startCalled = true } - cameraPlugin.initializeCamera(0, withImageFormat: FCPPlatformImageFormatGroup.bgra8888) { - error in - XCTAssertNil(error) + cameraPlugin.initialize(cameraId: 0, imageFormat: PlatformImageFormatGroup.bgra8888) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift index e22e3044af77..dc006a571675 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.swift @@ -59,20 +59,20 @@ final class CameraPropertiesTests: XCTestCase { func testGetPigeonDeviceOrientationForUIDeviceOrientation() { XCTAssertEqual( - FCPPlatformDeviceOrientation.portraitDown, + PlatformDeviceOrientation.portraitDown, getPigeonDeviceOrientation(for: .portraitUpsideDown)) XCTAssertEqual( - FCPPlatformDeviceOrientation.landscapeLeft, + PlatformDeviceOrientation.landscapeLeft, getPigeonDeviceOrientation(for: .landscapeLeft)) XCTAssertEqual( - FCPPlatformDeviceOrientation.landscapeRight, + PlatformDeviceOrientation.landscapeRight, getPigeonDeviceOrientation(for: .landscapeRight)) XCTAssertEqual( - FCPPlatformDeviceOrientation.portraitUp, + PlatformDeviceOrientation.portraitUp, getPigeonDeviceOrientation(for: .portrait)) // Test default case. 
XCTAssertEqual( - FCPPlatformDeviceOrientation.portraitUp, + PlatformDeviceOrientation.portraitUp, getPigeonDeviceOrientation(for: .unknown)) } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift index 43d02a8b5e6e..69e0a828c5c5 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.swift @@ -41,7 +41,7 @@ final class CameraSessionPresetsTests: XCTestCase { } configuration.videoCaptureSession = videoSessionMock configuration.mediaSettings = CameraTestUtils.createDefaultMediaSettings( - resolutionPreset: FCPPlatformResolutionPreset.max) + resolutionPreset: PlatformResolutionPreset.max) let _ = CameraTestUtils.createTestCamera(configuration) @@ -64,7 +64,7 @@ final class CameraSessionPresetsTests: XCTestCase { let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.videoCaptureSession = videoSessionMock configuration.mediaSettings = CameraTestUtils.createDefaultMediaSettings( - resolutionPreset: FCPPlatformResolutionPreset.max) + resolutionPreset: PlatformResolutionPreset.max) configuration.videoCaptureDeviceFactory = { _ in MockCaptureDevice() } let _ = CameraTestUtils.createTestCamera(configuration) @@ -89,7 +89,7 @@ final class CameraSessionPresetsTests: XCTestCase { let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.videoCaptureSession = videoSessionMock configuration.mediaSettings = CameraTestUtils.createDefaultMediaSettings( - resolutionPreset: FCPPlatformResolutionPreset.ultraHigh) + resolutionPreset: PlatformResolutionPreset.ultraHigh) let _ = CameraTestUtils.createTestCamera(configuration) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift index 2db565ff541c..a3c8f33f38ca 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.swift @@ -12,10 +12,10 @@ import XCTest import camera_avfoundation_objc #endif -private let testResolutionPreset = FCPPlatformResolutionPreset.medium -private let testFramesPerSecond = 15 -private let testVideoBitrate = 200000 -private let testAudioBitrate = 32000 +private let testResolutionPreset = PlatformResolutionPreset.medium +private let testFramesPerSecond: Int64 = 15 +private let testVideoBitrate: Int64 = 200000 +private let testAudioBitrate: Int64 = 32000 private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { let lockExpectation: XCTestExpectation @@ -74,7 +74,8 @@ private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { override func assetWriterAudioInput(withOutputSettings outputSettings: [String: Any]?) -> AssetWriterInput { - if let bitrate = outputSettings?[AVEncoderBitRateKey] as? Int, bitrate == testAudioBitrate { + if let bitrate = outputSettings?[AVEncoderBitRateKey] as? 
Int, bitrate == Int(testAudioBitrate) + { audioSettingsExpectation.fulfill() } return MockAssetWriterInput() @@ -115,11 +116,11 @@ private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { final class CameraSettingsTests: XCTestCase { func testSettings_shouldPassConfigurationToCameraDeviceAndWriter() { let enableAudio: Bool = true - let settings = FCPPlatformMediaSettings.make( - with: testResolutionPreset, - framesPerSecond: NSNumber(value: testFramesPerSecond), - videoBitrate: NSNumber(value: testVideoBitrate), - audioBitrate: NSNumber(value: testAudioBitrate), + let settings = PlatformMediaSettings( + resolutionPreset: testResolutionPreset, + framesPerSecond: testFramesPerSecond, + videoBitrate: testVideoBitrate, + audioBitrate: testAudioBitrate, enableAudio: enableAudio ) let injectedWrapper = TestMediaSettingsAVWrapper(test: self, expectAudio: enableAudio) @@ -169,20 +170,25 @@ final class CameraSettingsTests: XCTestCase { ) let expectation = self.expectation(description: "Result finished") - let mediaSettings = FCPPlatformMediaSettings.make( - with: testResolutionPreset, - framesPerSecond: NSNumber(value: testFramesPerSecond), - videoBitrate: NSNumber(value: testVideoBitrate), - audioBitrate: NSNumber(value: testAudioBitrate), + let mediaSettings = PlatformMediaSettings( + resolutionPreset: testResolutionPreset, + framesPerSecond: testFramesPerSecond, + videoBitrate: testVideoBitrate, + audioBitrate: testAudioBitrate, enableAudio: false ) - var resultValue: NSNumber? + var resultValue: Int64? camera.createCameraOnSessionQueue( withName: "acamera", settings: mediaSettings - ) { result, error in - XCTAssertNil(error) - resultValue = result + ) { result in + switch result { + case .success(let result): + resultValue = result + case .failure: + XCTFail("Unexpected failure") + } + expectation.fulfill() } @@ -191,11 +197,11 @@ final class CameraSettingsTests: XCTestCase { } func testSettings_ShouldSelectFormatWhichSupports60FPS() { - let settings = FCPPlatformMediaSettings.make( - with: testResolutionPreset, - framesPerSecond: NSNumber(value: 60), - videoBitrate: NSNumber(value: testVideoBitrate), - audioBitrate: NSNumber(value: testAudioBitrate), + let settings = PlatformMediaSettings( + resolutionPreset: testResolutionPreset, + framesPerSecond: 60, + videoBitrate: testVideoBitrate, + audioBitrate: testAudioBitrate, enableAudio: false ) @@ -207,12 +213,13 @@ final class CameraSettingsTests: XCTestCase { XCTAssertLessThanOrEqual(range.minFrameRate, 60) XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60) } + func test_setUpCaptureSessionForAudioIfNeeded_skipsAudioSession_whenAudioDisabled() { - let settings = FCPPlatformMediaSettings.make( - with: testResolutionPreset, - framesPerSecond: NSNumber(value: testFramesPerSecond), - videoBitrate: NSNumber(value: testVideoBitrate), - audioBitrate: NSNumber(value: testAudioBitrate), + let settings = PlatformMediaSettings( + resolutionPreset: testResolutionPreset, + framesPerSecond: testFramesPerSecond, + videoBitrate: testVideoBitrate, + audioBitrate: testAudioBitrate, enableAudio: false ) @@ -255,11 +262,11 @@ final class CameraSettingsTests: XCTestCase { } func test_setUpCaptureSessionForAudioIfNeeded_addsAudioSession_whenAudioEnabled() { - let settings = FCPPlatformMediaSettings.make( - with: testResolutionPreset, - framesPerSecond: NSNumber(value: testFramesPerSecond), - videoBitrate: NSNumber(value: testVideoBitrate), - audioBitrate: NSNumber(value: testAudioBitrate), + let settings = PlatformMediaSettings( + 
resolutionPreset: testResolutionPreset, + framesPerSecond: testFramesPerSecond, + videoBitrate: testVideoBitrate, + audioBitrate: testAudioBitrate, enableAudio: true ) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index 46b42f440239..2f801a46548f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -16,11 +16,11 @@ import XCTest enum CameraTestUtils { /// This method provides a convenient way to create media settings with minimal configuration. /// Audio is enabled by default, while other parameters use platform-specific defaults. - static func createDefaultMediaSettings(resolutionPreset: FCPPlatformResolutionPreset) - -> FCPPlatformMediaSettings + static func createDefaultMediaSettings(resolutionPreset: PlatformResolutionPreset) + -> PlatformMediaSettings { - return FCPPlatformMediaSettings.make( - with: resolutionPreset, + return PlatformMediaSettings( + resolutionPreset: resolutionPreset, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, @@ -59,7 +59,7 @@ enum CameraTestUtils { let configuration = CameraConfiguration( mediaSettings: createDefaultMediaSettings( - resolutionPreset: FCPPlatformResolutionPreset.medium), + resolutionPreset: PlatformResolutionPreset.medium), mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper(), captureDeviceFactory: { _ in captureDeviceMock }, audioCaptureDeviceFactory: { MockCaptureDevice() }, diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift index 8310bbba606c..e8eaf4f0ad03 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift @@ -80,8 +80,14 @@ final class FLTCamExposureTests: XCTestCase { } let expectation = expectation(description: "Completion called") - camera.setExposurePoint(FCPPlatformPoint.makeWith(x: 1, y: 1)) { error in - XCTAssertNil(error) + camera.setExposurePoint(PlatformPoint(x: 1, y: 1)) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -98,10 +104,14 @@ final class FLTCamExposureTests: XCTestCase { let expectation = expectation(description: "Completion with error") - camera.setExposurePoint(FCPPlatformPoint.makeWith(x: 1, y: 1)) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "setExposurePointFailed") - XCTAssertEqual(error?.message, "Device does not have exposure point capabilities") + camera.setExposurePoint(PlatformPoint(x: 1, y: 1)) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setExposurePointFailed") + XCTAssertEqual(error.message, "Device does not have exposure point capabilities") + default: + XCTFail("Expected failure") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift index 570fd2022b82..dfedb4e94951 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift @@ -138,8 +138,14 @@ final class FLTCamSetFocusModeTests: XCTestCase { } } - camera.setFocusPoint(FCPPlatformPoint.makeWith(x: 1, y: 1)) { error in - XCTAssertNil(error) + camera.setFocusPoint(PlatformPoint(x: 1, y: 1)) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } } XCTAssertTrue(setFocusPointOfInterestCalled) @@ -154,10 +160,14 @@ final class FLTCamSetFocusModeTests: XCTestCase { let expectation = self.expectation(description: "Completion with error") - camera.setFocusPoint(FCPPlatformPoint.makeWith(x: 1, y: 1)) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "setFocusPointFailed") - XCTAssertEqual(error?.message, "Device does not have focus point capabilities") + camera.setFocusPoint(PlatformPoint(x: 1, y: 1)) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setFocusPointFailed") + XCTAssertEqual(error.message, "Device does not have focus point capabilities") + default: + XCTFail("Expected failure") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift index 627929761d63..775b533fae49 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift @@ -73,7 +73,7 @@ final class FLTCamSetDeviceOrientationTests: XCTestCase { videoSetVideoOrientationCalled = true } - camera.lockCaptureOrientation(FCPPlatformDeviceOrientation.portraitDown) + camera.lockCaptureOrientation(PlatformDeviceOrientation.portraitDown) camera.deviceOrientation = .landscapeLeft diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift index 34f47c09346e..fcce376ae18b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift @@ -39,8 +39,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.torch) { error in - XCTAssertNil(error) + camera.setFlashMode(.torch) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -56,10 +62,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.torch) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "setFlashModeFailed") - XCTAssertEqual(error?.message, "Device does not support torch mode") + camera.setFlashMode(.torch) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setFlashModeFailed") + XCTAssertEqual(error.message, "Device does not support torch mode") + default: + XCTFail("Expected failure") + } expectation.fulfill() } @@ -74,10 +84,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.torch) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, 
"setFlashModeFailed") - XCTAssertEqual(error?.message, "Torch mode is currently not available") + camera.setFlashMode(.torch) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setFlashModeFailed") + XCTAssertEqual(error.message, "Torch mode is currently not available") + default: + XCTFail("Expected failure") + } expectation.fulfill() } @@ -101,8 +115,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.auto) { error in - XCTAssertNil(error) + camera.setFlashMode(.auto) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -118,10 +138,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.auto) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "setFlashModeFailed") - XCTAssertEqual(error?.message, "Device does not have flash capabilities") + camera.setFlashMode(.auto) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setFlashModeFailed") + XCTAssertEqual(error.message, "Device does not have flash capabilities") + default: + XCTFail("Expected failure") + } expectation.fulfill() } @@ -138,10 +162,14 @@ final class FLTCamSetFlashModeTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setFlashMode(.auto) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "setFlashModeFailed") - XCTAssertEqual(error?.message, "Device does not support this specific flash mode") + camera.setFlashMode(.auto) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "setFlashModeFailed") + XCTAssertEqual(error.message, "Device does not support this specific flash mode") + default: + XCTFail("Expected failure") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift index b0641fc6daf7..13f61650afb6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift @@ -39,8 +39,14 @@ final class FLTCamZoomTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setZoomLevel(targetZoom) { error in - XCTAssertNil(error) + camera.setZoomLevel(targetZoom) { + result in + switch result { + case .success: + break + case .failure: + XCTFail("Unexpected failure") + } expectation.fulfill() } @@ -58,9 +64,13 @@ final class FLTCamZoomTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setZoomLevel(CGFloat(1.0)) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "ZOOM_ERROR") + camera.setZoomLevel(CGFloat(1.0)) { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "ZOOM_ERROR") + default: + XCTFail("Expected failure") + } expectation.fulfill() } @@ -76,9 +86,13 @@ final class FLTCamZoomTests: XCTestCase { let expectation = expectation(description: "Call completed") - camera.setZoomLevel(CGFloat(2.0)) { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "ZOOM_ERROR") + camera.setZoomLevel(CGFloat(2.0)) { result in + switch result { + case .failure(let error 
as PigeonError): + XCTAssertEqual(error.code, "ZOOM_ERROR") + default: + XCTFail("Expected failure") + } expectation.fulfill() } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift index b3803b119ea8..a1ddd6b88db4 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift @@ -13,7 +13,7 @@ import Flutter #endif final class MockCamera: NSObject, Camera { - var setDartApiStub: ((FCPCameraEventApi?) -> Void)? + var setDartApiStub: ((CameraEventApi?) -> Void)? var setOnFrameAvailableStub: (((() -> Void)?) -> Void)? var getMinimumExposureOffsetStub: (() -> CGFloat)? var getMaximumExposureOffsetStub: (() -> CGFloat)? @@ -23,29 +23,31 @@ final class MockCamera: NSObject, Camera { var receivedImageStreamDataStub: (() -> Void)? var startStub: (() -> Void)? var startVideoRecordingStub: - ((@escaping (FlutterError?) -> Void, FlutterBinaryMessenger?) -> Void)? + ((@escaping (Result<Void, Error>) -> Void, FlutterBinaryMessenger?) -> Void)? var pauseVideoRecordingStub: (() -> Void)? var resumeVideoRecordingStub: (() -> Void)? - var stopVideoRecordingStub: ((((String?, FlutterError?) -> Void)?) -> Void)? - var captureToFileStub: ((((String?, FlutterError?) -> Void)?) -> Void)? + var stopVideoRecordingStub: ((@escaping (Result<String, Error>) -> Void) -> Void)? + var captureToFileStub: ((@escaping (Result<String, Error>) -> Void) -> Void)? var setDeviceOrientationStub: ((UIDeviceOrientation) -> Void)? - var lockCaptureOrientationStub: ((FCPPlatformDeviceOrientation) -> Void)? + var lockCaptureOrientationStub: ((PlatformDeviceOrientation) -> Void)? var unlockCaptureOrientationStub: (() -> Void)? - var setImageFileFormatStub: ((FCPPlatformImageFileFormat) -> Void)? - var setExposureModeStub: ((FCPPlatformExposureMode) -> Void)? + var setImageFileFormatStub: ((PlatformImageFileFormat) -> Void)? + var setExposureModeStub: ((PlatformExposureMode) -> Void)? var setExposureOffsetStub: ((Double) -> Void)? - var setExposurePointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setFocusModeStub: ((FCPPlatformFocusMode) -> Void)? - var setFocusPointStub: ((FCPPlatformPoint?, ((FlutterError?) -> Void)?) -> Void)? - var setZoomLevelStub: ((CGFloat, ((FlutterError?) -> Void)?) -> Void)? - var setFlashModeStub: ((FCPPlatformFlashMode, ((FlutterError?) -> Void)?) -> Void)? + var setExposurePointStub: ((PlatformPoint?, @escaping (Result<Void, Error>) -> Void) -> Void)? + var setFocusModeStub: ((PlatformFocusMode) -> Void)? + var setFocusPointStub: ((PlatformPoint?, @escaping (Result<Void, Error>) -> Void) -> Void)? + var setZoomLevelStub: ((CGFloat, @escaping (Result<Void, Error>) -> Void) -> Void)? + var setFlashModeStub: ((PlatformFlashMode, @escaping (Result<Void, Error>) -> Void) -> Void)? + var pausePreviewStub: (() -> Void)? var resumePreviewStub: (() -> Void)? - var setDescriptionWhileRecordingStub: ((String, ((FlutterError?) -> Void)?) -> Void)? - var startImageStreamStub: ((FlutterBinaryMessenger, (FlutterError?) -> Void) -> Void)? + var setDescriptionWhileRecordingStub: + ((String, @escaping (Result<Void, Error>) -> Void) -> Void)? + var startImageStreamStub: + ((FlutterBinaryMessenger, @escaping (Result<Void, Error>) -> Void) -> Void)? var stopImageStreamStub: (() -> Void)? - var dartAPI: FCPCameraEventApi? { + var dartAPI: CameraEventApi? 
{ get { preconditionFailure("Attempted to access unimplemented property: dartAPI") } @@ -110,7 +112,7 @@ final class MockCamera: NSObject, Camera { func stop() {} func startVideoRecording( - completion: @escaping (FlutterError?) -> Void, + completion: @escaping (Result) -> Void, messengerForStreaming messenger: FlutterBinaryMessenger? ) { startVideoRecordingStub?(completion, messenger) @@ -124,15 +126,15 @@ final class MockCamera: NSObject, Camera { resumeVideoRecordingStub?() } - func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + func stopVideoRecording(completion: @escaping (Result) -> Void) { stopVideoRecordingStub?(completion) } - func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + func captureToFile(completion: @escaping (Result) -> Void) { captureToFileStub?(completion) } - func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) { + func lockCaptureOrientation(_ orientation: PlatformDeviceOrientation) { lockCaptureOrientationStub?(orientation) } @@ -140,11 +142,11 @@ final class MockCamera: NSObject, Camera { unlockCaptureOrientationStub?() } - func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + func setImageFileFormat(_ fileFormat: PlatformImageFileFormat) { setImageFileFormatStub?(fileFormat) } - func setExposureMode(_ mode: FCPPlatformExposureMode) { + func setExposureMode(_ mode: PlatformExposureMode) { setExposureModeStub?(mode) } @@ -153,29 +155,31 @@ final class MockCamera: NSObject, Camera { } func setExposurePoint( - _ point: FCPPlatformPoint?, withCompletion: @escaping (FlutterError?) -> Void + _ point: PlatformPoint?, withCompletion: @escaping (Result) -> Void ) { setExposurePointStub?(point, withCompletion) } - func setFocusMode(_ mode: FCPPlatformFocusMode) { + func setFocusMode(_ mode: PlatformFocusMode) { setFocusModeStub?(mode) } - func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + func setFocusPoint( + _ point: PlatformPoint?, completion: @escaping (Result) -> Void + ) { setFocusPointStub?(point, completion) } func setZoomLevel( _ zoom: CGFloat, - withCompletion completion: @escaping (FlutterError?) -> Void + withCompletion completion: @escaping (Result) -> Void ) { setZoomLevelStub?(zoom, completion) } func setFlashMode( - _ mode: FCPPlatformFlashMode, - withCompletion completion: @escaping (FlutterError?) -> Void + _ mode: PlatformFlashMode, + withCompletion completion: @escaping (Result) -> Void ) { setFlashModeStub?(mode, completion) } @@ -190,14 +194,14 @@ final class MockCamera: NSObject, Camera { func setDescriptionWhileRecording( _ cameraName: String, - withCompletion completion: @escaping (FlutterError?) -> Void + withCompletion completion: @escaping (Result) -> Void ) { setDescriptionWhileRecordingStub?(cameraName, completion) } func startImageStream( with messenger: FlutterBinaryMessenger, - completion: @escaping (FlutterError?) 
-> Void + completion: @escaping (Result) -> Void ) { startImageStreamStub?(messenger, completion) } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCameraPermissionManager.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCameraPermissionManager.swift index 0b140c1b78c0..126b2237adce 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCameraPermissionManager.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockFLTCameraPermissionManager.swift @@ -19,7 +19,7 @@ final class MockCameraPermissionManager: CameraPermissionManager { super.init(permissionService: DefaultPermissionService()) } - override func requestCameraPermission(completionHandler: @escaping (FlutterError?) -> Void) { + override func requestCameraPermission(completionHandler: @escaping (PigeonError?) -> Void) { if let stub = requestCameraPermissionStub { stub(completionHandler) } else { @@ -27,7 +27,7 @@ final class MockCameraPermissionManager: CameraPermissionManager { } } - override func requestAudioPermission(completionHandler: @escaping (FlutterError?) -> Void) { + override func requestAudioPermission(completionHandler: @escaping (PigeonError?) -> Void) { if let stub = requestAudioPermissionStub { stub(completionHandler) } else { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockGlobalEventApi.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockGlobalEventApi.swift index 3680725e263d..ce8a9625c6d7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockGlobalEventApi.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockGlobalEventApi.swift @@ -11,22 +11,21 @@ import Flutter import camera_avfoundation_objc #endif -/// A mock implementation of `FCPCameraGlobalEventApi` that captures received +/// A mock implementation of `CameraGlobalEventApi` that captures received /// `deviceOrientationChanged` events and exposes whether they were received to the testing code. -final class MockGlobalEventApi: FCPCameraGlobalEventApi { - +final class MockGlobalEventApi: CameraGlobalEventApiProtocol { /// Whether the `deviceOrientationChanged` callback was called. var deviceOrientationChangedCalled = false /// The last orientation received by the `deviceOrientationChanged` callback. - var lastOrientation = FCPPlatformDeviceOrientation.portraitUp + var lastOrientation = PlatformDeviceOrientation.portraitUp - override func deviceOrientationChangedOrientation( - _ orientation: FCPPlatformDeviceOrientation, - completion: @escaping (FlutterError?) -> Void + func deviceOrientationChanged( + orientation orientationArg: PlatformDeviceOrientation, + completion: @escaping (Result) -> Void ) { deviceOrientationChangedCalled = true - lastOrientation = orientation - completion(nil) + lastOrientation = orientationArg + completion(.success(())) } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index 11bda3f8b71f..05fbe5c574fd 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -43,9 +43,13 @@ final class PhotoCaptureTests: XCTestCase { // `FLTCam::captureToFile` runs on capture session queue. 
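MockCamera's Result-based stubs above let a test inject either outcome directly instead of threading nullable FlutterError arguments through. A minimal sketch of that flow, assuming the stub completions are Result<Void, Error> (the function below is hypothetical and not part of the RunnerTests):

import XCTest

// Sketch only: drive MockCamera's setFlashModeStub and assert on the PigeonError it reports.
func exampleSetFlashModeFailure() {
  let camera = MockCamera()
  camera.setFlashModeStub = { _, completion in
    completion(
      .failure(
        PigeonError(
          code: "setFlashModeFailed",
          message: "Torch mode is currently not available",
          details: nil)))
  }
  camera.setFlashMode(.torch) { result in
    switch result {
    case .failure(let error as PigeonError):
      XCTAssertEqual(error.code, "setFlashModeFailed")
    default:
      XCTFail("Expected a PigeonError failure")
    }
  }
}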
captureSessionQueue.async {
- cam.captureToFile { result, error in
- XCTAssertNil(result)
- XCTAssertNotNil(error)
+ cam.captureToFile { result in
+ switch result {
+ case .success(_):
+ XCTFail("Expected failure")
+ case .failure(_):
+ break
+ }
errorExpectation.fulfill()
}
}
@@ -76,8 +80,13 @@ final class PhotoCaptureTests: XCTestCase {
// `FLTCam::captureToFile` runs on capture session queue.
captureSessionQueue.async {
- cam.captureToFile { result, error in
- XCTAssertEqual(result, filePath)
+ cam.captureToFile { result in
+ switch result {
+ case .success(let result):
+ XCTAssertEqual(result, filePath)
+ case .failure(_):
+ XCTFail("Unexpected failure")
+ }
pathExpectation.fulfill()
}
}
@@ -93,7 +102,7 @@
captureSessionQueue.setSpecific(
key: captureSessionQueueSpecificKey, value: captureSessionQueueSpecificValue)
let cam = createCam(with: captureSessionQueue)
- cam.setImageFileFormat(FCPPlatformImageFileFormat.heif)
+ cam.setImageFileFormat(PlatformImageFileFormat.heif)
let mockOutput = MockCapturePhotoOutput()
mockOutput.availablePhotoCodecTypes = [AVVideoCodecType.hevc]
@@ -110,8 +119,13 @@
// `FLTCam::captureToFile` runs on capture session queue.
captureSessionQueue.async {
- cam.captureToFile { filePath, error in
- XCTAssertEqual((filePath! as NSString).pathExtension, "heif")
+ cam.captureToFile { result in
+ switch result {
+ case .success(let filePath):
+ XCTAssertEqual((filePath as NSString).pathExtension, "heif")
+ case .failure(_):
+ XCTFail("Unexpected failure")
+ }
expectation.fulfill()
}
}
@@ -128,7 +142,7 @@
captureSessionQueue.setSpecific(
key: captureSessionQueueSpecificKey, value: captureSessionQueueSpecificValue)
let cam = createCam(with: captureSessionQueue)
- cam.setImageFileFormat(FCPPlatformImageFileFormat.heif)
+ cam.setImageFileFormat(PlatformImageFileFormat.heif)
let mockOutput = MockCapturePhotoOutput()
mockOutput.capturePhotoWithSettingsStub = { settings, photoDelegate in
@@ -144,8 +158,13 @@
// `FLTCam::captureToFile` runs on capture session queue.
captureSessionQueue.async {
- cam.captureToFile { filePath, error in
- XCTAssertEqual((filePath! as NSString).pathExtension, "jpg")
+ cam.captureToFile { result in
+ switch result {
+ case .success(let filePath):
+ XCTAssertEqual((filePath as NSString).pathExtension, "jpg")
+ case .failure(_):
+ XCTFail("Unexpected failure")
+ }
expectation.fulfill()
}
}
@@ -193,8 +212,13 @@ final class PhotoCaptureTests: XCTestCase {
// `FLTCam::captureToFile` runs on capture session queue.
captureSessionQueue.async { cam.setFlashMode(.torch) { _ in } - cam.captureToFile { result, error in - XCTAssertEqual(result, filePath) + cam.captureToFile { result in + switch result { + case .success(let result): + XCTAssertEqual(result, filePath) + case .failure(_): + XCTFail("Unexpected failure") + } pathExpectation.fulfill() } } @@ -223,15 +247,15 @@ final class PhotoCaptureTests: XCTestCase { cam.capturePhotoOutput = mockOutput captureSessionQueue.async { - cam.captureToFile { filePath, error in - XCTAssertNil(error) - XCTAssertNotNil(filePath) - - if let filePath = filePath { + cam.captureToFile { result in + switch result { + case .success(let filePath): XCTAssertTrue( filePath.contains(expectedPath) ) expectation.fulfill() + case .failure: + XCTFail("Unexpected failure") } } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index ec9af3f6d2c0..c762e75bf992 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -79,8 +79,8 @@ final class CameraSampleBufferTests: XCTestCase { let input = MockAssetWriterInput() let configuration = CameraTestUtils.createTestCameraConfiguration() - configuration.mediaSettings = FCPPlatformMediaSettings.make( - with: .medium, + configuration.mediaSettings = PlatformMediaSettings( + resolutionPreset: .medium, framesPerSecond: nil, videoBitrate: nil, audioBitrate: nil, @@ -300,7 +300,7 @@ final class CameraSampleBufferTests: XCTestCase { camera.startVideoRecording(completion: { error in }, messengerForStreaming: nil) var completionCalled = false - camera.stopVideoRecording(completion: { path, error in + camera.stopVideoRecording(completion: { result in completionCalled = true }) @@ -528,13 +528,18 @@ final class CameraSampleBufferTests: XCTestCase { let expectation = self.expectation(description: "Completion handler called with error") camera.startVideoRecording( - completion: { error in - XCTAssertNotNil(error) - XCTAssertEqual(error?.code, "IOError") - XCTAssertEqual(error?.message, "AVAssetWriter failed to start writing") - XCTAssertEqual(error?.details as? String, "Mock write error") - XCTAssertFalse(camera.isRecording) - expectation.fulfill() + completion: { result in + switch result { + case .failure(let error as PigeonError): + XCTAssertEqual(error.code, "IOError") + XCTAssertEqual(error.message, "AVAssetWriter failed to start writing") + XCTAssertEqual(error.details as? String, "Mock write error") + XCTAssertFalse(camera.isRecording) + expectation.fulfill() + default: + XCTFail("Expected PigeonError") + } + }, messengerForStreaming: nil) waitForExpectations(timeout: 1.0, handler: nil) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec b/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec index 34722dd2ebda..d7e009b8ceaf 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation.podspec @@ -13,10 +13,7 @@ A Flutter plugin to use the camera from your Flutter app. 
s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' } s.source = { :http => 'https://github.com/flutter/packages/tree/main/packages/camera_avfoundation' } s.documentation_url = 'https://pub.dev/packages/camera_avfoundation' - # Combine camera_avfoundation and camera_avfoundation_objc sources into a single pod, unlike - # SwiftPM, where separate Swift and Objective-C targets are required. - s.source_files = 'camera_avfoundation/Sources/camera_avfoundation*/**/*.{h,m,swift}' - s.public_header_files = 'camera_avfoundation/Sources/camera_avfoundation_objc/include/**/*.h' + s.source_files = 'camera_avfoundation/Sources/camera_avfoundation/**/*.swift' s.swift_version = '5.0' s.xcconfig = { 'LIBRARY_SEARCH_PATHS' => '$(TOOLCHAIN_DIR)/usr/lib/swift/$(PLATFORM_NAME)/ $(SDKROOT)/usr/lib/swift', diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Package.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Package.swift index 48fe6b56e30b..302146b1634d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Package.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Package.swift @@ -13,28 +13,16 @@ let package = Package( ], products: [ .library( - name: "camera-avfoundation", targets: ["camera_avfoundation", "camera_avfoundation_objc"]) + name: "camera-avfoundation", targets: ["camera_avfoundation"]) ], dependencies: [], targets: [ .target( name: "camera_avfoundation", - dependencies: ["camera_avfoundation_objc"], path: "Sources/camera_avfoundation", resources: [ .process("Resources") ] - ), - .target( - name: "camera_avfoundation_objc", - dependencies: [], - path: "Sources/camera_avfoundation_objc", - resources: [ - .process("Resources") - ], - cSettings: [ - .headerSearchPath("include/camera_avfoundation") - ] - ), + ) ] ) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift index 72eb13b81023..1414e8128cc6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -17,7 +17,7 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, { /// The API instance used to communicate with the Dart side of the plugin. /// Once initially set, this should only ever be accessed on the main thread. - var dartAPI: FCPCameraEventApi? { get set } + var dartAPI: CameraEventApi? { get set } var onFrameAvailable: (() -> Void)? { get set } @@ -54,29 +54,29 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, /// /// @param messenger Nullable messenger for capturing each frame. func startVideoRecording( - completion: @escaping (_ error: FlutterError?) -> Void, + completion: @escaping (Result) -> Void, messengerForStreaming: FlutterBinaryMessenger? ) func pauseVideoRecording() func resumeVideoRecording() - func stopVideoRecording(completion: @escaping (_ path: String?, _ error: FlutterError?) -> Void) + func stopVideoRecording(completion: @escaping (Result) -> Void) - func captureToFile(completion: @escaping (_ path: String?, _ error: FlutterError?) 
-> Void) + func captureToFile(completion: @escaping (Result) -> Void) - func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) + func lockCaptureOrientation(_ orientation: PlatformDeviceOrientation) func unlockCaptureOrientation() - func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) + func setImageFileFormat(_ fileFormat: PlatformImageFileFormat) - func setExposureMode(_ mode: FCPPlatformExposureMode) + func setExposureMode(_ mode: PlatformExposureMode) func setExposureOffset(_ offset: Double) /// Sets the exposure point, in a (0,1) coordinate system. /// /// If @c point is nil, the exposure point will reset to the center. func setExposurePoint( - _ point: FCPPlatformPoint?, - withCompletion: @escaping (_ error: FlutterError?) -> Void + _ point: PlatformPoint?, + withCompletion: @escaping (Result) -> Void ) /// Sets FocusMode on the current AVCaptureDevice. @@ -90,21 +90,21 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, /// be set. /// /// @param mode The focus mode that should be applied. - func setFocusMode(_ mode: FCPPlatformFocusMode) + func setFocusMode(_ mode: PlatformFocusMode) /// Sets the focus point, in a (0,1) coordinate system. /// /// If @c point is nil, the focus point will reset to the center. func setFocusPoint( - _ point: FCPPlatformPoint?, - completion: @escaping (_ error: FlutterError?) -> Void + _ point: PlatformPoint?, + completion: @escaping (Result) -> Void ) - func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (_ error: FlutterError?) -> Void) + func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (Result) -> Void) func setFlashMode( - _ mode: FCPPlatformFlashMode, - withCompletion: @escaping (_ error: FlutterError?) -> Void + _ mode: PlatformFlashMode, + withCompletion: @escaping (Result) -> Void ) func pausePreview() @@ -112,11 +112,11 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, func setDescriptionWhileRecording( _ cameraName: String, - withCompletion: @escaping (_ error: FlutterError?) -> Void + withCompletion: @escaping (Result) -> Void ) func startImageStream( - with: FlutterBinaryMessenger, completion: @escaping (_ error: FlutterError?) -> Void) + with: FlutterBinaryMessenger, completion: @escaping (Result) -> Void) func stopImageStream() // Override to make `AVCaptureVideoDataOutputSampleBufferDelegate`/ diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift index 8e60ae35dd0f..41aac22b6948 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraConfiguration.swift @@ -28,7 +28,7 @@ typealias InputPixelBufferAdaptorFactory = ( /// A configuration object that centralizes dependencies for `DefaultCamera`. 
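Throughout the protocol above, the old `(value, FlutterError?)` completion pairs become single Result completions: a nil error maps to .success and a non-nil error to .failure. A generic adapter makes that convention explicit; it is an illustration only, not a helper defined anywhere in this patch, and the failure cast mirrors the `error as? FlutterError` check in the generated wrapError:

import Flutter

// Illustration only: bridge a legacy (value, FlutterError?) callback to the
// Result-based shape used by the Camera protocol.
func asResultCompletion<Value>(
  _ legacy: @escaping (Value?, FlutterError?) -> Void
) -> (Result<Value, Error>) -> Void {
  return { result in
    switch result {
    case .success(let value):
      legacy(value, nil)
    case .failure(let error):
      let flutterError =
        error as? FlutterError
        ?? FlutterError(code: "unknown", message: error.localizedDescription, details: nil)
      legacy(nil, flutterError)
    }
  }
}

// Usage sketch: camera.stopVideoRecording(completion: asResultCompletion(legacyHandler))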
class CameraConfiguration { - var mediaSettings: FCPPlatformMediaSettings + var mediaSettings: PlatformMediaSettings var mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper var captureSessionQueue: DispatchQueue var videoCaptureSession: CaptureSession @@ -44,7 +44,7 @@ class CameraConfiguration { var orientation: UIDeviceOrientation init( - mediaSettings: FCPPlatformMediaSettings, + mediaSettings: PlatformMediaSettings, mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper, captureDeviceFactory: @escaping VideoCaptureDeviceFactory, audioCaptureDeviceFactory: @escaping AudioCaptureDeviceFactory, diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionManager.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionManager.swift index 7bb1dd6bd477..733351bd0716 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionManager.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionManager.swift @@ -6,7 +6,7 @@ import AVFoundation import Flutter /// Completion handler for camera permission requests. -typealias CameraPermissionRequestCompletionHandler = (FlutterError?) -> Void +typealias CameraPermissionRequestCompletionHandler = (PigeonError?) -> Void private enum Permission { case camera @@ -70,14 +70,14 @@ class CameraPermissionManager: NSObject { let flutterError = switch permission { case .audio: - FlutterError( + PigeonError( code: "AudioAccessDeniedWithoutPrompt", message: "User has previously denied the audio access request. Go to Settings to enable audio access.", details: nil ) case .camera: - FlutterError( + PigeonError( code: "CameraAccessDeniedWithoutPrompt", message: "User has previously denied the camera access request. 
Go to Settings to enable camera access.", @@ -90,13 +90,13 @@ class CameraPermissionManager: NSObject { let flutterError = switch permission { case .audio: - FlutterError( + PigeonError( code: "AudioAccessRestricted", message: "Audio access is restricted.", details: nil ) case .camera: - FlutterError( + PigeonError( code: "CameraAccessRestricted", message: "Camera access is restricted.", details: nil @@ -113,13 +113,13 @@ class CameraPermissionManager: NSObject { let flutterError = switch permission { case .audio: - FlutterError( + PigeonError( code: "AudioAccessDenied", message: "User denied the audio access request.", details: nil ) case .camera: - FlutterError( + PigeonError( code: "CameraAccessDenied", message: "User denied the camera access request.", details: nil diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index a998e08d9047..67436c255b80 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -13,7 +13,7 @@ import Flutter public final class CameraPlugin: NSObject, FlutterPlugin { private let registry: FlutterTextureRegistry private let messenger: FlutterBinaryMessenger - private let globalEventAPI: FCPCameraGlobalEventApi + private let globalEventAPI: CameraGlobalEventApiProtocol private let deviceDiscoverer: CameraDeviceDiscoverer private let permissionManager: CameraPermissionManager private let captureDeviceFactory: VideoCaptureDeviceFactory @@ -30,7 +30,7 @@ public final class CameraPlugin: NSObject, FlutterPlugin { let instance = CameraPlugin( registry: registrar.textures(), messenger: registrar.messenger(), - globalAPI: FCPCameraGlobalEventApi(binaryMessenger: registrar.messenger()), + globalAPI: CameraGlobalEventApi(binaryMessenger: registrar.messenger()), deviceDiscoverer: DefaultCameraDeviceDiscoverer(), permissionManager: CameraPermissionManager( permissionService: DefaultPermissionService()), @@ -43,13 +43,13 @@ public final class CameraPlugin: NSObject, FlutterPlugin { captureSessionQueue: DispatchQueue(label: "io.flutter.camera.captureSessionQueue") ) - SetUpFCPCameraApi(registrar.messenger(), instance) + CameraApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance) } init( registry: FlutterTextureRegistry, messenger: FlutterBinaryMessenger, - globalAPI: FCPCameraGlobalEventApi, + globalAPI: CameraGlobalEventApiProtocol, deviceDiscoverer: CameraDeviceDiscoverer, permissionManager: CameraPermissionManager, deviceFactory: @escaping VideoCaptureDeviceFactory, @@ -86,8 +86,8 @@ public final class CameraPlugin: NSObject, FlutterPlugin { UIDevice.current.endGeneratingDeviceOrientationNotifications() } - private static func flutterErrorFromNSError(_ error: NSError) -> FlutterError { - return FlutterError( + private static func pigeonErrorFromNSError(_ error: NSError) -> PigeonError { + return PigeonError( code: "Error \(error.code)", message: error.localizedDescription, details: error.domain) @@ -113,8 +113,8 @@ public final class CameraPlugin: NSObject, FlutterPlugin { func sendDeviceOrientation(_ orientation: UIDeviceOrientation) { DispatchQueue.main.async { [weak self] in - self?.globalEventAPI.deviceOrientationChangedOrientation( - getPigeonDeviceOrientation(for: orientation) + 
self?.globalEventAPI.deviceOrientationChanged( + orientation: getPigeonDeviceOrientation(for: orientation) ) { _ in // Ignore errors; this is essentially a broadcast stream, and // it's fine if the other end doesn't receive the message @@ -124,9 +124,10 @@ public final class CameraPlugin: NSObject, FlutterPlugin { } } -extension CameraPlugin: FCPCameraApi { - public func availableCameras( - completion: @escaping ([FCPPlatformCameraDescription]?, FlutterError?) -> Void +extension CameraPlugin: CameraApi { + + func getAvailableCameras( + completion: @escaping (Result<[PlatformCameraDescription], any Error>) -> Void ) { captureSessionQueue.async { [weak self] in guard let strongSelf = self else { return } @@ -142,24 +143,24 @@ extension CameraPlugin: FCPCameraApi { mediaType: .video, position: .unspecified) - var reply: [FCPPlatformCameraDescription] = [] + var reply: [PlatformCameraDescription] = [] for device in devices { let lensFacing = strongSelf.platformLensDirection(for: device) let lensType = strongSelf.platformLensType(for: device) - let cameraDescription = FCPPlatformCameraDescription.make( - withName: device.uniqueID, + let cameraDescription = PlatformCameraDescription( + name: device.uniqueID, lensDirection: lensFacing, lensType: lensType ) reply.append(cameraDescription) } - completion(reply, nil) + completion(.success(reply)) } } - private func platformLensDirection(for device: CaptureDevice) -> FCPPlatformCameraLensDirection { + private func platformLensDirection(for device: CaptureDevice) -> PlatformCameraLensDirection { switch device.position { case .back: return .back @@ -172,7 +173,7 @@ extension CameraPlugin: FCPCameraApi { } } - private func platformLensType(for device: CaptureDevice) -> FCPPlatformCameraLensType { + private func platformLensType(for device: CaptureDevice) -> PlatformCameraLensType { switch device.deviceType { case .builtInWideAngleCamera: return .wide @@ -187,10 +188,9 @@ extension CameraPlugin: FCPCameraApi { } } - public func createCamera( - withName cameraName: String, - settings: FCPPlatformMediaSettings, - completion: @escaping (NSNumber?, FlutterError?) -> Void + func create( + cameraName: String, settings: PlatformMediaSettings, + completion: @escaping (Result) -> Void ) { // Create FLTCam only if granted camera access (and audio access if audio is enabled) captureSessionQueue.async { [weak self] in @@ -198,7 +198,7 @@ extension CameraPlugin: FCPCameraApi { guard let strongSelf = self else { return } if let error = error { - completion(nil, error) + completion(.failure(error)) return } @@ -212,7 +212,7 @@ extension CameraPlugin: FCPCameraApi { guard let strongSelf = self else { return } if let audioError = audioError { - completion(nil, audioError) + completion(.failure(audioError)) return } @@ -233,8 +233,8 @@ extension CameraPlugin: FCPCameraApi { func createCameraOnSessionQueue( withName: String, - settings: FCPPlatformMediaSettings, - completion: @escaping (NSNumber?, FlutterError?) -> Void + settings: PlatformMediaSettings, + completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.sessionQueueCreateCamera(name: withName, settings: settings, completion: completion) @@ -245,8 +245,8 @@ extension CameraPlugin: FCPCameraApi { // to make it easier to reason about strong/weak self pointers. private func sessionQueueCreateCamera( name: String, - settings: FCPPlatformMediaSettings, - completion: @escaping (NSNumber?, FlutterError?) 
-> Void + settings: PlatformMediaSettings, + completion: @escaping (Result) -> Void ) { let mediaSettingsAVWrapper = FLTCamMediaSettingsAVWrapper() @@ -269,17 +269,16 @@ extension CameraPlugin: FCPCameraApi { ensureToRunOnMainQueue { [weak self] in guard let strongSelf = self else { return } - completion(NSNumber(value: strongSelf.registry.register(newCamera)), nil) + completion(.success(strongSelf.registry.register(newCamera))) } } catch let error as NSError { - completion(nil, CameraPlugin.flutterErrorFromNSError(error)) + completion(.failure(CameraPlugin.pigeonErrorFromNSError(error))) } } - public func initializeCamera( - _ cameraId: Int, - withImageFormat imageFormat: FCPPlatformImageFormatGroup, - completion: @escaping (FlutterError?) -> Void + func initialize( + cameraId: Int64, imageFormat: PlatformImageFormatGroup, + completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.sessionQueueInitializeCamera( @@ -292,9 +291,9 @@ extension CameraPlugin: FCPCameraApi { // This must be called on captureSessionQueue. It is extracted from initializeCamera to make it // easier to reason about strong/weak self pointers. private func sessionQueueInitializeCamera( - _ cameraId: Int, - withImageFormat imageFormat: FCPPlatformImageFormatGroup, - completion: @escaping (FlutterError?) -> Void + _ cameraId: Int64, + withImageFormat imageFormat: PlatformImageFormatGroup, + completion: @escaping (Result) -> Void ) { guard let camera = camera else { return } @@ -309,7 +308,7 @@ extension CameraPlugin: FCPCameraApi { } } - camera.dartAPI = FCPCameraEventApi( + camera.dartAPI = CameraEventApi( binaryMessenger: messenger, messageChannelSuffix: "\(cameraId)" ) @@ -317,77 +316,75 @@ extension CameraPlugin: FCPCameraApi { camera.reportInitializationState() sendDeviceOrientation(UIDevice.current.orientation) camera.start() - completion(nil) + completion(.success(())) } - public func startImageStream(completion: @escaping (FlutterError?) -> Void) { + func startImageStream(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in guard let strongSelf = self else { - completion(nil) + completion(.success(())) return } strongSelf.camera?.startImageStream(with: strongSelf.messenger, completion: completion) } } - public func stopImageStream(completion: @escaping (FlutterError?) -> Void) { + func stopImageStream(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.stopImageStream() - completion(nil) + completion(.success(())) } } - public func receivedImageStreamData(completion: @escaping (FlutterError?) -> Void) { + func receivedImageStreamData(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.receivedImageStreamData() - completion(nil) + completion(.success(())) } } - public func disposeCamera(_ cameraId: Int, completion: @escaping (FlutterError?) -> Void) { + func dispose(cameraId: Int64, completion: @escaping (Result) -> Void) { registry.unregisterTexture(Int64(cameraId)) captureSessionQueue.async { [weak self] in if let strongSelf = self { strongSelf.camera?.close() strongSelf.camera = nil } - completion(nil) + completion(.success(())) } } - public func lockCapture( - _ orientation: FCPPlatformDeviceOrientation, - completion: @escaping (FlutterError?) 
-> Void + func lockCaptureOrientation( + orientation: PlatformDeviceOrientation, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.lockCaptureOrientation(orientation) - completion(nil) + completion(.success(())) } } - public func unlockCaptureOrientation(completion: @escaping (FlutterError?) -> Void) { + func unlockCaptureOrientation(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.unlockCaptureOrientation() - completion(nil) + completion(.success(())) } } - public func takePicture(completion: @escaping (String?, FlutterError?) -> Void) { + func takePicture(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.captureToFile(completion: completion) } } - public func prepareForVideoRecording(completion: @escaping (FlutterError?) -> Void) { + func prepareForVideoRecording(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.setUpCaptureSessionForAudioIfNeeded() - completion(nil) + completion(.success(())) } } - public func startVideoRecording( - withStreaming enableStream: Bool, - completion: @escaping (FlutterError?) -> Void + func startVideoRecording( + enableStream: Bool, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in guard let strongSelf = self else { return } @@ -397,153 +394,148 @@ extension CameraPlugin: FCPCameraApi { } } - public func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + func stopVideoRecording(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.stopVideoRecording(completion: completion) } } - public func pauseVideoRecording(completion: @escaping (FlutterError?) -> Void) { + func pauseVideoRecording(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.pauseVideoRecording() - completion(nil) + completion(.success(())) } } - public func resumeVideoRecording(completion: @escaping (FlutterError?) -> Void) { + func resumeVideoRecording(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.resumeVideoRecording() - completion(nil) + completion(.success(())) } } - public func setFlashMode( - _ mode: FCPPlatformFlashMode, - completion: @escaping (FlutterError?) -> Void + func setFlashMode( + mode: PlatformFlashMode, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setFlashMode(mode, withCompletion: completion) } } - public func setExposureMode( - _ mode: FCPPlatformExposureMode, - completion: @escaping (FlutterError?) -> Void + func setExposureMode( + mode: PlatformExposureMode, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setExposureMode(mode) - completion(nil) + completion(.success(())) } } - public func setExposurePoint( - _ point: FCPPlatformPoint?, - completion: @escaping (FlutterError?) -> Void + func setExposurePoint( + point: PlatformPoint?, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setExposurePoint(point, withCompletion: completion) } } - public func getMinimumExposureOffset(_ completion: @escaping (NSNumber?, FlutterError?) 
-> Void) { + func getMinExposureOffset(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let minOffset = self?.camera?.minimumExposureOffset { - completion(NSNumber(value: minOffset), nil) + completion(.success(minOffset)) } else { - completion(nil, nil) + completion(.success(0)) } } } - public func getMaximumExposureOffset(_ completion: @escaping (NSNumber?, FlutterError?) -> Void) { + func getMaxExposureOffset(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let maxOffset = self?.camera?.maximumExposureOffset { - completion(NSNumber(value: maxOffset), nil) + completion(.success(maxOffset)) } else { - completion(nil, nil) + completion(.success(0)) } } } - public func setExposureOffset(_ offset: Double, completion: @escaping (FlutterError?) -> Void) { + func setExposureOffset(offset: Double, completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.setExposureOffset(offset) - completion(nil) + completion(.success(())) } } - public func setFocusMode( - _ mode: FCPPlatformFocusMode, - completion: @escaping (FlutterError?) -> Void + func setFocusMode( + mode: PlatformFocusMode, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setFocusMode(mode) - completion(nil) + completion(.success(())) } } - public func setFocus(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + func setFocusPoint(point: PlatformPoint?, completion: @escaping (Result) -> Void) + { captureSessionQueue.async { [weak self] in self?.camera?.setFocusPoint(point, completion: completion) } } - public func getMinimumZoomLevel(_ completion: @escaping (NSNumber?, FlutterError?) -> Void) { + func getMinZoomLevel(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let minZoom = self?.camera?.minimumAvailableZoomFactor { - completion(NSNumber(value: minZoom), nil) + completion(.success(minZoom)) } else { - completion(nil, nil) + completion(.success(0)) } } } - public func getMaximumZoomLevel(_ completion: @escaping (NSNumber?, FlutterError?) -> Void) { + func getMaxZoomLevel(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in if let maxZoom = self?.camera?.maximumAvailableZoomFactor { - completion(NSNumber(value: maxZoom), nil) + completion(.success(maxZoom)) } else { - completion(nil, nil) + completion(.success(0)) } } } - public func setZoomLevel(_ zoom: Double, completion: @escaping (FlutterError?) -> Void) { + func setZoomLevel(zoom: Double, completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.setZoomLevel(zoom, withCompletion: completion) } } - public func pausePreview(completion: @escaping (FlutterError?) -> Void) { + func pausePreview(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.pausePreview() - completion(nil) + completion(.success(())) } } - public func resumePreview(completion: @escaping (FlutterError?) -> Void) { + func resumePreview(completion: @escaping (Result) -> Void) { captureSessionQueue.async { [weak self] in self?.camera?.resumePreview() - completion(nil) + completion(.success(())) } } - public func updateDescriptionWhileRecordingCameraName( - _ cameraName: String, - completion: @escaping (FlutterError?) 
-> Void + func updateDescriptionWhileRecording( + cameraName: String, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setDescriptionWhileRecording(cameraName, withCompletion: completion) } } - public func setImageFileFormat( - _ format: FCPPlatformImageFileFormat, - completion: @escaping (FlutterError?) -> Void + func setImageFileFormat( + format: PlatformImageFileFormat, completion: @escaping (Result) -> Void ) { captureSessionQueue.async { [weak self] in self?.camera?.setImageFileFormat(format) - completion(nil) + completion(.success(())) } } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraProperties.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraProperties.swift index fca5c0fef139..e996ef631088 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraProperties.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraProperties.swift @@ -10,9 +10,9 @@ import UIKit import camera_avfoundation_objc #endif -/// Gets AVCaptureFlashMode from FCPPlatformFlashMode. +/// Gets AVCaptureFlashMode from PlatformFlashMode. /// mode - flash mode. -func getAVCaptureFlashMode(for mode: FCPPlatformFlashMode) -> AVCaptureDevice.FlashMode { +func getAVCaptureFlashMode(for mode: PlatformFlashMode) -> AVCaptureDevice.FlashMode { switch mode { case .off: return .off @@ -32,7 +32,7 @@ func getAVCaptureFlashMode(for mode: FCPPlatformFlashMode) -> AVCaptureDevice.Fl /// Gets UIDeviceOrientation from its Pigeon representation. /// orientation - the Pigeon device orientation. func getUIDeviceOrientation( - for orientation: FCPPlatformDeviceOrientation + for orientation: PlatformDeviceOrientation ) -> UIDeviceOrientation { switch orientation { case .portraitDown: @@ -53,7 +53,7 @@ func getUIDeviceOrientation( /// orientation - the UIDeviceOrientation. func getPigeonDeviceOrientation( for orientation: UIDeviceOrientation -) -> FCPPlatformDeviceOrientation { +) -> PlatformDeviceOrientation { switch orientation { case .portraitUpsideDown: return .portraitDown @@ -70,7 +70,7 @@ func getPigeonDeviceOrientation( /// Gets pixel format from its Pigeon representation. /// imageFormat - the Pigeon image format. -func getPixelFormat(for imageFormat: FCPPlatformImageFormatGroup) -> OSType { +func getPixelFormat(for imageFormat: PlatformImageFormatGroup) -> OSType { switch imageFormat { case .bgra8888: return kCVPixelFormatType_32BGRA diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 36b5e3cbfca3..63b9150b2019 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -12,7 +12,7 @@ import Flutter #endif final class DefaultCamera: NSObject, Camera { - var dartAPI: FCPCameraEventApi? + var dartAPI: CameraEventApi? var onFrameAvailable: (() -> Void)? var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA { @@ -42,7 +42,7 @@ final class DefaultCamera: NSObject, Camera { /// All DefaultCamera's state access and capture session related operations should be run on this queue. 
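A behavioral detail of the exposure-offset and zoom-level getters in the extension above: when no camera has been created yet, the old implementation replied with a nil value, while the Swift implementation now completes with .success(0). A small sketch of what a caller observes (illustration only; `plugin` stands for a CameraPlugin assembled with mocks the way the RunnerTests do, reached via @testable import):

// Illustration only: querying the Result-based zoom bounds before any camera exists.
func exampleZoomBounds(plugin: CameraPlugin) {
  plugin.getMaxZoomLevel { result in
    switch result {
    case .success(let maxZoom):
      // 0.0 until a camera has been created; the completion no longer carries nil.
      print("maximum zoom factor: \(maxZoom)")
    case .failure(let error):
      print("unexpected error: \(error)")
    }
  }
}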
private let captureSessionQueue: DispatchQueue - private let mediaSettings: FCPPlatformMediaSettings + private let mediaSettings: PlatformMediaSettings private var framesPerSecond: Double? private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper @@ -125,14 +125,14 @@ final class DefaultCamera: NSObject, Camera { /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637 private var maxStreamingPendingFramesCount = 4 - private var fileFormat = FCPPlatformImageFileFormat.jpeg + private var fileFormat = PlatformImageFileFormat.jpeg private var lockedCaptureOrientation = UIDeviceOrientation.unknown - private var exposureMode = FCPPlatformExposureMode.auto - private var focusMode = FCPPlatformFocusMode.auto - private var flashMode: FCPPlatformFlashMode + private var exposureMode = PlatformExposureMode.auto + private var focusMode = PlatformFocusMode.auto + private var flashMode: PlatformFlashMode - private static func flutterErrorFromNSError(_ error: NSError) -> FlutterError { - return FlutterError( + private static func pigeonErrorFromNSError(_ error: NSError) -> PigeonError { + return PigeonError( code: "Error \(error.code)", message: error.localizedDescription, details: error.domain) @@ -208,7 +208,7 @@ final class DefaultCamera: NSObject, Camera { motionManager.startAccelerometerUpdates() - if mediaSettings.framesPerSecond != nil { + if configuration.mediaSettings.framesPerSecond != nil { // The frame rate can be changed only on a locked for configuration device. try mediaSettingsAVWrapper.lockDevice(captureDevice) defer { mediaSettingsAVWrapper.unlockDevice(captureDevice) } @@ -273,7 +273,7 @@ final class DefaultCamera: NSObject, Camera { // fallback to lower resolution presets. // If none can be selected there is error condition. private func setCaptureSessionPreset( - _ resolutionPreset: FCPPlatformResolutionPreset + _ resolutionPreset: PlatformResolutionPreset ) throws { switch resolutionPreset { case .max: @@ -459,10 +459,10 @@ final class DefaultCamera: NSObject, Camera { func reportInitializationState() { // Get all the state on the current thread, not the main thread. - let state = FCPPlatformCameraState.make( - withPreviewSize: FCPPlatformSize.make( + let state = PlatformCameraState( + previewSize: PlatformSize( // previewSize is set during init, so it will never be nil. - withWidth: previewSize!.width, + width: previewSize!.width, height: previewSize!.height ), exposureMode: exposureMode, @@ -472,7 +472,7 @@ final class DefaultCamera: NSObject, Camera { ) ensureToRunOnMainQueue { [weak self] in - self?.dartAPI?.initialized(with: state) { _ in + self?.dartAPI?.initialized(initialState: state) { _ in // Ignore any errors, as this is just an event broadcast. } } @@ -493,15 +493,16 @@ final class DefaultCamera: NSObject, Camera { } func startVideoRecording( - completion: @escaping (FlutterError?) -> Void, + completion: @escaping (Result) -> Void, messengerForStreaming messenger: FlutterBinaryMessenger? ) { guard !isRecording else { completion( - FlutterError( - code: "Error", - message: "Video is already recording", - details: nil)) + .failure( + PigeonError( + code: "Error", + message: "Video is already recording", + details: nil))) return } @@ -516,7 +517,7 @@ final class DefaultCamera: NSObject, Camera { } /// Main logic to setup the video recording. - private func setUpVideoRecording(completion: @escaping (FlutterError?) 
-> Void) { + private func setUpVideoRecording(completion: @escaping (Result) -> Void) { let videoRecordingPath: String do { videoRecordingPath = try getTemporaryFilePath( @@ -525,16 +526,17 @@ final class DefaultCamera: NSObject, Camera { prefix: "REC_") self.videoRecordingPath = videoRecordingPath } catch let error as NSError { - completion(DefaultCamera.flutterErrorFromNSError(error)) + completion(.failure(DefaultCamera.pigeonErrorFromNSError(error))) return } guard setupWriter(forPath: videoRecordingPath) else { completion( - FlutterError( - code: "IOError", - message: "Setup Writer Failed", - details: nil)) + .failure( + PigeonError( + code: "IOError", + message: "Setup Writer Failed", + details: nil))) return } @@ -546,10 +548,11 @@ final class DefaultCamera: NSObject, Camera { // https://github.com/flutter/flutter/issues/151319 guard let videoWriter = videoWriter, videoWriter.startWriting() else { completion( - FlutterError( - code: "IOError", - message: "AVAssetWriter failed to start writing", - details: videoWriter?.error?.localizedDescription)) + .failure( + PigeonError( + code: "IOError", + message: "AVAssetWriter failed to start writing", + details: videoWriter?.error?.localizedDescription))) return } isFirstVideoSample = true @@ -559,7 +562,7 @@ final class DefaultCamera: NSObject, Camera { recordingTimeOffset = CMTime.zero outputForOffsetAdjusting = captureVideoOutput.avOutput lastAppendedVideoSampleTime = CMTime.negativeInfinity - completion(nil) + completion(.success(())) } private func setupWriter(forPath path: String) -> Bool { @@ -585,7 +588,7 @@ final class DefaultCamera: NSObject, Camera { var compressionProperties: [String: Any] = [:] if let videoBitrate = mediaSettings.videoBitrate { - compressionProperties[AVVideoAverageBitRateKey] = videoBitrate + compressionProperties[AVVideoAverageBitRateKey] = Int(videoBitrate) } if let framesPerSecond = framesPerSecond { @@ -622,7 +625,7 @@ final class DefaultCamera: NSObject, Camera { ] if let audioBitrate = mediaSettings.audioBitrate { - audioSettings[AVEncoderBitRateKey] = audioBitrate + audioSettings[AVEncoderBitRateKey] = Int(audioBitrate) } let newAudioWriterInput = mediaSettingsAVWrapper.assetWriterAudioInput( @@ -654,14 +657,14 @@ final class DefaultCamera: NSObject, Camera { isRecordingPaused = false } - func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + func stopVideoRecording(completion: @escaping (Result) -> Void) { guard isRecording else { let error = NSError( domain: NSCocoaErrorDomain, code: URLError.resourceUnavailable.rawValue, userInfo: [NSLocalizedDescriptionKey: "Video is not recording!"] ) - completion(nil, DefaultCamera.flutterErrorFromNSError(error)) + completion(.failure(DefaultCamera.pigeonErrorFromNSError(error))) return } @@ -675,20 +678,20 @@ final class DefaultCamera: NSObject, Camera { if strongSelf.videoWriter?.status == .completed { strongSelf.updateOrientation() - completion(strongSelf.videoRecordingPath, nil) + completion(.success(strongSelf.videoRecordingPath!)) strongSelf.videoRecordingPath = nil } else { completion( - nil, - FlutterError( - code: "IOError", - message: "AVAssetWriter could not finish writing!", - details: nil)) + .failure( + PigeonError( + code: "IOError", + message: "AVAssetWriter could not finish writing!", + details: nil))) } } } - func captureToFile(completion: @escaping (String?, FlutterError?) 
-> Void) { + func captureToFile(completion: @escaping (Result) -> Void) { var settings = AVCapturePhotoSettings() if mediaSettings.resolutionPreset == .max { @@ -718,7 +721,7 @@ final class DefaultCamera: NSObject, Camera { subfolder: "pictures", prefix: "CAP_") } catch let error as NSError { - completion(nil, DefaultCamera.flutterErrorFromNSError(error)) + completion(.failure(DefaultCamera.pigeonErrorFromNSError(error))) return } @@ -733,10 +736,10 @@ final class DefaultCamera: NSObject, Camera { } if let error = error { - completion(nil, DefaultCamera.flutterErrorFromNSError(error as NSError)) + completion(.failure(DefaultCamera.pigeonErrorFromNSError(error as NSError))) } else { assert(path != nil, "Path must not be nil if no error.") - completion(path, nil) + completion(.success(path!)) } } ) @@ -811,7 +814,7 @@ final class DefaultCamera: NSObject, Camera { } } - func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) { + func lockCaptureOrientation(_ pigeonOrientation: PlatformDeviceOrientation) { let orientation = getUIDeviceOrientation(for: pigeonOrientation) if lockedCaptureOrientation != orientation { lockedCaptureOrientation = orientation @@ -824,11 +827,11 @@ final class DefaultCamera: NSObject, Camera { updateOrientation() } - func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + func setImageFileFormat(_ fileFormat: PlatformImageFileFormat) { self.fileFormat = fileFormat } - func setExposureMode(_ mode: FCPPlatformExposureMode) { + func setExposureMode(_ mode: PlatformExposureMode) { exposureMode = mode applyExposureMode() } @@ -858,14 +861,15 @@ final class DefaultCamera: NSObject, Camera { } func setExposurePoint( - _ point: FCPPlatformPoint?, withCompletion completion: @escaping (FlutterError?) -> Void + _ point: PlatformPoint?, withCompletion completion: @escaping (Result) -> Void ) { guard captureDevice.isExposurePointOfInterestSupported else { completion( - FlutterError( - code: "setExposurePointFailed", - message: "Device does not have exposure point capabilities", - details: nil)) + .failure( + PigeonError( + code: "setExposurePointFailed", + message: "Device does not have exposure point capabilities", + details: nil))) return } @@ -873,26 +877,29 @@ final class DefaultCamera: NSObject, Camera { try? captureDevice.lockForConfiguration() // A nil point resets to the center. let exposurePoint = cgPoint( - for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation) + for: point ?? PlatformPoint(x: 0.5, y: 0.5), withOrientation: orientation) captureDevice.exposurePointOfInterest = exposurePoint captureDevice.unlockForConfiguration() // Retrigger auto exposure applyExposureMode() - completion(nil) + completion(.success(())) } - func setFocusMode(_ mode: FCPPlatformFocusMode) { + func setFocusMode(_ mode: PlatformFocusMode) { focusMode = mode applyFocusMode() } - func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { + func setFocusPoint( + _ point: PlatformPoint?, completion: @escaping (Result) -> Void + ) { guard captureDevice.isFocusPointOfInterestSupported else { completion( - FlutterError( - code: "setFocusPointFailed", - message: "Device does not have focus point capabilities", - details: nil)) + .failure( + PigeonError( + code: "setFocusPointFailed", + message: "Device does not have focus point capabilities", + details: nil))) return } @@ -901,13 +908,13 @@ final class DefaultCamera: NSObject, Camera { // A nil point resets to the center. 
captureDevice.focusPointOfInterest = cgPoint( - for: point ?? .makeWith(x: 0.5, y: 0.5), + for: point ?? PlatformPoint(x: 0.5, y: 0.5), withOrientation: orientation) captureDevice.unlockForConfiguration() // Retrigger auto focus applyFocusMode() - completion(nil) + completion(.success(())) } private func applyFocusMode() { @@ -915,7 +922,7 @@ final class DefaultCamera: NSObject, Camera { } private func applyFocusMode( - _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: CaptureDevice + _ focusMode: PlatformFocusMode, onDevice captureDevice: CaptureDevice ) { try? captureDevice.lockForConfiguration() switch focusMode { @@ -937,7 +944,7 @@ final class DefaultCamera: NSObject, Camera { } private func cgPoint( - for point: FCPPlatformPoint, withOrientation orientation: UIDeviceOrientation + for point: PlatformPoint, withOrientation orientation: UIDeviceOrientation ) -> CGPoint { @@ -963,52 +970,57 @@ final class DefaultCamera: NSObject, Camera { return CGPoint(x: x, y: y) } - func setZoomLevel(_ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) -> Void) { + func setZoomLevel( + _ zoom: CGFloat, withCompletion completion: @escaping (Result) -> Void + ) { if zoom < captureDevice.minAvailableVideoZoomFactor || zoom > captureDevice.maxAvailableVideoZoomFactor { completion( - FlutterError( - code: "ZOOM_ERROR", - message: - "Zoom level out of bounds (zoom level should be between \(captureDevice.minAvailableVideoZoomFactor) and \(captureDevice.maxAvailableVideoZoomFactor).", - details: nil)) + .failure( + PigeonError( + code: "ZOOM_ERROR", + message: + "Zoom level out of bounds (zoom level should be between \(captureDevice.minAvailableVideoZoomFactor) and \(captureDevice.maxAvailableVideoZoomFactor).", + details: nil))) return } do { try captureDevice.lockForConfiguration() } catch let error as NSError { - completion(DefaultCamera.flutterErrorFromNSError(error)) + completion(.failure(DefaultCamera.pigeonErrorFromNSError(error))) return } captureDevice.videoZoomFactor = zoom captureDevice.unlockForConfiguration() - completion(nil) + completion(.success(())) } func setFlashMode( - _ mode: FCPPlatformFlashMode, - withCompletion completion: @escaping (FlutterError?) 
-> Void + _ mode: PlatformFlashMode, + withCompletion completion: @escaping (Result) -> Void ) { switch mode { case .torch: guard captureDevice.hasTorch else { completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not support torch mode", - details: nil) + .failure( + PigeonError( + code: "setFlashModeFailed", + message: "Device does not support torch mode", + details: nil)) ) return } guard captureDevice.isTorchAvailable else { completion( - FlutterError( - code: "setFlashModeFailed", - message: "Torch mode is currently not available", - details: nil)) + .failure( + PigeonError( + code: "setFlashModeFailed", + message: "Torch mode is currently not available", + details: nil))) return } if captureDevice.torchMode != .on { @@ -1019,20 +1031,22 @@ final class DefaultCamera: NSObject, Camera { case .off, .auto, .always: guard captureDevice.hasFlash else { completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not have flash capabilities", - details: nil)) + .failure( + PigeonError( + code: "setFlashModeFailed", + message: "Device does not have flash capabilities", + details: nil))) return } let avFlashMode = getAVCaptureFlashMode(for: mode) guard capturePhotoOutput.supportedFlashModes.contains(avFlashMode) else { completion( - FlutterError( - code: "setFlashModeFailed", - message: "Device does not support this specific flash mode", - details: nil)) + .failure( + PigeonError( + code: "setFlashModeFailed", + message: "Device does not support this specific flash mode", + details: nil))) return } if captureDevice.torchMode != .off { @@ -1045,7 +1059,7 @@ final class DefaultCamera: NSObject, Camera { } flashMode = mode - completion(nil) + completion(.success(())) } func pausePreview() { @@ -1057,14 +1071,15 @@ final class DefaultCamera: NSObject, Camera { } func setDescriptionWhileRecording( - _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void + _ cameraName: String, withCompletion completion: @escaping (Result) -> Void ) { guard isRecording else { completion( - FlutterError( - code: "setDescriptionWhileRecordingFailed", - message: "Device was not recording", - details: nil)) + .failure( + PigeonError( + code: "setDescriptionWhileRecordingFailed", + message: "Device was not recording", + details: nil))) return } @@ -1091,10 +1106,11 @@ final class DefaultCamera: NSObject, Camera { captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) } catch { completion( - FlutterError( - code: "VideoError", - message: "Unable to create video connection", - details: nil)) + .failure( + PigeonError( + code: "VideoError", + message: "Unable to create video connection", + details: nil))) return } @@ -1106,37 +1122,41 @@ final class DefaultCamera: NSObject, Camera { // Add the new connections to the session. 
if !videoCaptureSession.canAddInput(captureVideoInput) { completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video input", - details: nil)) + .failure( + PigeonError( + code: "VideoError", + message: "Unable to switch video input", + details: nil))) } videoCaptureSession.addInputWithNoConnections(captureVideoInput) if !videoCaptureSession.canAddOutput(captureVideoOutput.avOutput) { completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video output", - details: nil)) + .failure( + PigeonError( + code: "VideoError", + message: "Unable to switch video output", + details: nil))) } videoCaptureSession.addOutputWithNoConnections(captureVideoOutput.avOutput) if !videoCaptureSession.canAddConnection(newConnection) { completion( - FlutterError( - code: "VideoError", - message: "Unable to switch video connection", - details: nil)) + .failure( + PigeonError( + code: "VideoError", + message: "Unable to switch video connection", + details: nil))) } videoCaptureSession.addConnection(newConnection) videoCaptureSession.commitConfiguration() - completion(nil) + completion(.success(())) } func startImageStream( - with messenger: any FlutterBinaryMessenger, completion: @escaping (FlutterError?) -> Void + with messenger: any FlutterBinaryMessenger, + completion: @escaping (Result) -> Void ) { startImageStream( with: messenger, @@ -1148,11 +1168,11 @@ final class DefaultCamera: NSObject, Camera { func startImageStream( with messenger: FlutterBinaryMessenger, imageStreamHandler: ImageStreamHandler & NSObjectProtocol, - completion: @escaping (FlutterError?) -> Void + completion: @escaping (Result) -> Void ) { if isStreamingImages { reportErrorMessage("Images from camera are already streaming!") - completion(nil) + completion(.success(())) return } @@ -1165,19 +1185,19 @@ final class DefaultCamera: NSObject, Camera { self.imageStreamHandler = imageStreamHandler threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in guard let strongSelf = self else { - completion(nil) + completion(.success(())) return } strongSelf.captureSessionQueue.async { [weak self] in guard let strongSelf = self else { - completion(nil) + completion(.success(())) return } strongSelf.isStreamingImages = true strongSelf.streamingPendingFramesCount = 0 - completion(nil) + completion(.success(())) } } } @@ -1445,7 +1465,7 @@ final class DefaultCamera: NSObject, Camera { /// Can be called from any thread. private func reportErrorMessage(_ errorMessage: String) { ensureToRunOnMainQueue { [weak self] in - self?.dartAPI?.reportError(errorMessage) { _ in + self?.dartAPI?.error(message: errorMessage) { _ in // Ignore any errors, as this is just an event broadcast. } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift index d441d2bce6e3..ca2ebfc7aaa9 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FormatUtils.swift @@ -40,11 +40,11 @@ enum FormatUtils { /// and frame rate which bestFrameRate returned for that format. 
static func findBestFormat( for captureDevice: CaptureDevice, - mediaSettings: FCPPlatformMediaSettings, + mediaSettings: PlatformMediaSettings, videoDimensionsConverter: VideoDimensionsConverter ) -> (format: CaptureDeviceFormat, frameRate: Double) { let targetResolution = videoDimensionsConverter(captureDevice.flutterActiveFormat) - let targetFrameRate = mediaSettings.framesPerSecond?.doubleValue ?? 0 + let targetFrameRate = Double(mediaSettings.framesPerSecond ?? 0) let preferredSubType = CMFormatDescriptionGetMediaSubType( captureDevice.flutterActiveFormat.formatDescription) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift new file mode 100644 index 000000000000..eb99c0cf7a90 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift @@ -0,0 +1,1323 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Autogenerated from Pigeon (v26.1.0), do not edit directly. +// See also: https://pub.dev/packages/pigeon + +import Foundation + +#if os(iOS) + import Flutter +#elseif os(macOS) + import FlutterMacOS +#else + #error("Unsupported platform.") +#endif + +/// Error class for passing custom error details to Dart side. +final class PigeonError: Error { + let code: String + let message: String? + let details: Sendable? + + init(code: String, message: String?, details: Sendable?) { + self.code = code + self.message = message + self.details = details + } + + var localizedDescription: String { + return + "PigeonError(code: \(code), message: \(message ?? ""), details: \(details ?? "")" + } +} + +private func wrapResult(_ result: Any?) -> [Any?] { + return [result] +} + +private func wrapError(_ error: Any) -> [Any?] { + if let pigeonError = error as? PigeonError { + return [ + pigeonError.code, + pigeonError.message, + pigeonError.details, + ] + } + if let flutterError = error as? FlutterError { + return [ + flutterError.code, + flutterError.message, + flutterError.details, + ] + } + return [ + "\(error)", + "\(type(of: error))", + "Stacktrace: \(Thread.callStackSymbols)", + ] +} + +private func createConnectionError(withChannelName channelName: String) -> PigeonError { + return PigeonError( + code: "channel-error", message: "Unable to establish connection on channel: '\(channelName)'.", + details: "") +} + +private func isNullish(_ value: Any?) -> Bool { + return value is NSNull || value == nil +} + +private func nilOrValue(_ value: Any?) -> T? { + if value is NSNull { return nil } + return value as! T? +} + +func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool { + let cleanLhs = nilOrValue(lhs) as Any? + let cleanRhs = nilOrValue(rhs) as Any? 
+ switch (cleanLhs, cleanRhs) { + case (nil, nil): + return true + + case (nil, _), (_, nil): + return false + + case is (Void, Void): + return true + + case let (cleanLhsHashable, cleanRhsHashable) as (AnyHashable, AnyHashable): + return cleanLhsHashable == cleanRhsHashable + + case let (cleanLhsArray, cleanRhsArray) as ([Any?], [Any?]): + guard cleanLhsArray.count == cleanRhsArray.count else { return false } + for (index, element) in cleanLhsArray.enumerated() { + if !deepEqualsMessages(element, cleanRhsArray[index]) { + return false + } + } + return true + + case let (cleanLhsDictionary, cleanRhsDictionary) as ([AnyHashable: Any?], [AnyHashable: Any?]): + guard cleanLhsDictionary.count == cleanRhsDictionary.count else { return false } + for (key, cleanLhsValue) in cleanLhsDictionary { + guard cleanRhsDictionary.index(forKey: key) != nil else { return false } + if !deepEqualsMessages(cleanLhsValue, cleanRhsDictionary[key]!) { + return false + } + } + return true + + default: + // Any other type shouldn't be able to be used with pigeon. File an issue if you find this to be untrue. + return false + } +} + +func deepHashMessages(value: Any?, hasher: inout Hasher) { + if let valueList = value as? [AnyHashable] { + for item in valueList { deepHashMessages(value: item, hasher: &hasher) } + return + } + + if let valueDict = value as? [AnyHashable: AnyHashable] { + for key in valueDict.keys { + hasher.combine(key) + deepHashMessages(value: valueDict[key]!, hasher: &hasher) + } + return + } + + if let hashableValue = value as? AnyHashable { + hasher.combine(hashableValue.hashValue) + } + + return hasher.combine(String(describing: value)) +} + +enum PlatformCameraLensDirection: Int { + /// Front facing camera (a user looking at the screen is seen by the camera). + case front = 0 + /// Back facing camera (a user looking at the screen is not seen by the camera). + case back = 1 + /// External camera which may not be mounted to the device. + case external = 2 +} + +enum PlatformCameraLensType: Int { + /// A built-in wide-angle camera device type. + case wide = 0 + /// A built-in camera device type with a longer focal length than a wide-angle camera. + case telephoto = 1 + /// A built-in camera device type with a shorter focal length than a wide-angle camera. + case ultraWide = 2 + /// Unknown camera device type. + case unknown = 3 +} + +enum PlatformDeviceOrientation: Int { + case portraitUp = 0 + case landscapeLeft = 1 + case portraitDown = 2 + case landscapeRight = 3 +} + +enum PlatformExposureMode: Int { + case auto = 0 + case locked = 1 +} + +enum PlatformFlashMode: Int { + case off = 0 + case auto = 1 + case always = 2 + case torch = 3 +} + +enum PlatformFocusMode: Int { + case auto = 0 + case locked = 1 +} + +/// Pigeon version of ImageFileFormat. +enum PlatformImageFileFormat: Int { + case jpeg = 0 + case heif = 1 +} + +enum PlatformImageFormatGroup: Int { + case bgra8888 = 0 + case yuv420 = 1 +} + +enum PlatformResolutionPreset: Int { + case low = 0 + case medium = 1 + case high = 2 + case veryHigh = 3 + case ultraHigh = 4 + case max = 5 +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformCameraDescription: Hashable { + /// The name of the camera device. + var name: String + /// The direction the camera is facing. + var lensDirection: PlatformCameraLensDirection + /// The type of the camera lens. 
+ var lensType: PlatformCameraLensType + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraDescription? { + let name = pigeonVar_list[0] as! String + let lensDirection = pigeonVar_list[1] as! PlatformCameraLensDirection + let lensType = pigeonVar_list[2] as! PlatformCameraLensType + + return PlatformCameraDescription( + name: name, + lensDirection: lensDirection, + lensType: lensType + ) + } + func toList() -> [Any?] { + return [ + name, + lensDirection, + lensType, + ] + } + static func == (lhs: PlatformCameraDescription, rhs: PlatformCameraDescription) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) + } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformCameraState: Hashable { + /// The size of the preview, in pixels. + var previewSize: PlatformSize + /// The default exposure mode + var exposureMode: PlatformExposureMode + /// The default focus mode + var focusMode: PlatformFocusMode + /// Whether setting exposure points is supported. + var exposurePointSupported: Bool + /// Whether setting focus points is supported. + var focusPointSupported: Bool + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformCameraState? { + let previewSize = pigeonVar_list[0] as! PlatformSize + let exposureMode = pigeonVar_list[1] as! PlatformExposureMode + let focusMode = pigeonVar_list[2] as! PlatformFocusMode + let exposurePointSupported = pigeonVar_list[3] as! Bool + let focusPointSupported = pigeonVar_list[4] as! Bool + + return PlatformCameraState( + previewSize: previewSize, + exposureMode: exposureMode, + focusMode: focusMode, + exposurePointSupported: exposurePointSupported, + focusPointSupported: focusPointSupported + ) + } + func toList() -> [Any?] { + return [ + previewSize, + exposureMode, + focusMode, + exposurePointSupported, + focusPointSupported, + ] + } + static func == (lhs: PlatformCameraState, rhs: PlatformCameraState) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) + } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformMediaSettings: Hashable { + var resolutionPreset: PlatformResolutionPreset + var framesPerSecond: Int64? = nil + var videoBitrate: Int64? = nil + var audioBitrate: Int64? = nil + var enableAudio: Bool + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformMediaSettings? { + let resolutionPreset = pigeonVar_list[0] as! PlatformResolutionPreset + let framesPerSecond: Int64? = nilOrValue(pigeonVar_list[1]) + let videoBitrate: Int64? = nilOrValue(pigeonVar_list[2]) + let audioBitrate: Int64? = nilOrValue(pigeonVar_list[3]) + let enableAudio = pigeonVar_list[4] as! Bool + + return PlatformMediaSettings( + resolutionPreset: resolutionPreset, + framesPerSecond: framesPerSecond, + videoBitrate: videoBitrate, + audioBitrate: audioBitrate, + enableAudio: enableAudio + ) + } + func toList() -> [Any?] 
{ + return [ + resolutionPreset, + framesPerSecond, + videoBitrate, + audioBitrate, + enableAudio, + ] + } + static func == (lhs: PlatformMediaSettings, rhs: PlatformMediaSettings) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) + } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformPoint: Hashable { + var x: Double + var y: Double + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformPoint? { + let x = pigeonVar_list[0] as! Double + let y = pigeonVar_list[1] as! Double + + return PlatformPoint( + x: x, + y: y + ) + } + func toList() -> [Any?] { + return [ + x, + y, + ] + } + static func == (lhs: PlatformPoint, rhs: PlatformPoint) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) + } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +/// Generated class from Pigeon that represents data sent in messages. +struct PlatformSize: Hashable { + var width: Double + var height: Double + + // swift-format-ignore: AlwaysUseLowerCamelCase + static func fromList(_ pigeonVar_list: [Any?]) -> PlatformSize? { + let width = pigeonVar_list[0] as! Double + let height = pigeonVar_list[1] as! Double + + return PlatformSize( + width: width, + height: height + ) + } + func toList() -> [Any?] { + return [ + width, + height, + ] + } + static func == (lhs: PlatformSize, rhs: PlatformSize) -> Bool { + return deepEqualsMessages(lhs.toList(), rhs.toList()) + } + func hash(into hasher: inout Hasher) { + deepHashMessages(value: toList(), hasher: &hasher) + } +} + +private class MessagesPigeonCodecReader: FlutterStandardReader { + override func readValue(ofType type: UInt8) -> Any? { + switch type { + case 129: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformCameraLensDirection(rawValue: enumResultAsInt) + } + return nil + case 130: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformCameraLensType(rawValue: enumResultAsInt) + } + return nil + case 131: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformDeviceOrientation(rawValue: enumResultAsInt) + } + return nil + case 132: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformExposureMode(rawValue: enumResultAsInt) + } + return nil + case 133: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformFlashMode(rawValue: enumResultAsInt) + } + return nil + case 134: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformFocusMode(rawValue: enumResultAsInt) + } + return nil + case 135: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformImageFileFormat(rawValue: enumResultAsInt) + } + return nil + case 136: + let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformImageFormatGroup(rawValue: enumResultAsInt) + } + return nil + case 137: + let enumResultAsInt: Int? 
= nilOrValue(self.readValue() as! Int?) + if let enumResultAsInt = enumResultAsInt { + return PlatformResolutionPreset(rawValue: enumResultAsInt) + } + return nil + case 138: + return PlatformCameraDescription.fromList(self.readValue() as! [Any?]) + case 139: + return PlatformCameraState.fromList(self.readValue() as! [Any?]) + case 140: + return PlatformMediaSettings.fromList(self.readValue() as! [Any?]) + case 141: + return PlatformPoint.fromList(self.readValue() as! [Any?]) + case 142: + return PlatformSize.fromList(self.readValue() as! [Any?]) + default: + return super.readValue(ofType: type) + } + } +} + +private class MessagesPigeonCodecWriter: FlutterStandardWriter { + override func writeValue(_ value: Any) { + if let value = value as? PlatformCameraLensDirection { + super.writeByte(129) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformCameraLensType { + super.writeByte(130) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformDeviceOrientation { + super.writeByte(131) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformExposureMode { + super.writeByte(132) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformFlashMode { + super.writeByte(133) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformFocusMode { + super.writeByte(134) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformImageFileFormat { + super.writeByte(135) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformImageFormatGroup { + super.writeByte(136) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformResolutionPreset { + super.writeByte(137) + super.writeValue(value.rawValue) + } else if let value = value as? PlatformCameraDescription { + super.writeByte(138) + super.writeValue(value.toList()) + } else if let value = value as? PlatformCameraState { + super.writeByte(139) + super.writeValue(value.toList()) + } else if let value = value as? PlatformMediaSettings { + super.writeByte(140) + super.writeValue(value.toList()) + } else if let value = value as? PlatformPoint { + super.writeByte(141) + super.writeValue(value.toList()) + } else if let value = value as? PlatformSize { + super.writeByte(142) + super.writeValue(value.toList()) + } else { + super.writeValue(value) + } + } +} + +private class MessagesPigeonCodecReaderWriter: FlutterStandardReaderWriter { + override func reader(with data: Data) -> FlutterStandardReader { + return MessagesPigeonCodecReader(data: data) + } + + override func writer(with data: NSMutableData) -> FlutterStandardWriter { + return MessagesPigeonCodecWriter(data: data) + } +} + +class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable { + static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter()) +} + +/// Generated protocol from Pigeon that represents a handler of messages from Flutter. +protocol CameraApi { + /// Returns the list of available cameras. + func getAvailableCameras( + completion: @escaping (Result<[PlatformCameraDescription], Error>) -> Void) + /// Create a new camera with the given settings, and returns its ID. + func create( + cameraName: String, settings: PlatformMediaSettings, + completion: @escaping (Result) -> Void) + /// Initializes the camera with the given ID. + func initialize( + cameraId: Int64, imageFormat: PlatformImageFormatGroup, + completion: @escaping (Result) -> Void) + /// Begins streaming frames from the camera. 
+  func startImageStream(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Stops streaming frames from the camera.
+  func stopImageStream(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Called by the Dart side of the plugin when it has received the last image
+  /// frame sent.
+  ///
+  /// This is used to throttle sending frames across the channel.
+  func receivedImageStreamData(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Indicates that the given camera is no longer being used on the Dart side,
+  /// and any associated resources can be cleaned up.
+  func dispose(cameraId: Int64, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Locks the camera capture to the current device orientation.
+  func lockCaptureOrientation(
+    orientation: PlatformDeviceOrientation, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Unlocks camera capture orientation, allowing it to automatically adapt to
+  /// device orientation.
+  func unlockCaptureOrientation(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Takes a picture with the current settings, and returns the path to the
+  /// resulting file.
+  func takePicture(completion: @escaping (Result<String, Error>) -> Void)
+  /// Does any preprocessing necessary before beginning to record video.
+  func prepareForVideoRecording(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Begins recording video, optionally enabling streaming to Dart at the same
+  /// time.
+  func startVideoRecording(enableStream: Bool, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Stops recording video, and returns the path to the resulting file.
+  func stopVideoRecording(completion: @escaping (Result<String, Error>) -> Void)
+  /// Pauses video recording.
+  func pauseVideoRecording(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Resumes a previously paused video recording.
+  func resumeVideoRecording(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Switches the camera to the given flash mode.
+  func setFlashMode(mode: PlatformFlashMode, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Switches the camera to the given exposure mode.
+  func setExposureMode(
+    mode: PlatformExposureMode, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Anchors auto-exposure to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default exposure point.
+  func setExposurePoint(point: PlatformPoint?, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Returns the minimum exposure offset supported by the camera.
+  func getMinExposureOffset(completion: @escaping (Result<Double, Error>) -> Void)
+  /// Returns the maximum exposure offset supported by the camera.
+  func getMaxExposureOffset(completion: @escaping (Result<Double, Error>) -> Void)
+  /// Sets the exposure offset manually to the given value.
+  func setExposureOffset(offset: Double, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Switches the camera to the given focus mode.
+  func setFocusMode(mode: PlatformFocusMode, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Anchors auto-focus to the given point in (0,1) coordinate space.
+  ///
+  /// A null value resets to the default focus point.
+  func setFocusPoint(point: PlatformPoint?, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Returns the minimum zoom level supported by the camera.
+  func getMinZoomLevel(completion: @escaping (Result<Double, Error>) -> Void)
+  /// Returns the maximum zoom level supported by the camera.
+  func getMaxZoomLevel(completion: @escaping (Result<Double, Error>) -> Void)
+  /// Sets the zoom factor.
+  func setZoomLevel(zoom: Double, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Pauses streaming of preview frames.
+  func pausePreview(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Resumes a previously paused preview stream.
+  func resumePreview(completion: @escaping (Result<Void, Error>) -> Void)
+  /// Changes the camera used while recording video.
+  ///
+  /// This should only be called while video recording is active.
+  func updateDescriptionWhileRecording(
+    cameraName: String, completion: @escaping (Result<Void, Error>) -> Void)
+  /// Sets the file format used for taking pictures.
+  func setImageFileFormat(
+    format: PlatformImageFileFormat, completion: @escaping (Result<Void, Error>) -> Void)
+}
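+
+// NOTE: illustrative sketch only, not generated by Pigeon. A host-side
+// implementation of `CameraApi` is wired up to the Dart side by passing it to
+// `CameraApiSetup.setUp` (defined below) together with the registrar's binary
+// messenger, roughly:
+//
+//   CameraApiSetup.setUp(binaryMessenger: registrar.messenger(), api: plugin)
+//
+// In this plugin the conforming type and the actual registration call are
+// expected to live in CameraPlugin.swift; the line above only sketches the
+// pattern.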
+
+/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
+class CameraApiSetup {
+  static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared }
+  /// Sets up an instance of `CameraApi` to handle messages through the `binaryMessenger`.
+  static func setUp(
+    binaryMessenger: FlutterBinaryMessenger, api: CameraApi?, messageChannelSuffix: String = ""
+  ) {
+    let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : ""
+    /// Returns the list of available cameras.
+    let getAvailableCamerasChannel = FlutterBasicMessageChannel(
+      name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getAvailableCameras\(channelSuffix)",
+      binaryMessenger: binaryMessenger, codec: codec)
+    if let api = api {
+      getAvailableCamerasChannel.setMessageHandler { _, reply in
+        api.getAvailableCameras { result in
+          switch result {
+          case .success(let res):
+            reply(wrapResult(res))
+          case .failure(let error):
+            reply(wrapError(error))
+          }
+        }
+      }
+    } else {
+      getAvailableCamerasChannel.setMessageHandler(nil)
+    }
+    /// Create a new camera with the given settings, and returns its ID.
+    let createChannel = FlutterBasicMessageChannel(
+      name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.create\(channelSuffix)",
+      binaryMessenger: binaryMessenger, codec: codec)
+    if let api = api {
+      createChannel.setMessageHandler { message, reply in
+        let args = message as! [Any?]
+        let cameraNameArg = args[0] as! String
+        let settingsArg = args[1] as! PlatformMediaSettings
+        api.create(cameraName: cameraNameArg, settings: settingsArg) { result in
+          switch result {
+          case .success(let res):
+            reply(wrapResult(res))
+          case .failure(let error):
+            reply(wrapError(error))
+          }
+        }
+      }
+    } else {
+      createChannel.setMessageHandler(nil)
+    }
+    /// Initializes the camera with the given ID.
+    let initializeChannel = FlutterBasicMessageChannel(
+      name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize\(channelSuffix)",
+      binaryMessenger: binaryMessenger, codec: codec)
+    if let api = api {
+      initializeChannel.setMessageHandler { message, reply in
+        let args = message as! [Any?]
+        let cameraIdArg = args[0] as! Int64
+        let imageFormatArg = args[1] as! PlatformImageFormatGroup
+        api.initialize(cameraId: cameraIdArg, imageFormat: imageFormatArg) { result in
+          switch result {
+          case .success:
+            reply(wrapResult(nil))
+          case .failure(let error):
+            reply(wrapError(error))
+          }
+        }
+      }
+    } else {
+      initializeChannel.setMessageHandler(nil)
+    }
+    /// Begins streaming frames from the camera.
+ let startImageStreamChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startImageStream\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + startImageStreamChannel.setMessageHandler { _, reply in + api.startImageStream { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + startImageStreamChannel.setMessageHandler(nil) + } + /// Stops streaming frames from the camera. + let stopImageStreamChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + stopImageStreamChannel.setMessageHandler { _, reply in + api.stopImageStream { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + stopImageStreamChannel.setMessageHandler(nil) + } + /// Called by the Dart side of the plugin when it has received the last image + /// frame sent. + /// + /// This is used to throttle sending frames across the channel. + let receivedImageStreamDataChannel = FlutterBasicMessageChannel( + name: + "dev.flutter.pigeon.camera_avfoundation.CameraApi.receivedImageStreamData\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + receivedImageStreamDataChannel.setMessageHandler { _, reply in + api.receivedImageStreamData { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + receivedImageStreamDataChannel.setMessageHandler(nil) + } + /// Indicates that the given camera is no longer being used on the Dart side, + /// and any associated resources can be cleaned up. + let disposeChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + disposeChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let cameraIdArg = args[0] as! Int64 + api.dispose(cameraId: cameraIdArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + disposeChannel.setMessageHandler(nil) + } + /// Locks the camera capture to the current device orientation. + let lockCaptureOrientationChannel = FlutterBasicMessageChannel( + name: + "dev.flutter.pigeon.camera_avfoundation.CameraApi.lockCaptureOrientation\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + lockCaptureOrientationChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let orientationArg = args[0] as! PlatformDeviceOrientation + api.lockCaptureOrientation(orientation: orientationArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + lockCaptureOrientationChannel.setMessageHandler(nil) + } + /// Unlocks camera capture orientation, allowing it to automatically adapt to + /// device orientation. 
+ let unlockCaptureOrientationChannel = FlutterBasicMessageChannel( + name: + "dev.flutter.pigeon.camera_avfoundation.CameraApi.unlockCaptureOrientation\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + unlockCaptureOrientationChannel.setMessageHandler { _, reply in + api.unlockCaptureOrientation { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + unlockCaptureOrientationChannel.setMessageHandler(nil) + } + /// Takes a picture with the current settings, and returns the path to the + /// resulting file. + let takePictureChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + takePictureChannel.setMessageHandler { _, reply in + api.takePicture { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + takePictureChannel.setMessageHandler(nil) + } + /// Does any preprocessing necessary before beginning to record video. + let prepareForVideoRecordingChannel = FlutterBasicMessageChannel( + name: + "dev.flutter.pigeon.camera_avfoundation.CameraApi.prepareForVideoRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + prepareForVideoRecordingChannel.setMessageHandler { _, reply in + api.prepareForVideoRecording { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + prepareForVideoRecordingChannel.setMessageHandler(nil) + } + /// Begins recording video, optionally enabling streaming to Dart at the same + /// time. + let startVideoRecordingChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.startVideoRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + startVideoRecordingChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let enableStreamArg = args[0] as! Bool + api.startVideoRecording(enableStream: enableStreamArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + startVideoRecordingChannel.setMessageHandler(nil) + } + /// Stops recording video, and results the path to the resulting file. + let stopVideoRecordingChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.stopVideoRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + stopVideoRecordingChannel.setMessageHandler { _, reply in + api.stopVideoRecording { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + stopVideoRecordingChannel.setMessageHandler(nil) + } + /// Pauses video recording. 
+ let pauseVideoRecordingChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pauseVideoRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + pauseVideoRecordingChannel.setMessageHandler { _, reply in + api.pauseVideoRecording { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + pauseVideoRecordingChannel.setMessageHandler(nil) + } + /// Resumes a previously paused video recording. + let resumeVideoRecordingChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumeVideoRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + resumeVideoRecordingChannel.setMessageHandler { _, reply in + api.resumeVideoRecording { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + resumeVideoRecordingChannel.setMessageHandler(nil) + } + /// Switches the camera to the given flash mode. + let setFlashModeChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setFlashModeChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let modeArg = args[0] as! PlatformFlashMode + api.setFlashMode(mode: modeArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setFlashModeChannel.setMessageHandler(nil) + } + /// Switches the camera to the given exposure mode. + let setExposureModeChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setExposureModeChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let modeArg = args[0] as! PlatformExposureMode + api.setExposureMode(mode: modeArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setExposureModeChannel.setMessageHandler(nil) + } + /// Anchors auto-exposure to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default exposure point. + let setExposurePointChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposurePoint\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setExposurePointChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let pointArg: PlatformPoint? = nilOrValue(args[0]) + api.setExposurePoint(point: pointArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setExposurePointChannel.setMessageHandler(nil) + } + /// Returns the minimum exposure offset supported by the camera. 
+ let getMinExposureOffsetChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinExposureOffset\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + getMinExposureOffsetChannel.setMessageHandler { _, reply in + api.getMinExposureOffset { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + getMinExposureOffsetChannel.setMessageHandler(nil) + } + /// Returns the maximum exposure offset supported by the camera. + let getMaxExposureOffsetChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxExposureOffset\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + getMaxExposureOffsetChannel.setMessageHandler { _, reply in + api.getMaxExposureOffset { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + getMaxExposureOffsetChannel.setMessageHandler(nil) + } + /// Sets the exposure offset manually to the given value. + let setExposureOffsetChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureOffset\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setExposureOffsetChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let offsetArg = args[0] as! Double + api.setExposureOffset(offset: offsetArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setExposureOffsetChannel.setMessageHandler(nil) + } + /// Switches the camera to the given focus mode. + let setFocusModeChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setFocusModeChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let modeArg = args[0] as! PlatformFocusMode + api.setFocusMode(mode: modeArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setFocusModeChannel.setMessageHandler(nil) + } + /// Anchors auto-focus to the given point in (0,1) coordinate space. + /// + /// A null value resets to the default focus point. + let setFocusPointChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setFocusPointChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let pointArg: PlatformPoint? = nilOrValue(args[0]) + api.setFocusPoint(point: pointArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setFocusPointChannel.setMessageHandler(nil) + } + /// Returns the minimum zoom level supported by the camera. 
+ let getMinZoomLevelChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + getMinZoomLevelChannel.setMessageHandler { _, reply in + api.getMinZoomLevel { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + getMinZoomLevelChannel.setMessageHandler(nil) + } + /// Returns the maximum zoom level supported by the camera. + let getMaxZoomLevelChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + getMaxZoomLevelChannel.setMessageHandler { _, reply in + api.getMaxZoomLevel { result in + switch result { + case .success(let res): + reply(wrapResult(res)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + getMaxZoomLevelChannel.setMessageHandler(nil) + } + /// Sets the zoom factor. + let setZoomLevelChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setZoomLevelChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let zoomArg = args[0] as! Double + api.setZoomLevel(zoom: zoomArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setZoomLevelChannel.setMessageHandler(nil) + } + /// Pauses streaming of preview frames. + let pausePreviewChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + pausePreviewChannel.setMessageHandler { _, reply in + api.pausePreview { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + pausePreviewChannel.setMessageHandler(nil) + } + /// Resumes a previously paused preview stream. + let resumePreviewChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + resumePreviewChannel.setMessageHandler { _, reply in + api.resumePreview { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + resumePreviewChannel.setMessageHandler(nil) + } + /// Changes the camera used while recording video. + /// + /// This should only be called while video recording is active. + let updateDescriptionWhileRecordingChannel = FlutterBasicMessageChannel( + name: + "dev.flutter.pigeon.camera_avfoundation.CameraApi.updateDescriptionWhileRecording\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + updateDescriptionWhileRecordingChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let cameraNameArg = args[0] as! 
String + api.updateDescriptionWhileRecording(cameraName: cameraNameArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + updateDescriptionWhileRecordingChannel.setMessageHandler(nil) + } + /// Sets the file format used for taking pictures. + let setImageFileFormatChannel = FlutterBasicMessageChannel( + name: "dev.flutter.pigeon.camera_avfoundation.CameraApi.setImageFileFormat\(channelSuffix)", + binaryMessenger: binaryMessenger, codec: codec) + if let api = api { + setImageFileFormatChannel.setMessageHandler { message, reply in + let args = message as! [Any?] + let formatArg = args[0] as! PlatformImageFileFormat + api.setImageFileFormat(format: formatArg) { result in + switch result { + case .success: + reply(wrapResult(nil)) + case .failure(let error): + reply(wrapError(error)) + } + } + } + } else { + setImageFileFormatChannel.setMessageHandler(nil) + } + } +} +/// Handler for native callbacks that are not tied to a specific camera ID. +/// +/// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. +protocol CameraGlobalEventApiProtocol { + /// Called when the device's physical orientation changes. + func deviceOrientationChanged( + orientation orientationArg: PlatformDeviceOrientation, + completion: @escaping (Result) -> Void) +} +class CameraGlobalEventApi: CameraGlobalEventApiProtocol { + private let binaryMessenger: FlutterBinaryMessenger + private let messageChannelSuffix: String + init(binaryMessenger: FlutterBinaryMessenger, messageChannelSuffix: String = "") { + self.binaryMessenger = binaryMessenger + self.messageChannelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : "" + } + var codec: MessagesPigeonCodec { + return MessagesPigeonCodec.shared + } + /// Called when the device's physical orientation changes. + func deviceOrientationChanged( + orientation orientationArg: PlatformDeviceOrientation, + completion: @escaping (Result) -> Void + ) { + let channelName: String = + "dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel( + name: channelName, binaryMessenger: binaryMessenger, codec: codec) + channel.sendMessage([orientationArg] as [Any?]) { response in + guard let listResponse = response as? [Any?] else { + completion(.failure(createConnectionError(withChannelName: channelName))) + return + } + if listResponse.count > 1 { + let code: String = listResponse[0] as! String + let message: String? = nilOrValue(listResponse[1]) + let details: String? = nilOrValue(listResponse[2]) + completion(.failure(PigeonError(code: code, message: message, details: details))) + } else { + completion(.success(())) + } + } + } +} +/// Handler for native callbacks that are tied to a specific camera ID. +/// +/// This is intended to be initialized with the camera ID as a suffix. +/// +/// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift. +protocol CameraEventApiProtocol { + /// Called when the camera is inialitized for use. + func initialized( + initialState initialStateArg: PlatformCameraState, + completion: @escaping (Result) -> Void) + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. 
+ func error(message messageArg: String, completion: @escaping (Result) -> Void) +} +class CameraEventApi: CameraEventApiProtocol { + private let binaryMessenger: FlutterBinaryMessenger + private let messageChannelSuffix: String + init(binaryMessenger: FlutterBinaryMessenger, messageChannelSuffix: String = "") { + self.binaryMessenger = binaryMessenger + self.messageChannelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : "" + } + var codec: MessagesPigeonCodec { + return MessagesPigeonCodec.shared + } + /// Called when the camera is inialitized for use. + func initialized( + initialState initialStateArg: PlatformCameraState, + completion: @escaping (Result) -> Void + ) { + let channelName: String = + "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel( + name: channelName, binaryMessenger: binaryMessenger, codec: codec) + channel.sendMessage([initialStateArg] as [Any?]) { response in + guard let listResponse = response as? [Any?] else { + completion(.failure(createConnectionError(withChannelName: channelName))) + return + } + if listResponse.count > 1 { + let code: String = listResponse[0] as! String + let message: String? = nilOrValue(listResponse[1]) + let details: String? = nilOrValue(listResponse[2]) + completion(.failure(PigeonError(code: code, message: message, details: details))) + } else { + completion(.success(())) + } + } + } + /// Called when an error occurs in the camera. + /// + /// This should be used for errors that occur outside of the context of + /// handling a specific HostApi call, such as during streaming. + func error(message messageArg: String, completion: @escaping (Result) -> Void) + { + let channelName: String = + "dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error\(messageChannelSuffix)" + let channel = FlutterBasicMessageChannel( + name: channelName, binaryMessenger: binaryMessenger, codec: codec) + channel.sendMessage([messageArg] as [Any?]) { response in + guard let listResponse = response as? [Any?] else { + completion(.failure(createConnectionError(withChannelName: channelName))) + return + } + if listResponse.count > 1 { + let code: String = listResponse[0] as! String + let message: String? = nilOrValue(listResponse[1]) + let details: String? 
= nilOrValue(listResponse[2]) + completion(.failure(PigeonError(code: code, message: message, details: details))) + } else { + completion(.success(())) + } + } + } +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/Resources/PrivacyInfo.xcprivacy b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/Resources/PrivacyInfo.xcprivacy deleted file mode 100644 index a34b7e2e60cc..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/Resources/PrivacyInfo.xcprivacy +++ /dev/null @@ -1,14 +0,0 @@ - - - - - NSPrivacyTrackingDomains - - NSPrivacyAccessedAPITypes - - NSPrivacyCollectedDataTypes - - NSPrivacyTracking - - - diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h deleted file mode 100644 index c16177a7f69e..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/camera_avfoundation.h +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#import "messages.g.h" diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/messages.g.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/messages.g.h deleted file mode 100644 index 7064a15caac7..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/messages.g.h +++ /dev/null @@ -1,334 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.0), do not edit directly. -// See also: https://pub.dev/packages/pigeon - -#import - -@protocol FlutterBinaryMessenger; -@protocol FlutterMessageCodec; -@class FlutterError; -@class FlutterStandardTypedData; - -NS_ASSUME_NONNULL_BEGIN - -typedef NS_ENUM(NSUInteger, FCPPlatformCameraLensDirection) { - /// Front facing camera (a user looking at the screen is seen by the camera). - FCPPlatformCameraLensDirectionFront = 0, - /// Back facing camera (a user looking at the screen is not seen by the camera). - FCPPlatformCameraLensDirectionBack = 1, - /// External camera which may not be mounted to the device. - FCPPlatformCameraLensDirectionExternal = 2, -}; - -/// Wrapper for FCPPlatformCameraLensDirection to allow for nullability. -@interface FCPPlatformCameraLensDirectionBox : NSObject -@property(nonatomic, assign) FCPPlatformCameraLensDirection value; -- (instancetype)initWithValue:(FCPPlatformCameraLensDirection)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformCameraLensType) { - /// A built-in wide-angle camera device type. - FCPPlatformCameraLensTypeWide = 0, - /// A built-in camera device type with a longer focal length than a wide-angle camera. - FCPPlatformCameraLensTypeTelephoto = 1, - /// A built-in camera device type with a shorter focal length than a wide-angle camera. 
- FCPPlatformCameraLensTypeUltraWide = 2, - /// Unknown camera device type. - FCPPlatformCameraLensTypeUnknown = 3, -}; - -/// Wrapper for FCPPlatformCameraLensType to allow for nullability. -@interface FCPPlatformCameraLensTypeBox : NSObject -@property(nonatomic, assign) FCPPlatformCameraLensType value; -- (instancetype)initWithValue:(FCPPlatformCameraLensType)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformDeviceOrientation) { - FCPPlatformDeviceOrientationPortraitUp = 0, - FCPPlatformDeviceOrientationLandscapeLeft = 1, - FCPPlatformDeviceOrientationPortraitDown = 2, - FCPPlatformDeviceOrientationLandscapeRight = 3, -}; - -/// Wrapper for FCPPlatformDeviceOrientation to allow for nullability. -@interface FCPPlatformDeviceOrientationBox : NSObject -@property(nonatomic, assign) FCPPlatformDeviceOrientation value; -- (instancetype)initWithValue:(FCPPlatformDeviceOrientation)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformExposureMode) { - FCPPlatformExposureModeAuto = 0, - FCPPlatformExposureModeLocked = 1, -}; - -/// Wrapper for FCPPlatformExposureMode to allow for nullability. -@interface FCPPlatformExposureModeBox : NSObject -@property(nonatomic, assign) FCPPlatformExposureMode value; -- (instancetype)initWithValue:(FCPPlatformExposureMode)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformFlashMode) { - FCPPlatformFlashModeOff = 0, - FCPPlatformFlashModeAuto = 1, - FCPPlatformFlashModeAlways = 2, - FCPPlatformFlashModeTorch = 3, -}; - -/// Wrapper for FCPPlatformFlashMode to allow for nullability. -@interface FCPPlatformFlashModeBox : NSObject -@property(nonatomic, assign) FCPPlatformFlashMode value; -- (instancetype)initWithValue:(FCPPlatformFlashMode)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) { - FCPPlatformFocusModeAuto = 0, - FCPPlatformFocusModeLocked = 1, -}; - -/// Wrapper for FCPPlatformFocusMode to allow for nullability. -@interface FCPPlatformFocusModeBox : NSObject -@property(nonatomic, assign) FCPPlatformFocusMode value; -- (instancetype)initWithValue:(FCPPlatformFocusMode)value; -@end - -/// Pigeon version of ImageFileFormat. -typedef NS_ENUM(NSUInteger, FCPPlatformImageFileFormat) { - FCPPlatformImageFileFormatJpeg = 0, - FCPPlatformImageFileFormatHeif = 1, -}; - -/// Wrapper for FCPPlatformImageFileFormat to allow for nullability. -@interface FCPPlatformImageFileFormatBox : NSObject -@property(nonatomic, assign) FCPPlatformImageFileFormat value; -- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformImageFormatGroup) { - FCPPlatformImageFormatGroupBgra8888 = 0, - FCPPlatformImageFormatGroupYuv420 = 1, -}; - -/// Wrapper for FCPPlatformImageFormatGroup to allow for nullability. -@interface FCPPlatformImageFormatGroupBox : NSObject -@property(nonatomic, assign) FCPPlatformImageFormatGroup value; -- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value; -@end - -typedef NS_ENUM(NSUInteger, FCPPlatformResolutionPreset) { - FCPPlatformResolutionPresetLow = 0, - FCPPlatformResolutionPresetMedium = 1, - FCPPlatformResolutionPresetHigh = 2, - FCPPlatformResolutionPresetVeryHigh = 3, - FCPPlatformResolutionPresetUltraHigh = 4, - FCPPlatformResolutionPresetMax = 5, -}; - -/// Wrapper for FCPPlatformResolutionPreset to allow for nullability. 
-@interface FCPPlatformResolutionPresetBox : NSObject -@property(nonatomic, assign) FCPPlatformResolutionPreset value; -- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value; -@end - -@class FCPPlatformCameraDescription; -@class FCPPlatformCameraState; -@class FCPPlatformMediaSettings; -@class FCPPlatformPoint; -@class FCPPlatformSize; - -@interface FCPPlatformCameraDescription : NSObject -/// `init` unavailable to enforce nonnull fields, see the `make` class method. -- (instancetype)init NS_UNAVAILABLE; -+ (instancetype)makeWithName:(NSString *)name - lensDirection:(FCPPlatformCameraLensDirection)lensDirection - lensType:(FCPPlatformCameraLensType)lensType; -/// The name of the camera device. -@property(nonatomic, copy) NSString *name; -/// The direction the camera is facing. -@property(nonatomic, assign) FCPPlatformCameraLensDirection lensDirection; -/// The type of the camera lens. -@property(nonatomic, assign) FCPPlatformCameraLensType lensType; -@end - -@interface FCPPlatformCameraState : NSObject -/// `init` unavailable to enforce nonnull fields, see the `make` class method. -- (instancetype)init NS_UNAVAILABLE; -+ (instancetype)makeWithPreviewSize:(FCPPlatformSize *)previewSize - exposureMode:(FCPPlatformExposureMode)exposureMode - focusMode:(FCPPlatformFocusMode)focusMode - exposurePointSupported:(BOOL)exposurePointSupported - focusPointSupported:(BOOL)focusPointSupported; -/// The size of the preview, in pixels. -@property(nonatomic, strong) FCPPlatformSize *previewSize; -/// The default exposure mode -@property(nonatomic, assign) FCPPlatformExposureMode exposureMode; -/// The default focus mode -@property(nonatomic, assign) FCPPlatformFocusMode focusMode; -/// Whether setting exposure points is supported. -@property(nonatomic, assign) BOOL exposurePointSupported; -/// Whether setting focus points is supported. -@property(nonatomic, assign) BOOL focusPointSupported; -@end - -@interface FCPPlatformMediaSettings : NSObject -/// `init` unavailable to enforce nonnull fields, see the `make` class method. -- (instancetype)init NS_UNAVAILABLE; -+ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset - framesPerSecond:(nullable NSNumber *)framesPerSecond - videoBitrate:(nullable NSNumber *)videoBitrate - audioBitrate:(nullable NSNumber *)audioBitrate - enableAudio:(BOOL)enableAudio; -@property(nonatomic, assign) FCPPlatformResolutionPreset resolutionPreset; -@property(nonatomic, strong, nullable) NSNumber *framesPerSecond; -@property(nonatomic, strong, nullable) NSNumber *videoBitrate; -@property(nonatomic, strong, nullable) NSNumber *audioBitrate; -@property(nonatomic, assign) BOOL enableAudio; -@end - -@interface FCPPlatformPoint : NSObject -/// `init` unavailable to enforce nonnull fields, see the `make` class method. -- (instancetype)init NS_UNAVAILABLE; -+ (instancetype)makeWithX:(double)x y:(double)y; -@property(nonatomic, assign) double x; -@property(nonatomic, assign) double y; -@end - -@interface FCPPlatformSize : NSObject -/// `init` unavailable to enforce nonnull fields, see the `make` class method. -- (instancetype)init NS_UNAVAILABLE; -+ (instancetype)makeWithWidth:(double)width height:(double)height; -@property(nonatomic, assign) double width; -@property(nonatomic, assign) double height; -@end - -/// The codec used by all APIs. -NSObject *FCPGetMessagesCodec(void); - -@protocol FCPCameraApi -/// Returns the list of available cameras. 
-- (void)availableCamerasWithCompletion:(void (^)(NSArray *_Nullable, - FlutterError *_Nullable))completion; -/// Create a new camera with the given settings, and returns its ID. -- (void)createCameraWithName:(NSString *)cameraName - settings:(FCPPlatformMediaSettings *)settings - completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion; -/// Initializes the camera with the given ID. -- (void)initializeCamera:(NSInteger)cameraId - withImageFormat:(FCPPlatformImageFormatGroup)imageFormat - completion:(void (^)(FlutterError *_Nullable))completion; -/// Begins streaming frames from the camera. -- (void)startImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Stops streaming frames from the camera. -- (void)stopImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Called by the Dart side of the plugin when it has received the last image -/// frame sent. -/// -/// This is used to throttle sending frames across the channel. -- (void)receivedImageStreamDataWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Indicates that the given camera is no longer being used on the Dart side, -/// and any associated resources can be cleaned up. -- (void)disposeCamera:(NSInteger)cameraId completion:(void (^)(FlutterError *_Nullable))completion; -/// Locks the camera capture to the current device orientation. -- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation - completion:(void (^)(FlutterError *_Nullable))completion; -/// Unlocks camera capture orientation, allowing it to automatically adapt to -/// device orientation. -- (void)unlockCaptureOrientationWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Takes a picture with the current settings, and returns the path to the -/// resulting file. -- (void)takePictureWithCompletion:(void (^)(NSString *_Nullable, - FlutterError *_Nullable))completion; -/// Does any preprocessing necessary before beginning to record video. -- (void)prepareForVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Begins recording video, optionally enabling streaming to Dart at the same -/// time. -- (void)startVideoRecordingWithStreaming:(BOOL)enableStream - completion:(void (^)(FlutterError *_Nullable))completion; -/// Stops recording video, and results the path to the resulting file. -- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable, - FlutterError *_Nullable))completion; -/// Pauses video recording. -- (void)pauseVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Resumes a previously paused video recording. -- (void)resumeVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Switches the camera to the given flash mode. -- (void)setFlashMode:(FCPPlatformFlashMode)mode - completion:(void (^)(FlutterError *_Nullable))completion; -/// Switches the camera to the given exposure mode. -- (void)setExposureMode:(FCPPlatformExposureMode)mode - completion:(void (^)(FlutterError *_Nullable))completion; -/// Anchors auto-exposure to the given point in (0,1) coordinate space. -/// -/// A null value resets to the default exposure point. -- (void)setExposurePoint:(nullable FCPPlatformPoint *)point - completion:(void (^)(FlutterError *_Nullable))completion; -/// Returns the minimum exposure offset supported by the camera. 
-- (void)getMinimumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion; -/// Returns the maximum exposure offset supported by the camera. -- (void)getMaximumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion; -/// Sets the exposure offset manually to the given value. -- (void)setExposureOffset:(double)offset completion:(void (^)(FlutterError *_Nullable))completion; -/// Switches the camera to the given focus mode. -- (void)setFocusMode:(FCPPlatformFocusMode)mode - completion:(void (^)(FlutterError *_Nullable))completion; -/// Anchors auto-focus to the given point in (0,1) coordinate space. -/// -/// A null value resets to the default focus point. -- (void)setFocusPoint:(nullable FCPPlatformPoint *)point - completion:(void (^)(FlutterError *_Nullable))completion; -/// Returns the minimum zoom level supported by the camera. -- (void)getMinimumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion; -/// Returns the maximum zoom level supported by the camera. -- (void)getMaximumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion; -/// Sets the zoom factor. -- (void)setZoomLevel:(double)zoom completion:(void (^)(FlutterError *_Nullable))completion; -/// Pauses streaming of preview frames. -- (void)pausePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Resumes a previously paused preview stream. -- (void)resumePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion; -/// Changes the camera used while recording video. -/// -/// This should only be called while video recording is active. -- (void)updateDescriptionWhileRecordingCameraName:(NSString *)cameraName - completion:(void (^)(FlutterError *_Nullable))completion; -/// Sets the file format used for taking pictures. -- (void)setImageFileFormat:(FCPPlatformImageFileFormat)format - completion:(void (^)(FlutterError *_Nullable))completion; -@end - -extern void SetUpFCPCameraApi(id binaryMessenger, - NSObject *_Nullable api); - -extern void SetUpFCPCameraApiWithSuffix(id binaryMessenger, - NSObject *_Nullable api, - NSString *messageChannelSuffix); - -/// Handler for native callbacks that are not tied to a specific camera ID. -@interface FCPCameraGlobalEventApi : NSObject -- (instancetype)initWithBinaryMessenger:(id)binaryMessenger; -- (instancetype)initWithBinaryMessenger:(id)binaryMessenger - messageChannelSuffix:(nullable NSString *)messageChannelSuffix; -/// Called when the device's physical orientation changes. -- (void)deviceOrientationChangedOrientation:(FCPPlatformDeviceOrientation)orientation - completion:(void (^)(FlutterError *_Nullable))completion; -@end - -/// Handler for native callbacks that are tied to a specific camera ID. -/// -/// This is intended to be initialized with the camera ID as a suffix. -@interface FCPCameraEventApi : NSObject -- (instancetype)initWithBinaryMessenger:(id)binaryMessenger; -- (instancetype)initWithBinaryMessenger:(id)binaryMessenger - messageChannelSuffix:(nullable NSString *)messageChannelSuffix; -/// Called when the camera is inialitized for use. -- (void)initializedWithState:(FCPPlatformCameraState *)initialState - completion:(void (^)(FlutterError *_Nullable))completion; -/// Called when an error occurs in the camera. -/// -/// This should be used for errors that occur outside of the context of -/// handling a specific HostApi call, such as during streaming. 
-- (void)reportError:(NSString *)message completion:(void (^)(FlutterError *_Nullable))completion; -@end - -NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/messages.g.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/messages.g.m deleted file mode 100644 index 64524e708c7d..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/messages.g.m +++ /dev/null @@ -1,1367 +0,0 @@ -// Copyright 2013 The Flutter Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. -// Autogenerated from Pigeon (v26.1.0), do not edit directly. -// See also: https://pub.dev/packages/pigeon - -#import "./include/camera_avfoundation/messages.g.h" - -#if TARGET_OS_OSX -#import -#else -#import -#endif - -#if !__has_feature(objc_arc) -#error File requires ARC to be enabled. -#endif - -static NSArray *wrapResult(id result, FlutterError *error) { - if (error) { - return @[ - error.code ?: [NSNull null], error.message ?: [NSNull null], error.details ?: [NSNull null] - ]; - } - return @[ result ?: [NSNull null] ]; -} - -static FlutterError *createConnectionError(NSString *channelName) { - return [FlutterError - errorWithCode:@"channel-error" - message:[NSString stringWithFormat:@"%@/%@/%@", - @"Unable to establish connection on channel: '", - channelName, @"'."] - details:@""]; -} - -static id GetNullableObjectAtIndex(NSArray *array, NSInteger key) { - id result = array[key]; - return (result == [NSNull null]) ? nil : result; -} - -@implementation FCPPlatformCameraLensDirectionBox -- (instancetype)initWithValue:(FCPPlatformCameraLensDirection)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformCameraLensTypeBox -- (instancetype)initWithValue:(FCPPlatformCameraLensType)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformDeviceOrientationBox -- (instancetype)initWithValue:(FCPPlatformDeviceOrientation)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformExposureModeBox -- (instancetype)initWithValue:(FCPPlatformExposureMode)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformFlashModeBox -- (instancetype)initWithValue:(FCPPlatformFlashMode)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformFocusModeBox -- (instancetype)initWithValue:(FCPPlatformFocusMode)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -/// Pigeon version of ImageFileFormat. 
-@implementation FCPPlatformImageFileFormatBox -- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformImageFormatGroupBox -- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@implementation FCPPlatformResolutionPresetBox -- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value { - self = [super init]; - if (self) { - _value = value; - } - return self; -} -@end - -@interface FCPPlatformCameraDescription () -+ (FCPPlatformCameraDescription *)fromList:(NSArray *)list; -+ (nullable FCPPlatformCameraDescription *)nullableFromList:(NSArray *)list; -- (NSArray *)toList; -@end - -@interface FCPPlatformCameraState () -+ (FCPPlatformCameraState *)fromList:(NSArray *)list; -+ (nullable FCPPlatformCameraState *)nullableFromList:(NSArray *)list; -- (NSArray *)toList; -@end - -@interface FCPPlatformMediaSettings () -+ (FCPPlatformMediaSettings *)fromList:(NSArray *)list; -+ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list; -- (NSArray *)toList; -@end - -@interface FCPPlatformPoint () -+ (FCPPlatformPoint *)fromList:(NSArray *)list; -+ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list; -- (NSArray *)toList; -@end - -@interface FCPPlatformSize () -+ (FCPPlatformSize *)fromList:(NSArray *)list; -+ (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list; -- (NSArray *)toList; -@end - -@implementation FCPPlatformCameraDescription -+ (instancetype)makeWithName:(NSString *)name - lensDirection:(FCPPlatformCameraLensDirection)lensDirection - lensType:(FCPPlatformCameraLensType)lensType { - FCPPlatformCameraDescription *pigeonResult = [[FCPPlatformCameraDescription alloc] init]; - pigeonResult.name = name; - pigeonResult.lensDirection = lensDirection; - pigeonResult.lensType = lensType; - return pigeonResult; -} -+ (FCPPlatformCameraDescription *)fromList:(NSArray *)list { - FCPPlatformCameraDescription *pigeonResult = [[FCPPlatformCameraDescription alloc] init]; - pigeonResult.name = GetNullableObjectAtIndex(list, 0); - FCPPlatformCameraLensDirectionBox *boxedFCPPlatformCameraLensDirection = - GetNullableObjectAtIndex(list, 1); - pigeonResult.lensDirection = boxedFCPPlatformCameraLensDirection.value; - FCPPlatformCameraLensTypeBox *boxedFCPPlatformCameraLensType = GetNullableObjectAtIndex(list, 2); - pigeonResult.lensType = boxedFCPPlatformCameraLensType.value; - return pigeonResult; -} -+ (nullable FCPPlatformCameraDescription *)nullableFromList:(NSArray *)list { - return (list) ? 
[FCPPlatformCameraDescription fromList:list] : nil; -} -- (NSArray *)toList { - return @[ - self.name ?: [NSNull null], - [[FCPPlatformCameraLensDirectionBox alloc] initWithValue:self.lensDirection], - [[FCPPlatformCameraLensTypeBox alloc] initWithValue:self.lensType], - ]; -} -@end - -@implementation FCPPlatformCameraState -+ (instancetype)makeWithPreviewSize:(FCPPlatformSize *)previewSize - exposureMode:(FCPPlatformExposureMode)exposureMode - focusMode:(FCPPlatformFocusMode)focusMode - exposurePointSupported:(BOOL)exposurePointSupported - focusPointSupported:(BOOL)focusPointSupported { - FCPPlatformCameraState *pigeonResult = [[FCPPlatformCameraState alloc] init]; - pigeonResult.previewSize = previewSize; - pigeonResult.exposureMode = exposureMode; - pigeonResult.focusMode = focusMode; - pigeonResult.exposurePointSupported = exposurePointSupported; - pigeonResult.focusPointSupported = focusPointSupported; - return pigeonResult; -} -+ (FCPPlatformCameraState *)fromList:(NSArray *)list { - FCPPlatformCameraState *pigeonResult = [[FCPPlatformCameraState alloc] init]; - pigeonResult.previewSize = GetNullableObjectAtIndex(list, 0); - FCPPlatformExposureModeBox *boxedFCPPlatformExposureMode = GetNullableObjectAtIndex(list, 1); - pigeonResult.exposureMode = boxedFCPPlatformExposureMode.value; - FCPPlatformFocusModeBox *boxedFCPPlatformFocusMode = GetNullableObjectAtIndex(list, 2); - pigeonResult.focusMode = boxedFCPPlatformFocusMode.value; - pigeonResult.exposurePointSupported = [GetNullableObjectAtIndex(list, 3) boolValue]; - pigeonResult.focusPointSupported = [GetNullableObjectAtIndex(list, 4) boolValue]; - return pigeonResult; -} -+ (nullable FCPPlatformCameraState *)nullableFromList:(NSArray *)list { - return (list) ? [FCPPlatformCameraState fromList:list] : nil; -} -- (NSArray *)toList { - return @[ - self.previewSize ?: [NSNull null], - [[FCPPlatformExposureModeBox alloc] initWithValue:self.exposureMode], - [[FCPPlatformFocusModeBox alloc] initWithValue:self.focusMode], - @(self.exposurePointSupported), - @(self.focusPointSupported), - ]; -} -@end - -@implementation FCPPlatformMediaSettings -+ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset - framesPerSecond:(nullable NSNumber *)framesPerSecond - videoBitrate:(nullable NSNumber *)videoBitrate - audioBitrate:(nullable NSNumber *)audioBitrate - enableAudio:(BOOL)enableAudio { - FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init]; - pigeonResult.resolutionPreset = resolutionPreset; - pigeonResult.framesPerSecond = framesPerSecond; - pigeonResult.videoBitrate = videoBitrate; - pigeonResult.audioBitrate = audioBitrate; - pigeonResult.enableAudio = enableAudio; - return pigeonResult; -} -+ (FCPPlatformMediaSettings *)fromList:(NSArray *)list { - FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init]; - FCPPlatformResolutionPresetBox *boxedFCPPlatformResolutionPreset = - GetNullableObjectAtIndex(list, 0); - pigeonResult.resolutionPreset = boxedFCPPlatformResolutionPreset.value; - pigeonResult.framesPerSecond = GetNullableObjectAtIndex(list, 1); - pigeonResult.videoBitrate = GetNullableObjectAtIndex(list, 2); - pigeonResult.audioBitrate = GetNullableObjectAtIndex(list, 3); - pigeonResult.enableAudio = [GetNullableObjectAtIndex(list, 4) boolValue]; - return pigeonResult; -} -+ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list { - return (list) ? 
[FCPPlatformMediaSettings fromList:list] : nil; -} -- (NSArray *)toList { - return @[ - [[FCPPlatformResolutionPresetBox alloc] initWithValue:self.resolutionPreset], - self.framesPerSecond ?: [NSNull null], - self.videoBitrate ?: [NSNull null], - self.audioBitrate ?: [NSNull null], - @(self.enableAudio), - ]; -} -@end - -@implementation FCPPlatformPoint -+ (instancetype)makeWithX:(double)x y:(double)y { - FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init]; - pigeonResult.x = x; - pigeonResult.y = y; - return pigeonResult; -} -+ (FCPPlatformPoint *)fromList:(NSArray *)list { - FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init]; - pigeonResult.x = [GetNullableObjectAtIndex(list, 0) doubleValue]; - pigeonResult.y = [GetNullableObjectAtIndex(list, 1) doubleValue]; - return pigeonResult; -} -+ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list { - return (list) ? [FCPPlatformPoint fromList:list] : nil; -} -- (NSArray *)toList { - return @[ - @(self.x), - @(self.y), - ]; -} -@end - -@implementation FCPPlatformSize -+ (instancetype)makeWithWidth:(double)width height:(double)height { - FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init]; - pigeonResult.width = width; - pigeonResult.height = height; - return pigeonResult; -} -+ (FCPPlatformSize *)fromList:(NSArray *)list { - FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init]; - pigeonResult.width = [GetNullableObjectAtIndex(list, 0) doubleValue]; - pigeonResult.height = [GetNullableObjectAtIndex(list, 1) doubleValue]; - return pigeonResult; -} -+ (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list { - return (list) ? [FCPPlatformSize fromList:list] : nil; -} -- (NSArray *)toList { - return @[ - @(self.width), - @(self.height), - ]; -} -@end - -@interface FCPMessagesPigeonCodecReader : FlutterStandardReader -@end -@implementation FCPMessagesPigeonCodecReader -- (nullable id)readValueOfType:(UInt8)type { - switch (type) { - case 129: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil ? nil - : [[FCPPlatformCameraLensDirectionBox alloc] - initWithValue:[enumAsNumber integerValue]]; - } - case 130: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil - ? nil - : [[FCPPlatformCameraLensTypeBox alloc] initWithValue:[enumAsNumber integerValue]]; - } - case 131: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil ? nil - : [[FCPPlatformDeviceOrientationBox alloc] - initWithValue:[enumAsNumber integerValue]]; - } - case 132: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil - ? nil - : [[FCPPlatformExposureModeBox alloc] initWithValue:[enumAsNumber integerValue]]; - } - case 133: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil - ? nil - : [[FCPPlatformFlashModeBox alloc] initWithValue:[enumAsNumber integerValue]]; - } - case 134: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil - ? nil - : [[FCPPlatformFocusModeBox alloc] initWithValue:[enumAsNumber integerValue]]; - } - case 135: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil ? nil - : [[FCPPlatformImageFileFormatBox alloc] - initWithValue:[enumAsNumber integerValue]]; - } - case 136: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil ? nil - : [[FCPPlatformImageFormatGroupBox alloc] - initWithValue:[enumAsNumber integerValue]]; - } - case 137: { - NSNumber *enumAsNumber = [self readValue]; - return enumAsNumber == nil ? 
nil - : [[FCPPlatformResolutionPresetBox alloc] - initWithValue:[enumAsNumber integerValue]]; - } - case 138: - return [FCPPlatformCameraDescription fromList:[self readValue]]; - case 139: - return [FCPPlatformCameraState fromList:[self readValue]]; - case 140: - return [FCPPlatformMediaSettings fromList:[self readValue]]; - case 141: - return [FCPPlatformPoint fromList:[self readValue]]; - case 142: - return [FCPPlatformSize fromList:[self readValue]]; - default: - return [super readValueOfType:type]; - } -} -@end - -@interface FCPMessagesPigeonCodecWriter : FlutterStandardWriter -@end -@implementation FCPMessagesPigeonCodecWriter -- (void)writeValue:(id)value { - if ([value isKindOfClass:[FCPPlatformCameraLensDirectionBox class]]) { - FCPPlatformCameraLensDirectionBox *box = (FCPPlatformCameraLensDirectionBox *)value; - [self writeByte:129]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformCameraLensTypeBox class]]) { - FCPPlatformCameraLensTypeBox *box = (FCPPlatformCameraLensTypeBox *)value; - [self writeByte:130]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformDeviceOrientationBox class]]) { - FCPPlatformDeviceOrientationBox *box = (FCPPlatformDeviceOrientationBox *)value; - [self writeByte:131]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformExposureModeBox class]]) { - FCPPlatformExposureModeBox *box = (FCPPlatformExposureModeBox *)value; - [self writeByte:132]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformFlashModeBox class]]) { - FCPPlatformFlashModeBox *box = (FCPPlatformFlashModeBox *)value; - [self writeByte:133]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformFocusModeBox class]]) { - FCPPlatformFocusModeBox *box = (FCPPlatformFocusModeBox *)value; - [self writeByte:134]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformImageFileFormatBox class]]) { - FCPPlatformImageFileFormatBox *box = (FCPPlatformImageFileFormatBox *)value; - [self writeByte:135]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformImageFormatGroupBox class]]) { - FCPPlatformImageFormatGroupBox *box = (FCPPlatformImageFormatGroupBox *)value; - [self writeByte:136]; - [self writeValue:(value == nil ? [NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformResolutionPresetBox class]]) { - FCPPlatformResolutionPresetBox *box = (FCPPlatformResolutionPresetBox *)value; - [self writeByte:137]; - [self writeValue:(value == nil ? 
[NSNull null] : [NSNumber numberWithInteger:box.value])]; - } else if ([value isKindOfClass:[FCPPlatformCameraDescription class]]) { - [self writeByte:138]; - [self writeValue:[value toList]]; - } else if ([value isKindOfClass:[FCPPlatformCameraState class]]) { - [self writeByte:139]; - [self writeValue:[value toList]]; - } else if ([value isKindOfClass:[FCPPlatformMediaSettings class]]) { - [self writeByte:140]; - [self writeValue:[value toList]]; - } else if ([value isKindOfClass:[FCPPlatformPoint class]]) { - [self writeByte:141]; - [self writeValue:[value toList]]; - } else if ([value isKindOfClass:[FCPPlatformSize class]]) { - [self writeByte:142]; - [self writeValue:[value toList]]; - } else { - [super writeValue:value]; - } -} -@end - -@interface FCPMessagesPigeonCodecReaderWriter : FlutterStandardReaderWriter -@end -@implementation FCPMessagesPigeonCodecReaderWriter -- (FlutterStandardWriter *)writerWithData:(NSMutableData *)data { - return [[FCPMessagesPigeonCodecWriter alloc] initWithData:data]; -} -- (FlutterStandardReader *)readerWithData:(NSData *)data { - return [[FCPMessagesPigeonCodecReader alloc] initWithData:data]; -} -@end - -NSObject *FCPGetMessagesCodec(void) { - static FlutterStandardMessageCodec *sSharedObject = nil; - static dispatch_once_t sPred = 0; - dispatch_once(&sPred, ^{ - FCPMessagesPigeonCodecReaderWriter *readerWriter = - [[FCPMessagesPigeonCodecReaderWriter alloc] init]; - sSharedObject = [FlutterStandardMessageCodec codecWithReaderWriter:readerWriter]; - }); - return sSharedObject; -} -void SetUpFCPCameraApi(id binaryMessenger, NSObject *api) { - SetUpFCPCameraApiWithSuffix(binaryMessenger, api, @""); -} - -void SetUpFCPCameraApiWithSuffix(id binaryMessenger, - NSObject *api, NSString *messageChannelSuffix) { - messageChannelSuffix = messageChannelSuffix.length > 0 - ? [NSString stringWithFormat:@".%@", messageChannelSuffix] - : @""; - /// Returns the list of available cameras. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.getAvailableCameras", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(availableCamerasWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(availableCamerasWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api availableCamerasWithCompletion:^( - NSArray *_Nullable output, - FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Create a new camera with the given settings, and returns its ID. 
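For comparison with the boxed-enum reader and writer deleted above, the Swift pigeon output decodes its generated structs directly. The following is a hand-written sketch, not the generated Messages.swift: it assumes an unprefixed Swift struct name (PlatformPoint) and reuses type byte 141, the byte the Objective-C codec assigns to FCPPlatformPoint.

```swift
import Flutter
import Foundation

// Illustrative stand-in for the generated struct; the real definition lives
// in the Messages.swift file added by this patch.
struct PlatformPoint {
  var x: Double
  var y: Double
}

// Sketch of the reader half of a pigeon-style codec in Swift. It overrides
// the standard reader and maps one custom type byte back to a Swift value.
private class MessagesCodecReaderSketch: FlutterStandardReader {
  override func readValue(ofType type: UInt8) -> Any? {
    switch type {
    case 141:
      // Custom types are encoded as a list of their fields.
      guard let list = readValue() as? [Any?],
        let x = (list[0] as? NSNumber)?.doubleValue,
        let y = (list[1] as? NSNumber)?.doubleValue
      else { return nil }
      return PlatformPoint(x: x, y: y)
    default:
      return super.readValue(ofType: type)
    }
  }
}
```

The writer half mirrors this, emitting the type byte followed by the field list, much as the Objective-C writeValue: implementation above does.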
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.create", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(createCameraWithName:settings:completion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(createCameraWithName:settings:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0); - FCPPlatformMediaSettings *arg_settings = GetNullableObjectAtIndex(args, 1); - [api createCameraWithName:arg_cameraName - settings:arg_settings - completion:^(NSNumber *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Initializes the camera with the given ID. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName: - [NSString - stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(initializeCamera:withImageFormat:completion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(initializeCamera:withImageFormat:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue]; - FCPPlatformImageFormatGroupBox *boxedFCPPlatformImageFormatGroup = - GetNullableObjectAtIndex(args, 1); - FCPPlatformImageFormatGroup arg_imageFormat = boxedFCPPlatformImageFormatGroup.value; - [api initializeCamera:arg_cameraId - withImageFormat:arg_imageFormat - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Begins streaming frames from the camera. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.startImageStream", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(startImageStreamWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(startImageStreamWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api startImageStreamWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Stops streaming frames from the camera. 
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(stopImageStreamWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(stopImageStreamWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api stopImageStreamWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Called by the Dart side of the plugin when it has received the last image - /// frame sent. - /// - /// This is used to throttle sending frames across the channel. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.receivedImageStreamData", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(receivedImageStreamDataWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(receivedImageStreamDataWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api receivedImageStreamDataWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Indicates that the given camera is no longer being used on the Dart side, - /// and any associated resources can be cleaned up. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(disposeCamera:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(disposeCamera:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue]; - [api disposeCamera:arg_cameraId - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Locks the camera capture to the current device orientation. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." 
- @"CameraApi.lockCaptureOrientation", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(lockCaptureOrientation:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(lockCaptureOrientation:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformDeviceOrientationBox *boxedFCPPlatformDeviceOrientation = - GetNullableObjectAtIndex(args, 0); - FCPPlatformDeviceOrientation arg_orientation = boxedFCPPlatformDeviceOrientation.value; - [api lockCaptureOrientation:arg_orientation - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Unlocks camera capture orientation, allowing it to automatically adapt to - /// device orientation. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.unlockCaptureOrientation", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(unlockCaptureOrientationWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(unlockCaptureOrientationWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api unlockCaptureOrientationWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Takes a picture with the current settings, and returns the path to the - /// resulting file. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName: - [NSString - stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(takePictureWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(takePictureWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api - takePictureWithCompletion:^(NSString *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Does any preprocessing necessary before beginning to record video. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.prepareForVideoRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(prepareForVideoRecordingWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(prepareForVideoRecordingWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api prepareForVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Begins recording video, optionally enabling streaming to Dart at the same - /// time. 
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.startVideoRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(startVideoRecordingWithStreaming:completion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(startVideoRecordingWithStreaming:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - BOOL arg_enableStream = [GetNullableObjectAtIndex(args, 0) boolValue]; - [api startVideoRecordingWithStreaming:arg_enableStream - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Stops recording video, and results the path to the resulting file. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.stopVideoRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(stopVideoRecordingWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(stopVideoRecordingWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api stopVideoRecordingWithCompletion:^(NSString *_Nullable output, - FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Pauses video recording. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.pauseVideoRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(pauseVideoRecordingWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(pauseVideoRecordingWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api pauseVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Resumes a previously paused video recording. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.resumeVideoRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(resumeVideoRecordingWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(resumeVideoRecordingWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api resumeVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Switches the camera to the given flash mode. 
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setFlashMode:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setFlashMode:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformFlashModeBox *boxedFCPPlatformFlashMode = GetNullableObjectAtIndex(args, 0); - FCPPlatformFlashMode arg_mode = boxedFCPPlatformFlashMode.value; - [api setFlashMode:arg_mode - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Switches the camera to the given exposure mode. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setExposureMode:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureMode:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformExposureModeBox *boxedFCPPlatformExposureMode = - GetNullableObjectAtIndex(args, 0); - FCPPlatformExposureMode arg_mode = boxedFCPPlatformExposureMode.value; - [api setExposureMode:arg_mode - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Anchors auto-exposure to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default exposure point. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.setExposurePoint", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setExposurePoint:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setExposurePoint:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0); - [api setExposurePoint:arg_point - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Returns the minimum exposure offset supported by the camera. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." 
- @"CameraApi.getMinExposureOffset", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(getMinimumExposureOffset:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumExposureOffset:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api getMinimumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Returns the maximum exposure offset supported by the camera. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.getMaxExposureOffset", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(getMaximumExposureOffset:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumExposureOffset:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api getMaximumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Sets the exposure offset manually to the given value. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.setExposureOffset", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(setExposureOffset:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureOffset:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - double arg_offset = [GetNullableObjectAtIndex(args, 0) doubleValue]; - [api setExposureOffset:arg_offset - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Switches the camera to the given focus mode. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setFocusMode:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusMode:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformFocusModeBox *boxedFCPPlatformFocusMode = GetNullableObjectAtIndex(args, 0); - FCPPlatformFocusMode arg_mode = boxedFCPPlatformFocusMode.value; - [api setFocusMode:arg_mode - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Anchors auto-focus to the given point in (0,1) coordinate space. - /// - /// A null value resets to the default focus point. 
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setFocusPoint:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusPoint:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0); - [api setFocusPoint:arg_point - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Returns the minimum zoom level supported by the camera. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(getMinimumZoomLevel:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumZoomLevel:)", api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api getMinimumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Returns the maximum zoom level supported by the camera. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(getMaximumZoomLevel:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumZoomLevel:)", api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api getMaximumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) { - callback(wrapResult(output, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Sets the zoom factor. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(setZoomLevel:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setZoomLevel:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - double arg_zoom = [GetNullableObjectAtIndex(args, 0) doubleValue]; - [api setZoomLevel:arg_zoom - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Pauses streaming of preview frames. 
- { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(pausePreviewWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(pausePreviewWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api pausePreviewWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Resumes a previously paused preview stream. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(resumePreviewWithCompletion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(resumePreviewWithCompletion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - [api resumePreviewWithCompletion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Changes the camera used while recording video. - /// - /// This should only be called while video recording is active. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." - @"CameraApi.updateDescriptionWhileRecording", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert([api respondsToSelector:@selector(updateDescriptionWhileRecordingCameraName: - completion:)], - @"FCPCameraApi api (%@) doesn't respond to " - @"@selector(updateDescriptionWhileRecordingCameraName:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0); - [api updateDescriptionWhileRecordingCameraName:arg_cameraName - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } - /// Sets the file format used for taking pictures. - { - FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] - initWithName:[NSString stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation." 
- @"CameraApi.setImageFileFormat", - messageChannelSuffix] - binaryMessenger:binaryMessenger - codec:FCPGetMessagesCodec()]; - if (api) { - NSCAssert( - [api respondsToSelector:@selector(setImageFileFormat:completion:)], - @"FCPCameraApi api (%@) doesn't respond to @selector(setImageFileFormat:completion:)", - api); - [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { - NSArray *args = message; - FCPPlatformImageFileFormatBox *boxedFCPPlatformImageFileFormat = - GetNullableObjectAtIndex(args, 0); - FCPPlatformImageFileFormat arg_format = boxedFCPPlatformImageFileFormat.value; - [api setImageFileFormat:arg_format - completion:^(FlutterError *_Nullable error) { - callback(wrapResult(nil, error)); - }]; - }]; - } else { - [channel setMessageHandler:nil]; - } - } -} -@interface FCPCameraGlobalEventApi () -@property(nonatomic, strong) NSObject *binaryMessenger; -@property(nonatomic, strong) NSString *messageChannelSuffix; -@end - -@implementation FCPCameraGlobalEventApi - -- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger { - return [self initWithBinaryMessenger:binaryMessenger messageChannelSuffix:@""]; -} -- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger - messageChannelSuffix:(nullable NSString *)messageChannelSuffix { - self = [self init]; - if (self) { - _binaryMessenger = binaryMessenger; - _messageChannelSuffix = [messageChannelSuffix length] == 0 - ? @"" - : [NSString stringWithFormat:@".%@", messageChannelSuffix]; - } - return self; -} -- (void)deviceOrientationChangedOrientation:(FCPPlatformDeviceOrientation)arg_orientation - completion:(void (^)(FlutterError *_Nullable))completion { - NSString *channelName = [NSString - stringWithFormat: - @"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraGlobalEventApi.deviceOrientationChanged", - _messageChannelSuffix]; - FlutterBasicMessageChannel *channel = - [FlutterBasicMessageChannel messageChannelWithName:channelName - binaryMessenger:self.binaryMessenger - codec:FCPGetMessagesCodec()]; - [channel sendMessage:@[ [[FCPPlatformDeviceOrientationBox alloc] initWithValue:arg_orientation] ] - reply:^(NSArray *reply) { - if (reply != nil) { - if (reply.count > 1) { - completion([FlutterError errorWithCode:reply[0] - message:reply[1] - details:reply[2]]); - } else { - completion(nil); - } - } else { - completion(createConnectionError(channelName)); - } - }]; -} -@end - -@interface FCPCameraEventApi () -@property(nonatomic, strong) NSObject *binaryMessenger; -@property(nonatomic, strong) NSString *messageChannelSuffix; -@end - -@implementation FCPCameraEventApi - -- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger { - return [self initWithBinaryMessenger:binaryMessenger messageChannelSuffix:@""]; -} -- (instancetype)initWithBinaryMessenger:(NSObject *)binaryMessenger - messageChannelSuffix:(nullable NSString *)messageChannelSuffix { - self = [self init]; - if (self) { - _binaryMessenger = binaryMessenger; - _messageChannelSuffix = [messageChannelSuffix length] == 0 - ? 
@"" - : [NSString stringWithFormat:@".%@", messageChannelSuffix]; - } - return self; -} -- (void)initializedWithState:(FCPPlatformCameraState *)arg_initialState - completion:(void (^)(FlutterError *_Nullable))completion { - NSString *channelName = [NSString - stringWithFormat:@"%@%@", - @"dev.flutter.pigeon.camera_avfoundation.CameraEventApi.initialized", - _messageChannelSuffix]; - FlutterBasicMessageChannel *channel = - [FlutterBasicMessageChannel messageChannelWithName:channelName - binaryMessenger:self.binaryMessenger - codec:FCPGetMessagesCodec()]; - [channel sendMessage:@[ arg_initialState ?: [NSNull null] ] - reply:^(NSArray *reply) { - if (reply != nil) { - if (reply.count > 1) { - completion([FlutterError errorWithCode:reply[0] - message:reply[1] - details:reply[2]]); - } else { - completion(nil); - } - } else { - completion(createConnectionError(channelName)); - } - }]; -} -- (void)reportError:(NSString *)arg_message - completion:(void (^)(FlutterError *_Nullable))completion { - NSString *channelName = [NSString - stringWithFormat:@"%@%@", @"dev.flutter.pigeon.camera_avfoundation.CameraEventApi.error", - _messageChannelSuffix]; - FlutterBasicMessageChannel *channel = - [FlutterBasicMessageChannel messageChannelWithName:channelName - binaryMessenger:self.binaryMessenger - codec:FCPGetMessagesCodec()]; - [channel sendMessage:@[ arg_message ?: [NSNull null] ] - reply:^(NSArray *reply) { - if (reply != nil) { - if (reply.count > 1) { - completion([FlutterError errorWithCode:reply[0] - message:reply[1] - details:reply[2]]); - } else { - completion(nil); - } - } else { - completion(createConnectionError(channelName)); - } - }]; -} -@end diff --git a/packages/camera/camera_avfoundation/pigeons/messages.dart b/packages/camera/camera_avfoundation/pigeons/messages.dart index f4bdd8d998da..2557eb02707c 100644 --- a/packages/camera/camera_avfoundation/pigeons/messages.dart +++ b/packages/camera/camera_avfoundation/pigeons/messages.dart @@ -7,14 +7,8 @@ import 'package:pigeon/pigeon.dart'; @ConfigurePigeon( PigeonOptions( dartOut: 'lib/src/messages.g.dart', - objcHeaderOut: - 'ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/messages.g.h', - objcSourceOut: - 'ios/camera_avfoundation/Sources/camera_avfoundation_objc/messages.g.m', - objcOptions: ObjcOptions( - prefix: 'FCP', - headerIncludePath: './include/camera_avfoundation/messages.g.h', - ), + swiftOut: + 'ios/camera_avfoundation/Sources/camera_avfoundation/Messages.swift', copyrightHeader: 'pigeons/copyright.txt', ), ) diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 1e87023aa7b0..241456f0c5d1 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.23+2 +version: 0.9.23+3 environment: sdk: ^3.9.0