Skip to content
This repository has been archived by the owner on Feb 22, 2023. It is now read-only.

Commit

Permalink
[camera] Fixed a crash when streaming on iOS (#4520)
Browse files Browse the repository at this point in the history
  • Loading branch information
zuvola authored Mar 29, 2022
1 parent 794fa82 commit 972e942
Show file tree
Hide file tree
Showing 9 changed files with 159 additions and 13 deletions.
4 changes: 4 additions & 0 deletions packages/camera/camera/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
## 0.9.4+18

* Fixes a crash on iOS when streaming on low-performance devices.

## 0.9.4+17

* Removes obsolete information from README, and adds OS support table.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; };
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; };
978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
Expand Down Expand Up @@ -70,6 +71,7 @@
1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
Expand Down Expand Up @@ -142,6 +144,7 @@
F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */,
E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
788A065927B0E02900533D74 /* StreamingTest.m */,
);
path = RunnerTests;
sourceTree = "<group>";
Expand Down Expand Up @@ -416,6 +419,7 @@
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */,
Expand Down
85 changes: 85 additions & 0 deletions packages/camera/camera/example/ios/RunnerTests/StreamingTest.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

@import camera;
@import camera.Test;
@import XCTest;
@import AVFoundation;
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"

/// Unit tests for the image-streaming back-pressure logic in FLTCam.
///
/// Verifies that at most `maxStreamingPendingFramesCount` frames are forwarded
/// to the Dart-side event sink, and that `receivedImageStreamData` frees a
/// pending slot so one more frame can be streamed.
@interface StreamingTests : XCTestCase

/// Camera under test, created on a private capture-session queue.
@property(readonly, nonatomic) FLTCam *camera;

/// Sample buffer fed repeatedly to the capture-output callback.
@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;

@end

@implementation StreamingTests

- (void)setUp {
  [super setUp];
  dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
  _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
  _sampleBuffer = FLTCreateTestSampleBuffer();
}

- (void)tearDown {
  // FLTCreateTestSampleBuffer returns an owned (+1) CF object; balance it here.
  CFRelease(_sampleBuffer);
  [super tearDown];
}

- (void)testExceedMaxStreamingPendingFramesCount {
  XCTestExpectation *streamingExpectation = [self
      expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];

  // Stub the stream handler's event sink so every delivered frame fulfills the
  // expectation. Expectations created via -expectationWithDescription: assert
  // on over-fulfillment, so more than the expected number of sink calls fails
  // the test.
  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];

  // Streaming starts asynchronously on the capture session queue; wait for the
  // isStreamingImages flag before delivering frames.
  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
                                                                       object:_camera
                                                                expectedValue:@YES];
  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
  XCTAssertEqual(result, XCTWaiterResultCompleted);

  // Deliver more frames than the pending-frame limit; only 4 (the default
  // maxStreamingPendingFramesCount) may reach the sink.
  streamingExpectation.expectedFulfillmentCount = 4;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

- (void)testReceivedImageStreamData {
  XCTestExpectation *streamingExpectation =
      [self expectationWithDescription:
                @"Must be able to call the handler again when receivedImageStreamData is called"];

  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];

  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
                                                                       object:_camera
                                                                expectedValue:@YES];
  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
  XCTAssertEqual(result, XCTWaiterResultCompleted);

  // 4 frames fill the pending budget; acknowledging one frame below frees a
  // slot, so exactly 5 sink calls are expected in total.
  streamingExpectation.expectedFulfillmentCount = 5;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [_camera receivedImageStreamData];
  [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

@end
3 changes: 3 additions & 0 deletions packages/camera/camera/ios/Classes/CameraPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,9 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
} else if ([@"stopImageStream" isEqualToString:call.method]) {
[_camera stopImageStream];
[result sendSuccess];
} else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
[_camera receivedImageStreamData];
[result sendSuccess];
} else {
NSDictionary *argsMap = call.arguments;
NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
Expand Down
8 changes: 8 additions & 0 deletions packages/camera/camera/ios/Classes/FLTCam.h
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,14 @@ NS_ASSUME_NONNULL_BEGIN
- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
- (void)applyFocusMode;

/**
* Acknowledges the receipt of one image stream frame.
*
* This should be called each time a frame is received. Failing to call it may
* cause later frames to be dropped instead of streamed.
*/
- (void)receivedImageStreamData;

/**
* Applies FocusMode on the AVCaptureDevice.
*
Expand Down
39 changes: 27 additions & 12 deletions packages/camera/camera/ios/Classes/FLTCam.m
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,6 @@
@import CoreMotion;
#import <libkern/OSAtomic.h>

@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
// The queue on which `eventSink` property should be accessed
@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
// `eventSink` property should be accessed on `captureSessionQueue`.
// The block itself should be invoked on the main queue.
@property FlutterEventSink eventSink;
@end

@implementation FLTImageStreamHandler

- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
Expand Down Expand Up @@ -68,7 +60,13 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property(assign, nonatomic) BOOL videoIsDisconnected;
@property(assign, nonatomic) BOOL audioIsDisconnected;
@property(assign, nonatomic) BOOL isAudioSetup;
@property(assign, nonatomic) BOOL isStreamingImages;

/// Number of frames currently pending processing.
@property(assign, nonatomic) int streamingPendingFramesCount;

/// Maximum number of frames pending processing.
@property(assign, nonatomic) int maxStreamingPendingFramesCount;

@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
@property(assign, nonatomic) CMTime lastVideoSampleTime;
@property(assign, nonatomic) CMTime lastAudioSampleTime;
Expand Down Expand Up @@ -135,6 +133,11 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
_videoFormat = kCVPixelFormatType_32BGRA;
_inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];

// To limit memory consumption, limit the number of frames pending processing.
// After some testing, 4 was determined to be the best maximum value.
// https://github.com/flutter/plugins/pull/4520#discussion_r766335637
_maxStreamingPendingFramesCount = 4;

NSError *localError = nil;
_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
error:&localError];
Expand Down Expand Up @@ -401,7 +404,8 @@ - (void)captureOutput:(AVCaptureOutput *)output
}
if (_isStreamingImages) {
FlutterEventSink eventSink = _imageStreamHandler.eventSink;
if (eventSink) {
if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) {
self.streamingPendingFramesCount++;
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Must lock base address before accessing the pixel data
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
Expand Down Expand Up @@ -898,19 +902,26 @@ - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:
}

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
[self startImageStreamWithMessenger:messenger
imageStreamHandler:[[FLTImageStreamHandler alloc]
initWithCaptureSessionQueue:_captureSessionQueue]];
}

- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
if (!_isStreamingImages) {
FlutterEventChannel *eventChannel =
[FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
binaryMessenger:messenger];
FLTThreadSafeEventChannel *threadSafeEventChannel =
[[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];

_imageStreamHandler =
[[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
_imageStreamHandler = imageStreamHandler;
[threadSafeEventChannel setStreamHandler:_imageStreamHandler
completion:^{
dispatch_async(self->_captureSessionQueue, ^{
self.isStreamingImages = YES;
self.streamingPendingFramesCount = 0;
});
}];
} else {
Expand All @@ -928,6 +939,10 @@ - (void)stopImageStream {
}
}

- (void)receivedImageStreamData {
self.streamingPendingFramesCount--;
}

- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];

Expand Down
20 changes: 20 additions & 0 deletions packages/camera/camera/ios/Classes/FLTCam_Test.h
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,19 @@
#import "FLTCam.h"
#import "FLTSavePhotoDelegate.h"

/// Handles the image stream's Flutter event channel, holding the event sink
/// used to forward camera frames to Dart. Exposed here for unit testing.
@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>

/// The queue on which `eventSink` property should be accessed.
@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;

/// The event sink to stream camera events to Dart.
///
/// The property should only be accessed on `captureSessionQueue`.
/// The block itself should be invoked on the main queue.
@property FlutterEventSink eventSink;

@end

// APIs exposed for unit testing.
@interface FLTCam ()

Expand All @@ -14,6 +27,9 @@
/// The output for photo capturing. Exposed setter for unit tests.
@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));

/// True when images from the camera are being streamed.
@property(assign, nonatomic) BOOL isStreamingImages;

/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
/// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
/// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
Expand All @@ -38,4 +54,8 @@
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error;

/// Start streaming images.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;

@end
7 changes: 7 additions & 0 deletions packages/camera/camera/lib/src/camera_controller.dart
Original file line number Diff line number Diff line change
Expand Up @@ -448,6 +448,13 @@ class CameraController extends ValueNotifier<CameraValue> {
_imageStreamSubscription =
cameraEventChannel.receiveBroadcastStream().listen(
(dynamic imageData) {
if (defaultTargetPlatform == TargetPlatform.iOS) {
try {
_channel.invokeMethod<void>('receivedImageStreamData');
} on PlatformException catch (e) {
throw CameraException(e.code, e.message);
}
}
onAvailable(
CameraImage.fromPlatformData(imageData as Map<dynamic, dynamic>));
},
Expand Down
2 changes: 1 addition & 1 deletion packages/camera/camera/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
Dart.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.4+17
version: 0.9.4+18

environment:
sdk: ">=2.14.0 <3.0.0"
Expand Down

0 comments on commit 972e942

Please sign in to comment.