-
Notifications
You must be signed in to change notification settings - Fork 9.8k
[camera] Fixed a crash when streaming on iOS #4520
Changes from all commits
29beaa7
aec0f93
0d9f61a
7cff775
5d1fb56
7c0fa9e
3056e17
1a48ffc
4cb5d6e
27957a1
97d16c7
1382d62
46d9df0
84dc53d
3ed1e94
9a3e627
6191e3a
e732d7b
b69ea0f
e9beb1f
74d9d85
6b81e1a
96f8edb
1167112
24871c6
7635469
376bbfa
f276bd0
f69aed6
9653345
661cf49
8e334bf
7b8d74f
ab233fa
830d5ad
095faf3
e185988
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,85 @@ | ||
// Copyright 2013 The Flutter Authors. All rights reserved. | ||
// Use of this source code is governed by a BSD-style license that can be | ||
// found in the LICENSE file. | ||
|
||
@import camera; | ||
@import camera.Test; | ||
@import XCTest; | ||
@import AVFoundation; | ||
#import <OCMock/OCMock.h> | ||
#import "CameraTestUtils.h" | ||
|
||
/// Unit tests for FLTCam's image streaming behavior, in particular the
/// limit on the number of frames that may be pending on the Dart side.
@interface StreamingTests : XCTestCase

/// The camera under test; recreated for every test case in -setUp.
@property(readonly, nonatomic) FLTCam *camera;

/// A reusable sample buffer fed into the camera's capture output callback.
@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;

@end
|
||
@implementation StreamingTests

/// Creates a fresh camera (on its own capture session queue, matching the
/// production setup) and a test sample buffer before each test.
- (void)setUp {
  // Lifecycle overrides must call through to super.
  [super setUp];
  dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
  _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
  _sampleBuffer = FLTCreateTestSampleBuffer();
}

- (void)tearDown {
  // _sampleBuffer is a Core Foundation object created in -setUp with a +1
  // retain; balance it here since ARC does not manage CF types.
  CFRelease(_sampleBuffer);
  [super tearDown];
}

/// Verifies that frames delivered beyond the maximum number of pending
/// streamed frames are dropped instead of being forwarded to the event sink.
- (void)testExceedMaxStreamingPendingFramesCount {
  XCTestExpectation *streamingExpectation = [self
      expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];

  // Stub the stream handler so every event delivered to Dart fulfills the
  // expectation; over-fulfillment fails the test.
  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];

  // Streaming starts asynchronously on the capture session queue; wait until
  // isStreamingImages flips to YES before delivering frames.
  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
                                                                       object:_camera
                                                                expectedValue:@YES];
  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
  XCTAssertEqual(result, XCTWaiterResultCompleted);

  // Deliver 10 frames; only the first 4 should reach the sink.
  // NOTE(review): 4 is assumed to match FLTCam's pending-frames limit — confirm
  // against the implementation if this test starts failing.
  streamingExpectation.expectedFulfillmentCount = 4;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

/// Verifies that acknowledging a frame via -receivedImageStreamData frees one
/// pending slot, allowing one additional frame to be streamed.
- (void)testReceivedImageStreamData {
  XCTestExpectation *streamingExpectation =
      [self expectationWithDescription:
                @"Must be able to call the handler again when receivedImageStreamData is called"];

  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
    [streamingExpectation fulfill];
  });

  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];

  // Wait for streaming to actually start before delivering frames.
  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
                                                                       object:_camera
                                                                expectedValue:@YES];
  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
  XCTAssertEqual(result, XCTWaiterResultCompleted);

  // 10 frames saturate the pending limit (4 streamed); one acknowledgment then
  // frees a slot, so exactly one more frame goes through: 5 total.
  streamingExpectation.expectedFulfillmentCount = 5;
  for (int i = 0; i < 10; i++) {
    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
  }

  [_camera receivedImageStreamData];
  [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];

  [self waitForExpectationsWithTimeout:1.0 handler:nil];
}

@end
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -61,6 +61,14 @@ NS_ASSUME_NONNULL_BEGIN | |
- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; | ||
- (void)applyFocusMode; | ||
|
||
/** | ||
* Acknowledges the receipt of one image stream frame. | ||
* | ||
* This should be called each time a frame is received. Failing to call it may | ||
* cause later frames to be dropped instead of streamed. | ||
*/ | ||
- (void)receivedImageStreamData; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This needs a declaration comment, per style guide. (I know almost nothing else in this header current has one, but existing style violations aren't a reason to perpetuate new style violations.) There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Done. |
||
|
||
/** | ||
* Applies FocusMode on the AVCaptureDevice. | ||
* | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,6 +5,19 @@ | |
#import "FLTCam.h" | ||
#import "FLTSavePhotoDelegate.h" | ||
|
||
@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler> | ||
|
||
/// The queue on which `eventSink` property should be accessed. | ||
@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; | ||
|
||
/// The event sink to stream camera events to Dart. | ||
/// | ||
/// The property should only be accessed on `captureSessionQueue`. | ||
/// The block itself should be invoked on the main queue. | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. A declaration comment should start with a description of what something is, before getting into how to use it. So:
|
||
@property FlutterEventSink eventSink; | ||
|
||
@end | ||
|
||
// APIs exposed for unit testing. | ||
@interface FLTCam () | ||
|
||
|
@@ -14,6 +27,9 @@ | |
/// The output for photo capturing. Exposed setter for unit tests. | ||
@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10)); | ||
|
||
/// True when images from the camera are being streamed. | ||
@property(assign, nonatomic) BOOL isStreamingImages; | ||
|
||
/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the | ||
/// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the | ||
/// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo | ||
|
@@ -38,4 +54,8 @@ | |
captureSessionQueue:(dispatch_queue_t)captureSessionQueue | ||
error:(NSError **)error; | ||
|
||
/// Start streaming images. | ||
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger | ||
imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler; | ||
|
||
@end |
Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
@@ -448,6 +448,13 @@ class CameraController extends ValueNotifier<CameraValue> { | |||||||||||||||||||||||||||||||||
_imageStreamSubscription = | ||||||||||||||||||||||||||||||||||
cameraEventChannel.receiveBroadcastStream().listen( | ||||||||||||||||||||||||||||||||||
(dynamic imageData) { | ||||||||||||||||||||||||||||||||||
if (defaultTargetPlatform == TargetPlatform.iOS) { | ||||||||||||||||||||||||||||||||||
try { | ||||||||||||||||||||||||||||||||||
_channel.invokeMethod<void>('receivedImageStreamData'); | ||||||||||||||||||||||||||||||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Given that this is asynchronous in both directions I would expect this could cause significant frame loss even on a reasonably powerful device. How much investigation have you done into specifically what the parameters of the crash are? Is it really necessary to have a maximum of a single frame in flight? It seems likely that this needs a more nuanced approach (maybe a hard limit still but higher than one, maybe something that's tunable by plugin clients). It's hard to determine the right fix without more details about what the failure condition actually is. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I modified the plugin to allow you to set a limit on the maximum number of frames pending processing.
I don't have that many devices, but according to flutter/flutter#44436, it seems to be crashing on iPhone 6, 7, 8, etc. These devices still have a large market share that cannot be ignored. Also, for image analysis processes such as OCR, low or medium quality is not always satisfactory. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Thanks for this testing. Given this table, I'm fine with starting from setting a fixed value—4 looks like a good place to start—with an explanation in the code of where that number came from, and then if it turns out that causes issues in some cases we can add an API to tune it later. |
||||||||||||||||||||||||||||||||||
} on PlatformException catch (e) { | ||||||||||||||||||||||||||||||||||
throw CameraException(e.code, e.message); | ||||||||||||||||||||||||||||||||||
} | ||||||||||||||||||||||||||||||||||
} | ||||||||||||||||||||||||||||||||||
onAvailable( | ||||||||||||||||||||||||||||||||||
CameraImage.fromPlatformData(imageData as Map<dynamic, dynamic>)); | ||||||||||||||||||||||||||||||||||
}, | ||||||||||||||||||||||||||||||||||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nice to see these new test helpers being used :)