Skip to content
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
## 2.7.1

* Adds the ability to play videos at more than 30 FPS.
* Fixes playing state not updating in some paths.

## 2.7.0

* Adds support for platform views as an optional way of displaying a video.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ @interface StubFVPDisplayLinkFactory : NSObject <FVPDisplayLinkFactory>

/** This display link to return. */
@property(nonatomic, strong) FVPDisplayLink *displayLink;
@property(nonatomic, copy) void (^fireDisplayLink)(void);

- (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;

Expand All @@ -144,6 +145,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
}
// Stub factory hook: captures the display-link callback so tests can fire it
// manually through the fireDisplayLink property, then returns the injected
// display link instead of creating a real one.
- (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
callback:(void (^)(void))callback {
self.fireDisplayLink = callback;
return self.displayLink;
}

Expand Down Expand Up @@ -321,13 +323,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -373,15 +376,16 @@ - (void)testInitStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
FVPTextureBasedVideoPlayer *player =
(FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -433,19 +437,25 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {

FVPTextureBasedVideoPlayer *player =
(FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];
XCTAssertEqual([player position], 1234);
// Wait for the player's position to update, it shouldn't take long.
XCTestExpectation *positionExpectation =
[self expectationForPredicate:[NSPredicate predicateWithFormat:@"position == 1234"]
evaluatedWithObject:player
handler:nil];
[self waitForExpectations:@[ positionExpectation ] timeout:3.0];

// Simulate a buffer being available.
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since the video was playing, the display link should not be paused after getting a buffer.
OCMVerify(never(), [mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -994,6 +1004,84 @@ - (void)testUpdatePlayingStateShouldNotResetRate {
XCTAssertEqual(player.player.rate, 2);
}

// Verifies that the texture registry is notified for every advanced frame —
// both for a frame delivered by a display-link fire alone, and for a frame
// delivered after the engine has consumed the previous buffer via
// copyPixelBuffer — i.e. no alternate frames are dropped.
- (void)testPlayerShouldNotDropEverySecondFrame {
NSObject<FlutterPluginRegistrar> *registrar =
[GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
// Partial-mock the registrar so its texture registry can be replaced with a
// mock whose textureFrameAvailable: calls can be expected and verified.
NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
NSObject<FlutterTextureRegistry> *mockTextureRegistry =
OCMProtocolMock(@protocol(FlutterTextureRegistry));
OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);

// A real display link wrapped in the stub factory; the factory captures the
// plugin's callback so the test can fire it on demand (fireDisplayLink).
FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
callback:^(){
}];
StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
[[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
// Partial mock so the video-output queries below can be stubbed per item time.
AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
displayLinkFactory:stubDisplayLinkFactory
registrar:partialRegistrar];

FlutterError *error;
[videoPlayerPlugin initialize:&error];
XCTAssertNil(error);
FVPCreationOptions *create = [FVPCreationOptions
makeWithAsset:nil
uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
packageName:nil
formatHint:nil
httpHeaders:@{}
viewType:FVPPlatformVideoViewTypeTextureView];
NSNumber *playerIdentifier = [videoPlayerPlugin createWithOptions:create error:&error];
FVPTextureBasedVideoPlayer *player =
(FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];

// Simulated playback clock: itemTimeForHostTime: always answers with
// currentTime, which the test advances manually via advanceFrame below.
__block CMTime currentTime = kCMTimeZero;
OCMStub([mockVideoOutput itemTimeForHostTime:0])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
[invocation setReturnValue:&currentTime];
});
// Pending frames keyed by item time; hasNewPixelBufferForItemTime: reports
// YES only while a buffer for that exact time is still pending.
__block NSMutableSet *pixelBuffers = NSMutableSet.new;
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
// Index 2 is the first explicit argument (0 = self, 1 = _cmd).
[invocation getArgument:&itemTime atIndex:2];
BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&has];
});
// copyPixelBufferForItemTime: hands out a freshly created 1x1 buffer for a
// pending time and consumes (removes) that time, so each frame is copyable
// exactly once — mirroring real AVPlayerItemVideoOutput behavior.
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
itemTimeForDisplay:[OCMArg anyPointer]])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
[invocation getArgument:&itemTime atIndex:2];
CVPixelBufferRef bufferRef = NULL;
if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
}
[pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&bufferRef];
});
// Advances the simulated clock by one tick and queues a frame for that time.
void (^advanceFrame)(void) = ^{
currentTime.value++;
[pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
};

// Frame 1: a display-link fire alone must notify the texture registry.
advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:playerIdentifier.intValue]);
stubDisplayLinkFactory.fireDisplayLink();
// Delayed verify: the notification may be dispatched asynchronously.
OCMVerifyAllWithDelay(mockTextureRegistry, 10);

// Frame 2: after the engine consumes the previous buffer (copyPixelBuffer
// returns an owning reference, hence CFRelease), the next fire must still
// produce a notification — this is the "second frame" that must not drop.
advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:playerIdentifier.intValue]);
CFRelease([player copyPixelBuffer]);
stubDisplayLinkFactory.fireDisplayLink();
OCMVerifyAllWithDelay(mockTextureRegistry, 10);
}

#if TARGET_OS_IOS
- (void)testVideoPlayerShouldNotOverwritePlayAndRecordNorDefaultToSpeaker {
NSObject<FlutterPluginRegistrar> *registrar = [GetPluginRegistry()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,28 +4,16 @@

#import "./include/video_player_avfoundation/FVPFrameUpdater.h"

/// FVPFrameUpdater is responsible for notifying the Flutter texture registry
/// when a new video frame is available.
@interface FVPFrameUpdater ()
/// The Flutter texture registry used to notify about new frames.
@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
@end

@implementation FVPFrameUpdater
- (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry {
NSAssert(self, @"super init cannot be nil");
if (self == nil) return nil;
_registry = registry;
_lastKnownAvailableTime = kCMTimeInvalid;
return self;
}

- (void)displayLinkFired {
// Only report a new frame if one is actually available.
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
_lastKnownAvailableTime = outputItemTime;
[_registry textureFrameAvailable:_textureIdentifier];
}
self.frameDuration = _displayLink.duration;
[_registry textureFrameAvailable:_textureIdentifier];
}
@end
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,20 @@ @interface FVPTextureBasedVideoPlayer ()
@property(nonatomic) FVPFrameUpdater *frameUpdater;
// The display link that drives frameUpdater.
@property(nonatomic) FVPDisplayLink *displayLink;
// The latest buffer obtained from video output. This is stored so that it can be returned from
// copyPixelBuffer again if nothing new is available, since the engine has undefined behavior when
// returning NULL.
@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
// The time that represents when the next frame displays.
@property(nonatomic) CFTimeInterval targetTime;
// Whether to enqueue textureFrameAvailable from copyPixelBuffer.
@property(nonatomic) BOOL selfRefresh;
// The time that represents the start of average frame duration measurement.
@property(nonatomic) CFTimeInterval startTime;
// The number of frames since the start of average frame duration measurement.
@property(nonatomic) int framesCount;
// The latest frame duration since there was significant change.
@property(nonatomic) CFTimeInterval latestDuration;
// Whether a new frame needs to be provided to the engine regardless of the current play/pause state
// (e.g., after a seek while paused). If YES, the display link should continue to run until the next
// frame is successfully provided.
Expand Down Expand Up @@ -67,7 +81,8 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
if (self) {
_frameUpdater = frameUpdater;
_displayLink = displayLink;
_frameUpdater.videoOutput = self.videoOutput;
_frameUpdater.displayLink = _displayLink;
_selfRefresh = true;
_onDisposed = [onDisposed copy];

// This is to fix 2 bugs: 1. blank video for encrypted video streams on iOS 16
Expand All @@ -81,6 +96,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
return self;
}

- (void)dealloc {
// Balance the ownership held in latestPixelBuffer; CVBufferRelease is
// presumably a safe no-op when the buffer is NULL — matches Core Video docs.
CVBufferRelease(_latestPixelBuffer);
}

// Forwards the engine-assigned texture identifier to the frame updater, which
// passes it to the texture registry when reporting that a frame is available.
- (void)setTextureIdentifier:(int64_t)textureIdentifier {
self.frameUpdater.textureIdentifier = textureIdentifier;
}
Expand Down Expand Up @@ -161,17 +180,32 @@ - (void)dispose {
#pragma mark - FlutterTexture

- (CVPixelBufferRef)copyPixelBuffer {
// If the difference between target time and current time is longer than this fraction of frame
// duration then reset target time.
const float resetThreshold = 0.5;

// Ensure video sampling at regular intervals. This function is not called at exact time intervals
// so CACurrentMediaTime returns irregular timestamps which causes missed video frames. The range
// outside of which targetTime is reset should be narrow enough to make possible lag as small as
// possible and at the same time wide enough to avoid too frequent resets which would lead to
// irregular sampling.
// TODO: Ideally there would be a targetTimestamp of display link used by the flutter engine.
// https://github.com/flutter/flutter/issues/159087
CFTimeInterval currentTime = CACurrentMediaTime();
CFTimeInterval duration = self.frameUpdater.frameDuration;
if (fabs(self.targetTime - currentTime) > duration * resetThreshold) {
self.targetTime = currentTime;
}
self.targetTime += duration;

CVPixelBufferRef buffer = NULL;
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:self.targetTime];
if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
buffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
} else {
// If the current time isn't available yet, use the time that was checked when informing the
// engine that a frame was available (if any).
CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime;
if (CMTIME_IS_VALID(lastAvailableTime)) {
buffer = [self.videoOutput copyPixelBufferForItemTime:lastAvailableTime
itemTimeForDisplay:NULL];
if (buffer) {
// Balance the owned reference from copyPixelBufferForItemTime.
CVBufferRelease(self.latestPixelBuffer);
self.latestPixelBuffer = buffer;
}
}

Expand All @@ -184,7 +218,48 @@ - (CVPixelBufferRef)copyPixelBuffer {
}
}

return buffer;
// Calling textureFrameAvailable only from within displayLinkFired would require a non-trivial
// solution to minimize missed video frames due to race between displayLinkFired, copyPixelBuffer
// and place where is _textureFrameAvailable reset to false in the flutter engine.
// TODO: Ideally FlutterTexture would support mode of operation where the copyPixelBuffer is
// called always or some other alternative, instead of on demand by calling textureFrameAvailable.
// https://github.com/flutter/flutter/issues/159162
if (self.displayLink.running && self.selfRefresh) {
// The number of frames over which to measure average frame duration.
const int windowSize = 10;
// If measured average frame duration is shorter than this fraction of frame duration obtained
// from display link then rely solely on refreshes from display link.
const float durationThreshold = 0.5;
// If duration changes by this fraction or more then reset average frame duration measurement.
const float resetFraction = 0.01;

if (fabs(duration - self.latestDuration) >= self.latestDuration * resetFraction) {
self.startTime = currentTime;
self.framesCount = 0;
self.latestDuration = duration;
}
if (self.framesCount == windowSize) {
CFTimeInterval averageDuration = (currentTime - self.startTime) / windowSize;
if (averageDuration < duration * durationThreshold) {
NSLog(@"Warning: measured average duration between frames is unexpectedly short (%f/%f), "
@"please report this to "
@"https://github.com/flutter/flutter/issues.",
averageDuration, duration);
self.selfRefresh = false;
}
self.startTime = currentTime;
self.framesCount = 0;
}
self.framesCount++;

dispatch_async(dispatch_get_main_queue(), ^{
[self.frameUpdater.registry textureFrameAvailable:self.frameUpdater.textureIdentifier];
});
}

// Add a retain for the engine, since the copyPixelBufferForItemTime has already been accounted
// for, and the engine expects an owning reference.
return CVBufferRetain(self.latestPixelBuffer);
}

- (void)onTextureUnregistered:(NSObject<FlutterTexture> *)texture {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
error:nil] == AVKeyValueStatusLoaded) {
// Rotate the video by using a videoComposition and the preferredTransform
self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack);
// Do not use video composition when it is not needed.
if (CGAffineTransformIsIdentity(self->_preferredTransform)) {
return;
}
// Note:
// https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
// Video composition can only be used with file-based media and is not supported for
Expand Down Expand Up @@ -207,9 +211,14 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo
}
videoComposition.renderSize = CGSizeMake(width, height);

// TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
// Currently set at a constant 30 FPS
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
if (CMTIME_IS_VALID(videoTrack.minFrameDuration)) {
videoComposition.frameDuration = videoTrack.minFrameDuration;
} else {
NSLog(@"Warning: videoTrack.minFrameDuration for input video is invalid, please report this to "
@"https://github.com/flutter/flutter/issues with input video attached.");
videoComposition.frameDuration = CMTimeMake(1, 30);
}

return videoComposition;
}
Expand Down Expand Up @@ -239,7 +248,6 @@ - (void)observeValueForKeyPath:(NSString *)path
case AVPlayerItemStatusReadyToPlay:
[item addOutput:_videoOutput];
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
break;
}
} else if (context == presentationSizeContext || context == durationContext) {
Expand All @@ -249,7 +257,6 @@ - (void)observeValueForKeyPath:(NSString *)path
// its presentation size or duration. When these properties are finally set, re-check if
// all required properties and instantiate the event sink if it is not already set up.
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
}
} else if (context == playbackLikelyToKeepUpContext) {
[self updatePlayingState];
Expand Down Expand Up @@ -387,6 +394,8 @@ - (void)setupEventSinkIfReadyToPlay {
}

_isInitialized = YES;
[self updatePlayingState];

_eventSink(@{
@"event" : @"initialized",
@"duration" : @(duration),
Expand Down
Loading