@@ -1,3 +1,8 @@
## 2.6.3

* Adds support for playing videos at more than 30 FPS.
* Fixes playing state not updating in some paths.

## 2.6.2

* Updates Pigeon for non-nullable collection type support.
(next file)
@@ -125,6 +125,7 @@ @interface StubFVPDisplayLinkFactory : NSObject <FVPDisplayLinkFactory>

/** This display link to return. */
@property(nonatomic, strong) FVPDisplayLink *displayLink;
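/** The callback passed to displayLinkWithRegistrar:callback:, captured so tests can fire the display link manually. */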
@property(nonatomic, copy) void (^fireDisplayLink)(void);

- (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;

@@ -138,6 +139,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
}
- (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
callback:(void (^)(void))callback {
self.fireDisplayLink = callback;
return self.displayLink;
}

@@ -243,13 +245,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
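// A real pixel buffer is now required, since the player retains the returned buffer and the test
// releases it.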
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
@@ -294,14 +297,15 @@ - (void)testInitStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
@@ -357,13 +361,14 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since the video was playing, the display link should not be paused after getting a buffer.
OCMVerify(never(), [mockDisplayLink setRunning:NO]);
}
@@ -790,6 +795,82 @@ - (void)testPublishesInRegistration {
XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]);
}

- (void)testPlayerShouldNotDropEverySecondFrame {
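// Drives two consecutive frames through the stubbed display link and verifies that each one
// produces a textureFrameAvailable call, i.e., that regular sampling does not drop every second
// frame.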
NSObject<FlutterPluginRegistrar> *registrar =
[GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
NSObject<FlutterTextureRegistry> *mockTextureRegistry =
OCMProtocolMock(@protocol(FlutterTextureRegistry));
OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);

FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
callback:^(){
}];
StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
[[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
displayLinkFactory:stubDisplayLinkFactory
registrar:partialRegistrar];

FlutterError *error;
[videoPlayerPlugin initialize:&error];
XCTAssertNil(error);
FVPCreationOptions *create = [FVPCreationOptions
makeWithAsset:nil
uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
packageName:nil
formatHint:nil
httpHeaders:@{}];
NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error];
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];

__block CMTime currentTime = kCMTimeZero;
OCMStub([mockVideoOutput itemTimeForHostTime:0])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
[invocation setReturnValue:&currentTime];
});
__block NSMutableSet *pixelBuffers = NSMutableSet.new;
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
[invocation getArgument:&itemTime atIndex:2];
BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&has];
});
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
itemTimeForDisplay:[OCMArg anyPointer]])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
[invocation getArgument:&itemTime atIndex:2];
CVPixelBufferRef bufferRef = NULL;
if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
}
[pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&bufferRef];
});
void (^advanceFrame)(void) = ^{
currentTime.value++;
[pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
};

advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
stubDisplayLinkFactory.fireDisplayLink();
OCMVerifyAllWithDelay(mockTextureRegistry, 10);

advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
CFRelease([player copyPixelBuffer]);
stubDisplayLinkFactory.fireDisplayLink();
OCMVerifyAllWithDelay(mockTextureRegistry, 10);
}

#if TARGET_OS_IOS
- (void)validateTransformFixForOrientation:(UIImageOrientation)orientation {
AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation];
(next file)
@@ -21,26 +21,24 @@ @interface FVPFrameUpdater : NSObject
@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
// The output that this updater is managing.
@property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput;
// The last time that has been validated as available according to hasNewPixelBufferForItemTime:.
@property(nonatomic, assign) CMTime lastKnownAvailableTime;
// The display link that drives frameUpdater.
@property(nonatomic) FVPDisplayLink *displayLink;
// The time interval between screen refresh updates. Display link duration is in an undefined state
// until displayLinkFired is called at least once so it should not be used directly.
@property(atomic) CFTimeInterval frameDuration;
@end

@implementation FVPFrameUpdater
- (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry {
NSAssert(self, @"super init cannot be nil");
if (self == nil) return nil;
_registry = registry;
_lastKnownAvailableTime = kCMTimeInvalid;
return self;
}

- (void)displayLinkFired {
// Only report a new frame if one is actually available.
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
_lastKnownAvailableTime = outputItemTime;
[_registry textureFrameAvailable:_textureId];
}
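// Record the actual refresh interval and notify the engine unconditionally; whether a new frame
// is actually available is now determined in copyPixelBuffer using regular sampling.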
self.frameDuration = _displayLink.duration;
[_registry textureFrameAvailable:_textureId];
}
@end

@@ -88,6 +86,20 @@ @interface FVPVideoPlayer ()
@property(nonatomic) FVPFrameUpdater *frameUpdater;
// The display link that drives frameUpdater.
@property(nonatomic) FVPDisplayLink *displayLink;
// The latest buffer obtained from video output. This is stored so that it can be returned from
// copyPixelBuffer again if nothing new is available, since the engine has undefined behavior when
// returning NULL.
@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
// The time that represents when the next frame displays.
@property(nonatomic) CFTimeInterval targetTime;
// Whether to enqueue textureFrameAvailable from copyPixelBuffer.
@property(nonatomic) BOOL selfRefresh;
// The time that represents the start of average frame duration measurement.
@property(nonatomic) CFTimeInterval startTime;
// The number of frames since the start of average frame duration measurement.
@property(nonatomic) int framesCount;
// The most recently recorded frame duration, used to detect significant changes.
@property(nonatomic) CFTimeInterval latestDuration;
// Whether a new frame needs to be provided to the engine regardless of the current play/pause state
// (e.g., after a seek while paused). If YES, the display link should continue to run until the next
// frame is successfully provided.
@@ -135,6 +147,7 @@ - (instancetype)initWithAsset:(NSString *)asset
}

- (void)dealloc {
CVBufferRelease(_latestPixelBuffer);
if (!_disposed) {
[self removeKeyValueObservers];
}
@@ -234,9 +247,14 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo
}
videoComposition.renderSize = CGSizeMake(width, height);

// TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
// Currently set at a constant 30 FPS
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
if (CMTIME_IS_VALID(videoTrack.minFrameDuration)) {
videoComposition.frameDuration = videoTrack.minFrameDuration;
} else {
NSLog(@"Warning: videoTrack.minFrameDuration for input video is invalid, please report this to "
@"https://github.com/flutter/flutter/issues with input video attached.");
videoComposition.frameDuration = CMTimeMake(1, 30);
}

return videoComposition;
}
@@ -283,6 +301,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
error:nil] == AVKeyValueStatusLoaded) {
// Rotate the video by using a videoComposition and the preferredTransform
self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack);
// Do not use video composition when it is not needed.
if (CGAffineTransformIsIdentity(self->_preferredTransform)) {
return;
}
// Note:
// https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
// Video composition can only be used with file-based media and is not supported for
@@ -319,6 +341,8 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
};
_videoOutput = [avFactory videoOutputWithPixelBufferAttributes:pixBuffAttributes];
frameUpdater.videoOutput = _videoOutput;
frameUpdater.displayLink = _displayLink;
_selfRefresh = true;

[self addObserversForItem:item player:_player];

@@ -358,7 +382,6 @@ - (void)observeValueForKeyPath:(NSString *)path
case AVPlayerItemStatusReadyToPlay:
[item addOutput:_videoOutput];
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
break;
}
} else if (context == presentationSizeContext || context == durationContext) {
@@ -368,7 +391,6 @@
// its presentation size or duration. When these properties are finally set, re-check if
// all required properties and instantiate the event sink if it is not already set up.
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
}
} else if (context == playbackLikelyToKeepUpContext) {
[self updatePlayingState];
@@ -447,6 +469,8 @@ - (void)setupEventSinkIfReadyToPlay {
}

_isInitialized = YES;
[self updatePlayingState];

_eventSink(@{
@"event" : @"initialized",
@"duration" : @(duration),
@@ -543,16 +567,32 @@ - (void)setPlaybackSpeed:(double)speed {
}

- (CVPixelBufferRef)copyPixelBuffer {
// If the difference between target time and current time is longer than this fraction of frame
// duration then reset target time.
const float resetThreshold = 0.5;

// Ensure video sampling at regular intervals. This function is not called at exact time
// intervals, so CACurrentMediaTime returns irregular timestamps, which causes missed video
// frames. The range outside of which targetTime is reset should be narrow enough to keep any
// possible lag as small as possible, and at the same time wide enough to avoid too-frequent
// resets, which would lead to irregular sampling.
// TODO: Ideally there would be a targetTimestamp of the display link used by the Flutter engine.
// https://github.com/flutter/flutter/issues/159087

[Review thread]

Contributor: nit: "make lags (due to skipping frames?) as less frequent as possible."

misos1 (author, Feb 2, 2025): "Lag" here means that targetTime is lagging behind the current
time, or conversely.
CFTimeInterval currentTime = CACurrentMediaTime();
CFTimeInterval duration = self.frameUpdater.frameDuration;
if (fabs(self.targetTime - currentTime) > duration * resetThreshold) {
self.targetTime = currentTime;
}
self.targetTime += duration;
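// Example: on a 60 Hz display (duration of roughly 16.7 ms), targetTime is re-anchored whenever
// it drifts more than about 8.3 ms from CACurrentMediaTime; otherwise it advances by exactly one
// refresh interval per call, keeping item-time sampling on a regular grid.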

CVPixelBufferRef buffer = NULL;
CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
buffer = [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
} else {
// If the current time isn't available yet, use the time that was checked when informing the
// engine that a frame was available (if any).
CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime;
if (CMTIME_IS_VALID(lastAvailableTime)) {
buffer = [_videoOutput copyPixelBufferForItemTime:lastAvailableTime itemTimeForDisplay:NULL];
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:self.targetTime];
if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
buffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
if (buffer) {
// Balance the owned reference from copyPixelBufferForItemTime.
CVBufferRelease(self.latestPixelBuffer);
self.latestPixelBuffer = buffer;
}
}

@@ -565,7 +605,48 @@ - (CVPixelBufferRef)copyPixelBuffer {
}
}

return buffer;
// Calling textureFrameAvailable only from within displayLinkFired would require a non-trivial
// solution to minimize missed video frames, due to the race between displayLinkFired,
// copyPixelBuffer, and the place where _textureFrameAvailable is reset to false in the Flutter
// engine.
// TODO: Ideally FlutterTexture would support a mode of operation where copyPixelBuffer is always
// called, or some other alternative, instead of being called on demand via textureFrameAvailable.
// https://github.com/flutter/flutter/issues/159162
if (self.displayLink.running && self.selfRefresh) {
// The number of frames over which to measure average frame duration.
const int windowSize = 10;
// If duration changes by this fraction or more then reset average frame duration measurement.
const float resetFraction = 0.01;
// If measured average frame duration is shorter than this fraction of frame duration obtained
// from display link then rely solely on refreshes from display link.
const float durationThreshold = 0.5;

if (fabs(duration - self.latestDuration) >= self.latestDuration * resetFraction) {
self.startTime = currentTime;
self.framesCount = 0;
self.latestDuration = duration;
}
if (self.framesCount == windowSize) {
CFTimeInterval averageDuration = (currentTime - self.startTime) / windowSize;
if (averageDuration < duration * durationThreshold) {
NSLog(@"Warning: measured average duration between frames is unexpectedly short (%f/%f), "
@"please report this to "
@"https://github.com/flutter/flutter/issues.",
averageDuration, duration);
self.selfRefresh = false;
}
self.startTime = currentTime;
self.framesCount = 0;
}
self.framesCount++;

dispatch_async(dispatch_get_main_queue(), ^{
[self.frameUpdater.registry textureFrameAvailable:self.frameUpdater.textureId];
[Review thread]

Collaborator: I missed this change in the last review. So this version just constantly calls
textureFrameAvailable:, as fast as possible, unconditionally, while the video is playing? Doesn't
that just completely defeat the purpose of having the display link at all?

misos1 (author): The display link is still usable for obtaining the actual duration and for
starting after a pause. I also tried calling textureFrameAvailable conditionally, using
hasNewPixelBufferForItemTime with targetTime + duration (another +duration into the future), but
it did not work well for below-60-FPS videos; the race problem occurs even when only some video
frames depend on textureFrameAvailable from the display link.

Collaborator:
> Display link is still usable for obtaining actual duration and for starting after pause.
Neither of those things requires a regularly firing display link; one is just a getter, and the
other is a one-time call. This version appears to have two sets of regular calls: the display
link callback, on a set cadence that we determine based on the video, and then this, which is as
fast as possible no matter what the refresh rate, frame rate, or elapsed time are. I understand
the goals of the previous iterations of this PR, but I don't understand unconditionally driving
buffer copies as fast as possible no matter what, or constantly telling the engine that there are
new frames available regardless of whether or not that's true.

misos1 (author):
> on a set cadence that we determine based on the video
It is actually based on the display refresh rate.
> which is as fast as possible no matter what the refresh rate, frame rate, or elapsed time are
This too: it is not faster than the display refresh rate, it is at the display refresh rate.

stuartmorgan-g (Collaborator, Nov 25, 2024):
> It is actually based on the display refresh rate.
Right, sorry; I was misremembering the logic used to set videoComposition.frameDuration as
feeding into the display link.
> This too: it is not faster than the display refresh rate, it is at the display refresh rate.
How is an unconditional and immediate call every time a frame is provided the same as the
refresh rate?

misos1 (author, Dec 5, 2024): I did not observe such high deviations. But deviations much smaller
than the screen refresh period are enough to produce different frames when sampling with
CACurrentMediaTime versus something more regular. Every call to CACurrentMediaTime may return a
timestamp strictly between the timestamps of the current and the next screen refresh, but the
time points of frames in the video are shifted randomly relative to this (and the shift may
change over time). Here is an example of irregular sampling (as with CACurrentMediaTime) in which
video frame 2 was shown twice while frame 3 was dropped, in contrast with regular sampling (as
with targetTime in this PR):

screen refresh number:   -1---|---2---|---3---|---4---|---
irregular sampling:       v        v    v           v
video frame number:      |---1---|---2---|---3---|---4---|
regular sampling:          ^       ^       ^       ^

Collaborator: Thanks, that's extremely helpful! I understand the issue much more clearly now. It
would be a great idea to put that ASCII diagram into the comment in the code. I think the next
step is to file an issue to explore with the engine team whether making the
copyPixelBuffer/textureFrameAvailable contract more explicit about call patterns (e.g., saying
that calls will not be more frequent than roughly the screen refresh rate) is something the
engine team is comfortable with, so we know whether or not we need to build limiting logic at the
plugin level.

misos1 (author): So given "We could make the behavior more documented/explicit but we may still
need to change things in the future" from flutter/flutter#160520 (comment), should I take that as
a yes, or do I need to implement that frame limiting? What is the next step?

Collaborator: The "may still need to change things in the future" means we shouldn't rely on the
current behavior, so we should do plugin-level frame limiting.

misos1 (author): Oh, OK. I understood that as "OK, but in the future switch to platform views."
});
}

// Add a retain for the engine, since the copyPixelBufferForItemTime has already been accounted
// for, and the engine expects an owning reference.
return CVBufferRetain(self.latestPixelBuffer);
[Review thread]

Collaborator: And then here: // Add a retain for the engine, since the
`copyPixelBufferForItemTime:` has already been accounted for, and the engine expects an owning
reference.
}
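The regular-sampling scheme discussed in the thread above can be illustrated in isolation. The
following is a minimal sketch, not part of the plugin: the standalone helper FVPAdvanceTargetTime
is hypothetical, but its constant and branching mirror the resetThreshold logic at the top of
copyPixelBuffer.

#import <QuartzCore/QuartzCore.h>  // CACurrentMediaTime, CFTimeInterval
#include <math.h>                  // fabs

// Hypothetical helper mirroring the targetTime handling in copyPixelBuffer.
static CFTimeInterval FVPAdvanceTargetTime(CFTimeInterval targetTime,
                                           CFTimeInterval now,
                                           CFTimeInterval frameDuration) {
  // If the target time has drifted more than half a frame from the wall clock (first call, a
  // dropped callback, a refresh-rate change), re-anchor it to the clock.
  const double resetThreshold = 0.5;
  if (fabs(targetTime - now) > frameDuration * resetThreshold) {
    targetTime = now;
  }
  // Otherwise advance by exactly one refresh interval, so item times are sampled on a regular
  // grid even though the callbacks themselves arrive at slightly irregular times.
  return targetTime + frameDuration;
}

// Usage sketch, assuming a 60 Hz display:
//   CFTimeInterval target = 0;
//   target = FVPAdvanceTargetTime(target, CACurrentMediaTime(), 1.0 / 60.0);
//   CMTime itemTime = [videoOutput itemTimeForHostTime:target];

Resetting only on large drift is what keeps the sampling grid regular; re-anchoring on every call
would reproduce the irregular CACurrentMediaTime sampling shown in the diagram above.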

- (void)onTextureUnregistered:(NSObject<FlutterTexture> *)texture {
(next file)
@@ -18,6 +18,9 @@
/// Defaults to NO.
@property(nonatomic, assign) BOOL running;

/// The time interval between screen refresh updates.
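/// The value is in an undefined state until the display link has fired at least once.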
@property(nonatomic, readonly) CFTimeInterval duration;

/// Initializes a display link that calls the given callback when fired.
///
/// The display link starts paused, so must be started, by setting 'running' to YES, before the