Skip to content
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
## NEXT
## 2.6.2

* Adds support for playing videos at more than 30 FPS.
* Fixes playing state not updating in some paths.
* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3.

## 2.6.1
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ @interface StubFVPDisplayLinkFactory : NSObject <FVPDisplayLinkFactory>

/** The display link to return from displayLinkWithRegistrar:callback:. */
@property(nonatomic, strong) FVPDisplayLink *displayLink;
@property(nonatomic) void (^fireDisplayLink)(void);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: (nonatomic, copy)


- (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;

Expand All @@ -138,6 +139,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
}
// Stub implementation of the FVPDisplayLinkFactory method: captures the
// engine-provided callback in the fireDisplayLink property so that tests can
// simulate a display-link tick manually, and returns the preconfigured link.
- (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
                                    callback:(void (^)(void))callback {
  self.fireDisplayLink = callback;
  return self.displayLink;
}

Expand Down Expand Up @@ -243,13 +245,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -294,14 +297,15 @@ - (void)testInitStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -357,13 +361,14 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since the video was playing, the display link should not be paused after getting a buffer.
OCMVerify(never(), [mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -790,6 +795,82 @@ - (void)testPublishesInRegistration {
XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]);
}

// Regression test: with the display link and the engine's copyPixelBuffer
// racing each other, the player must still surface every frame — it must not
// drop every second frame because a buffer was consumed before the next tick.
- (void)testPlayerShouldNotDropEverySecondFrame {
  NSObject<FlutterPluginRegistrar> *registrar =
      [GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
  NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
  NSObject<FlutterTextureRegistry> *mockTextureRegistry =
      OCMProtocolMock(@protocol(FlutterTextureRegistry));
  OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);

  FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
                                                                 callback:^{
                                                                 }];
  StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
      [[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
  AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
  FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
      initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
      displayLinkFactory:stubDisplayLinkFactory
              registrar:partialRegistrar];

  FlutterError *error;
  [videoPlayerPlugin initialize:&error];
  XCTAssertNil(error);
  FVPCreationOptions *create = [FVPCreationOptions
      makeWithAsset:nil
                uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
        packageName:nil
         formatHint:nil
        httpHeaders:@{}];
  NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error];
  FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];

  // Simulated playback clock; advanced by one tick per frame by advanceFrame().
  __block CMTime currentTime = kCMTimeZero;
  OCMStub([mockVideoOutput itemTimeForHostTime:0])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        [invocation setReturnValue:&currentTime];
      });
  // Item times for which a fake pixel buffer has been produced but not yet
  // consumed. (No __block needed: the pointer is never reassigned, only the
  // set's contents are mutated.)
  NSMutableSet<NSValue *> *pixelBuffers = [NSMutableSet set];
  OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        CMTime itemTime;
        [invocation getArgument:&itemTime atIndex:2];
        BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
        [invocation setReturnValue:&has];
      });
  OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
                                   itemTimeForDisplay:[OCMArg anyPointer]])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        CMTime itemTime;
        [invocation getArgument:&itemTime atIndex:2];
        // Each buffer can be copied at most once; copying consumes it, which
        // mirrors AVPlayerItemVideoOutput's one-shot buffer delivery.
        CVPixelBufferRef bufferRef = NULL;
        if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
          CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
        }
        [pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
        [invocation setReturnValue:&bufferRef];
      });
  void (^advanceFrame)(void) = ^{
    currentTime.value++;
    [pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
  };

  // First frame: firing the display link alone must notify the engine.
  advanceFrame();
  OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
  stubDisplayLinkFactory.fireDisplayLink();
  OCMVerifyAllWithDelay(mockTextureRegistry, 10);

  // Second frame: even after the engine consumed the previous buffer via
  // copyPixelBuffer, the next display-link fire must still report a new frame
  // (i.e. the frame is not dropped).
  advanceFrame();
  OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
  CFRelease([player copyPixelBuffer]);
  stubDisplayLinkFactory.fireDisplayLink();
  OCMVerifyAllWithDelay(mockTextureRegistry, 10);
}

#if TARGET_OS_IOS
- (void)validateTransformFixForOrientation:(UIImageOrientation)orientation {
AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,27 +21,38 @@ @interface FVPFrameUpdater : NSObject
@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
// The output that this updater is managing.
@property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput;
// The last time that has been validated as available according to hasNewPixelBufferForItemTime:.
@property(nonatomic, assign) CMTime lastKnownAvailableTime;
@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

All properties need comments, per the style guide. Please explain in comments what the purpose of these two new properties is.

@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
@end

@implementation FVPFrameUpdater
// Initializes the frame updater with the texture registry it should notify.
// The registry is stored in a weak property, so no ownership is taken here.
- (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry {
  // BUG FIX: the original never assigned the result of [super init], so the
  // NSAssert checked the raw alloc'd receiver rather than the initialized one,
  // and superclass initialization was skipped entirely.
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  if (self == nil) return nil;
  _registry = registry;
  // Sentinel meaning "no frame has been validated as available yet".
  _lastKnownAvailableTime = kCMTimeInvalid;
  return self;
}

- (void)displayLinkFired {
// Only report a new frame if one is actually available.
CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hm, shouldn't these two lines be inside the dispatch_async? AVPlayerItemVideoOutput doesn't seem to be marked as threadsafe.

_lastKnownAvailableTime = outputItemTime;
dispatch_async(self.pixelBufferSynchronizationQueue, ^{
if (self.latestPixelBuffer) {
CFRelease(self.latestPixelBuffer);
}
self.latestPixelBuffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is essentially doubling the memory usage for video output, isn't it? Why doesn't the previous approach of only storing the timestamp work? The PR description discusses the early-consume problem, but it seems like that could be addressed simply by changing copyPixelBuffer to prefer the last time instead of the current time.

Copy link
Contributor Author

@misos1 misos1 Aug 23, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is essentially doubling the memory usage for video output, isn't it?

Can you please explain how? Maybe memory usage with the original approach would be one frame less if flutter engine deleted its previous pixel buffer right before calling copyPixelBuffer but it would not want to do that because copyPixelBuffer can also return NULL and then it needs something latest to show.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you please explain how?

Aren't you keeping an extra copy of the frame besides the one kept by the player and the one kept by the engine?

I guess not doubled, but increasing by one frame relative to the current implementation.

(Also looking again, the current PR code appears to be leaking every frame it consumes.)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(Also looking again, the current PR code appears to be leaking every frame it consumes.)

I cannot see it.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oh, I see now. The memory flow is pretty hard to follow here on the copied buffer as currently written, with the buffer sometimes freed by the frame updater, and sometimes handed off to to the engine.

Which brings us back to the initial question: why do we need to copy the buffer proactively when the display link fires instead of storing the timestamp?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Aren't you keeping an extra copy of the frame besides the one kept by the player and the one kept by the engine?

Both versions have a worst case number of pixel buffers 2+N where N is the number held by the player. This case is between copyPixelBufferForItemTime and until the engine replaces its own after copyPixelBuffer. Current version just can have 2+N for a little longer, especially when displayLinkFired is called after copyPixelBuffer at each frame. Btw if the player kept only a single frame then storing timestamp instead of buffer would not work.

Which brings us back to the initial question: why do we need to copy the buffer proactively when the display link fires instead of storing the timestamp?

That would mean fetching pixel buffers from the past, especially when displayLinkFired is called after copyPixelBuffer at each frame. But this is based on undocumented and possibly wrong assumption that the player always leaves at least one past frame ready for us. What if this is not the case? Actually it is not. Seems the player periodically flushes all past pixel buffers. After there are some 3 pixel buffers in the past then the player flushes them all.

I tried an implementation which was sending timestamps instead of pixel buffers and copyPixelBufferForItemTime often returned NULL with timestamp for which hasNewPixelBufferForItemTime returned true before. It had 4x more frame drops in average during my tests compared to little modified current implementation (with copyPixelBufferForItemTime moved outside of pixelBufferSynchronizationQueue).

There are several causes of frame drops. This modified implementation minimises cases where copyPixelBufferForItemTime returns NULL and accidentally also frame drops caused by artefacts of using two "display links". They are caused by displayLinkFired and copyPixelBuffer changing order. Because things running on pixelBufferSynchronizationQueue are short (in time) and new pixel buffer is generated after some time then even when is copyPixelBuffer called after displayLinkFired at some frame (while before it was conversely) it has chance to obtain pixel buffer from displayLinkFired from previous frame and to not clear _textureFrameAvailable from this latest displayLinkFired. But this is of course not ideal, a more proper way would be to wait until the middle of frame and then send a new pixel buffer and call textureFrameAvailable but even more proper solution would be to not use two "display links".

Another cause of frame drops is when hasNewPixelBufferForItemTime returns false which is now prevalent. Seems this is caused by irregularities at which is called displayLinkFired which causes irregular timestamps returned by CACurrentMediaTime. I tested to use CADisplayLink::timestamp instead and frame drops dropped almost to zero, below 0.1%, around 20x less than current implementation (modified) on average during my tests. But this would need access to CADisplayLink through FVPDisplayLink and some other implementation for macos.

I also tested an implementation without a display link where textureFrameAvailable and copyPixelBuffer were called for each frame and it had around 3x less frame drops than the current implementation (modified). Unfortunately I could not use CADisplayLink::timestamp here so there were still frame drops due to irregularities. Flutter engine would need to provide something similar, or maybe it can be simply obtained by rounding CACurrentMediaTime down to refresh duration of display but that would then require update frequency of display from engine (or something what is doing FVPDisplayLink for macos).

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That would mean fetching pixel buffers from the past, especially when displayLinkFired is called after copyPixelBuffer at each frame. But this is based on undocumented and possibly wrong assumption that the player always leaves at least one past frame ready for us. What if this is not the case? Actually it is not. Seems the player periodically flushes all past pixel buffers. After there are some 3 pixel buffers in the past then the player flushes them all.

I tried an implementation which was sending timestamps instead of pixel buffers and copyPixelBufferForItemTime often returned NULL with timestamp for which hasNewPixelBufferForItemTime returned true before. It had 4x more frame drops in average during my tests compared to little modified current implementation

Very interesting, thanks for the details! That definitely seems worth the slight memory hit.

Another cause of frame drops is when hasNewPixelBufferForItemTime returns false which is now prevalent.

Could you file an issue with the details of what you've found here for us to follow up on in the future? It sounds like you've done a lot of great investigation here that we should be sure to capture and track.

itemTimeForDisplay:NULL];
});
[_registry textureFrameAvailable:_textureId];
}
}

// Releases any pixel buffer that the display link fetched but that was never
// handed off to the engine through copyPixelBuffer. Under ARC, [super dealloc]
// must not be called explicitly.
- (void)dealloc {
  CVPixelBufferRef unconsumedBuffer = _latestPixelBuffer;
  if (unconsumedBuffer != NULL) {
    CFRelease(unconsumedBuffer);
  }
}
@end

@interface FVPDefaultAVFactory : NSObject <FVPAVFactory>
Expand Down Expand Up @@ -92,6 +103,7 @@ @interface FVPVideoPlayer ()
// (e.g., after a seek while paused). If YES, the display link should continue to run until the next
// frame is successfully provided.
@property(nonatomic, assign) BOOL waitingForFrame;
@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;

- (instancetype)initWithURL:(NSURL *)url
frameUpdater:(FVPFrameUpdater *)frameUpdater
Expand Down Expand Up @@ -234,9 +246,8 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo
}
videoComposition.renderSize = CGSizeMake(width, height);

// TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
// Currently set at a constant 30 FPS
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
videoComposition.frameDuration = videoTrack.minFrameDuration;

return videoComposition;
}
Expand Down Expand Up @@ -283,6 +294,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
error:nil] == AVKeyValueStatusLoaded) {
// Rotate the video by using a videoComposition and the preferredTransform
self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack);
// Do not use a video composition when it is not needed (identity transform).
if (CGAffineTransformIsIdentity(self->_preferredTransform)) {
return;
}
// Note:
// https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
// Video composition can only be used with file-based media and is not supported for
Expand Down Expand Up @@ -320,6 +335,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
_videoOutput = [avFactory videoOutputWithPixelBufferAttributes:pixBuffAttributes];
frameUpdater.videoOutput = _videoOutput;

_pixelBufferSynchronizationQueue =
dispatch_queue_create("io.flutter.video_player.pixelBufferSynchronizationQueue", NULL);
frameUpdater.pixelBufferSynchronizationQueue = _pixelBufferSynchronizationQueue;

[self addObserversForItem:item player:_player];

[asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler];
Expand Down Expand Up @@ -358,7 +377,6 @@ - (void)observeValueForKeyPath:(NSString *)path
case AVPlayerItemStatusReadyToPlay:
[item addOutput:_videoOutput];
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
break;
}
} else if (context == presentationSizeContext || context == durationContext) {
Expand All @@ -368,7 +386,6 @@ - (void)observeValueForKeyPath:(NSString *)path
// its presentation size or duration. When these properties are finally set, re-check if
// all required properties and instantiate the event sink if it is not already set up.
[self setupEventSinkIfReadyToPlay];
[self updatePlayingState];
}
} else if (context == playbackLikelyToKeepUpContext) {
[self updatePlayingState];
Expand Down Expand Up @@ -447,6 +464,8 @@ - (void)setupEventSinkIfReadyToPlay {
}

_isInitialized = YES;
[self updatePlayingState];

_eventSink(@{
@"event" : @"initialized",
@"duration" : @(duration),
Expand Down Expand Up @@ -543,18 +562,11 @@ - (void)setPlaybackSpeed:(double)speed {
}

- (CVPixelBufferRef)copyPixelBuffer {
CVPixelBufferRef buffer = NULL;
CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
buffer = [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
} else {
// If the current time isn't available yet, use the time that was checked when informing the
// engine that a frame was available (if any).
CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime;
if (CMTIME_IS_VALID(lastAvailableTime)) {
buffer = [_videoOutput copyPixelBufferForItemTime:lastAvailableTime itemTimeForDisplay:NULL];
}
}
__block CVPixelBufferRef buffer = NULL;
dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
buffer = self.frameUpdater.latestPixelBuffer;
self.frameUpdater.latestPixelBuffer = NULL;
});

if (self.waitingForFrame && buffer) {
self.waitingForFrame = NO;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: video_player_avfoundation
description: iOS and macOS implementation of the video_player plugin.
repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22
version: 2.6.1
version: 2.6.2

environment:
sdk: ^3.3.0
Expand Down