[video_player_avfoundation] enable more than 30 fps #7466
Changes from 1 commit
```diff
@@ -21,27 +21,38 @@ @interface FVPFrameUpdater : NSObject
 @property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
 // The output that this updater is managing.
 @property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput;
-// The last time that has been validated as avaliable according to hasNewPixelBufferForItemTime:.
-@property(nonatomic, assign) CMTime lastKnownAvailableTime;
+@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
+@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
 @end

 @implementation FVPFrameUpdater
 - (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry {
   NSAssert(self, @"super init cannot be nil");
   if (self == nil) return nil;
   _registry = registry;
-  _lastKnownAvailableTime = kCMTimeInvalid;
   return self;
 }

 - (void)displayLinkFired {
   // Only report a new frame if one is actually available.
   CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
   if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
-    _lastKnownAvailableTime = outputItemTime;
+    dispatch_async(self.pixelBufferSynchronizationQueue, ^{
+      if (self.latestPixelBuffer) {
+        CFRelease(self.latestPixelBuffer);
+      }
+      self.latestPixelBuffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime
+                                                         itemTimeForDisplay:NULL];
+    });
     [_registry textureFrameAvailable:_textureId];
   }
 }

+- (void)dealloc {
+  if (_latestPixelBuffer) {
+    CFRelease(_latestPixelBuffer);
+  }
+}
 @end

 @interface FVPDefaultAVFactory : NSObject <FVPAVFactory>
```
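The new fields on `FVPFrameUpdater` form a single-slot cache: on every display-link tick the newest pixel buffer is copied and parked behind a serial queue, and any frame that is superseded before the engine reads it is released. Below is a minimal, hedged sketch of that pattern in isolation; `ExamplePixelBufferCache`, its queue label, and its method names are placeholders, not plugin API.

```objc
// Minimal sketch of the caching pattern introduced above: the newest pixel
// buffer is kept at +1 behind a serial queue so the producer (display link)
// and the consumer (engine texture copy) never race on the same buffer.
#import <AVFoundation/AVFoundation.h>

@interface ExamplePixelBufferCache : NSObject
// Serial queue guarding `latest`.
@property(nonatomic) dispatch_queue_t queue;
// Retained (+1) most recent frame, or NULL once it has been consumed.
@property(nonatomic) CVPixelBufferRef latest;
@end

@implementation ExamplePixelBufferCache
- (instancetype)init {
  if ((self = [super init])) {
    _queue = dispatch_queue_create("example.pixelBufferCache", DISPATCH_QUEUE_SERIAL);
  }
  return self;
}

// Producer side: store a buffer that is already retained (e.g. the result of
// -copyPixelBufferForItemTime:itemTimeForDisplay:), releasing any frame the
// consumer never picked up so nothing leaks.
- (void)storeBuffer:(CVPixelBufferRef)buffer {
  dispatch_async(self.queue, ^{
    if (self.latest) {
      CFRelease(self.latest);
    }
    self.latest = buffer;
  });
}

// Consumer side: take ownership of the cached frame; the caller is then
// responsible for the matching CFRelease.
- (CVPixelBufferRef)takeBuffer {
  __block CVPixelBufferRef buffer = NULL;
  dispatch_sync(self.queue, ^{
    buffer = self.latest;
    self.latest = NULL;
  });
  return buffer;
}

- (void)dealloc {
  // Release a frame that was produced but never consumed.
  if (_latest) {
    CFRelease(_latest);
  }
}
@end
```

The bookkeeping mirrors the diff: every buffer enters the cache at +1, and exactly one CFRelease happens when it is overwritten, consumed, or still present at dealloc.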
```diff
@@ -92,6 +103,7 @@ @interface FVPVideoPlayer ()
 // (e.g., after a seek while paused). If YES, the display link should continue to run until the next
 // frame is successfully provided.
 @property(nonatomic, assign) BOOL waitingForFrame;
+@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;

 - (instancetype)initWithURL:(NSURL *)url
                frameUpdater:(FVPFrameUpdater *)frameUpdater
```
```diff
@@ -234,9 +246,8 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo
   }
   videoComposition.renderSize = CGSizeMake(width, height);

-  // TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
-  // Currently set at a constant 30 FPS
-  videoComposition.frameDuration = CMTimeMake(1, 30);
+  videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
+  videoComposition.frameDuration = videoTrack.minFrameDuration;

   return videoComposition;
 }
```
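This hunk is the heart of the PR: instead of pinning the composition to a constant 30 fps, frame timing now follows the source track. A hedged sketch of just that timing setup is below, using a hypothetical helper; the plugin's real `getVideoCompositionWithTransform:withAsset:withVideoTrack:` also builds the rotation instruction from the transform, which is omitted here.

```objc
// Hypothetical helper (not plugin API) showing track-driven frame timing for
// an AVMutableVideoComposition, so assets above 30 fps render at full rate.
#import <AVFoundation/AVFoundation.h>

static AVMutableVideoComposition *ExampleTrackTimedComposition(AVAssetTrack *videoTrack,
                                                               CGSize renderSize) {
  AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
  videoComposition.renderSize = renderSize;
  // Derive frame timing from the track itself (available on iOS 11+/macOS 10.13+)...
  videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
  // ...and advertise the track's smallest frame duration (its highest frame
  // rate) instead of the previous hard-coded CMTimeMake(1, 30).
  videoComposition.frameDuration = videoTrack.minFrameDuration;
  return videoComposition;
}
```

`minFrameDuration` is the shortest frame interval declared by the track, so variable-frame-rate content is no longer capped below its peak rate.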
```diff
@@ -283,6 +294,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
                    error:nil] == AVKeyValueStatusLoaded) {
     // Rotate the video by using a videoComposition and the preferredTransform
     self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack);
+    // do not use video composition when it is not needed
+    if (CGAffineTransformIsIdentity(self->_preferredTransform)) {
+      return;
+    }
     // Note:
     // https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
     // Video composition can only be used with file-based media and is not supported for
```
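The added guard skips building a composition when the standardized transform is already the identity. A small sketch of that check follows; the helper name is illustrative, and the motivation is the Note quoted in the diff, which limits compositions to file-based media.

```objc
// Illustrative helper (not plugin API): a video composition is only needed to
// apply a non-identity rotation/flip; otherwise the item can play as-is, which
// matters because compositions cannot be used with non-file-based media.
#import <CoreGraphics/CoreGraphics.h>

static bool ExampleNeedsVideoComposition(CGAffineTransform standardizedTransform) {
  return !CGAffineTransformIsIdentity(standardizedTransform);
}
```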
```diff
@@ -320,6 +335,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
   _videoOutput = [avFactory videoOutputWithPixelBufferAttributes:pixBuffAttributes];
   frameUpdater.videoOutput = _videoOutput;

+  _pixelBufferSynchronizationQueue =
+      dispatch_queue_create("io.flutter.video_player.pixelBufferSynchronizationQueue", NULL);
+  frameUpdater.pixelBufferSynchronizationQueue = _pixelBufferSynchronizationQueue;
+
   [self addObserversForItem:item player:_player];

   [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler];
```
```diff
@@ -358,7 +377,6 @@ - (void)observeValueForKeyPath:(NSString *)path
       case AVPlayerItemStatusReadyToPlay:
         [item addOutput:_videoOutput];
         [self setupEventSinkIfReadyToPlay];
-        [self updatePlayingState];
         break;
     }
   } else if (context == presentationSizeContext || context == durationContext) {
```
```diff
@@ -368,7 +386,6 @@ - (void)observeValueForKeyPath:(NSString *)path
       // its presentation size or duration. When these properties are finally set, re-check if
       // all required properties and instantiate the event sink if it is not already set up.
       [self setupEventSinkIfReadyToPlay];
-      [self updatePlayingState];
     }
   } else if (context == playbackLikelyToKeepUpContext) {
     [self updatePlayingState];
```
```diff
@@ -447,6 +464,8 @@ - (void)setupEventSinkIfReadyToPlay {
   }

   _isInitialized = YES;
+  [self updatePlayingState];
+
   _eventSink(@{
     @"event" : @"initialized",
     @"duration" : @(duration),
```
```diff
@@ -543,18 +562,11 @@ - (void)setPlaybackSpeed:(double)speed {
 }

 - (CVPixelBufferRef)copyPixelBuffer {
-  CVPixelBufferRef buffer = NULL;
-  CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()];
-  if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
-    buffer = [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
-  } else {
-    // If the current time isn't available yet, use the time that was checked when informing the
-    // engine that a frame was available (if any).
-    CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime;
-    if (CMTIME_IS_VALID(lastAvailableTime)) {
-      buffer = [_videoOutput copyPixelBufferForItemTime:lastAvailableTime itemTimeForDisplay:NULL];
-    }
-  }
+  __block CVPixelBufferRef buffer = NULL;
+  dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
+    buffer = self.frameUpdater.latestPixelBuffer;
+    self.frameUpdater.latestPixelBuffer = NULL;
+  });

   if (self.waitingForFrame && buffer) {
     self.waitingForFrame = NO;
```
Review comment (nit): `(nonatomic, copy)`
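For context on why the rewritten `copyPixelBuffer` can hand the cached buffer over without copying it again: the `FlutterTexture` protocol expects a +1 retained `CVPixelBufferRef` that the engine releases once it has uploaded the frame. Below is a hedged, bare-bones sketch of a conformer; `ExampleVideoTexture`, its init, and its queue label are illustrative, not the plugin's `FVPVideoPlayer`.

```objc
// Bare-bones FlutterTexture conformer showing the ownership hand-off used by
// the new copyPixelBuffer implementation.
#import <Flutter/Flutter.h>
#import <CoreVideo/CoreVideo.h>

@interface ExampleVideoTexture : NSObject <FlutterTexture>
@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
@end

@implementation ExampleVideoTexture
- (instancetype)init {
  if ((self = [super init])) {
    _pixelBufferSynchronizationQueue =
        dispatch_queue_create("example.pixelBufferSynchronizationQueue", DISPATCH_QUEUE_SERIAL);
  }
  return self;
}

// Called by the engine; must return a retained buffer (or NULL), which the
// engine releases after use.
- (CVPixelBufferRef)copyPixelBuffer {
  __block CVPixelBufferRef buffer = NULL;
  dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
    // Transfer ownership: clearing the cache here means the producer's
    // CFRelease-before-overwrite never touches a buffer the engine now owns.
    buffer = self.latestPixelBuffer;
    self.latestPixelBuffer = NULL;
  });
  return buffer;
}
@end
```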