annotate modules/javafx.media/src/main/native/jfxmedia/platform/osx/avf/AVFMediaPlayer.mm @ 10315:020fbca40bf9

8170630: [MacOSX] Media crash NSInvalidArgumentException playing some video Reviewed-by: ddehaven
author almatvee
date Thu, 02 Feb 2017 12:46:31 -0800
parents 1143dd8b53d8
children f35915708e0c
rev   line source
ddehaven@8148 1 /*
almatvee@10315 2 * Copyright (c) 2014, 2017, Oracle and/or its affiliates. All rights reserved.
ddehaven@8148 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
ddehaven@8148 4 *
ddehaven@8148 5 * This code is free software; you can redistribute it and/or modify it
ddehaven@8148 6 * under the terms of the GNU General Public License version 2 only, as
ddehaven@8148 7 * published by the Free Software Foundation. Oracle designates this
ddehaven@8148 8 * particular file as subject to the "Classpath" exception as provided
ddehaven@8148 9 * by Oracle in the LICENSE file that accompanied this code.
ddehaven@8148 10 *
ddehaven@8148 11 * This code is distributed in the hope that it will be useful, but WITHOUT
ddehaven@8148 12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
ddehaven@8148 13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
ddehaven@8148 14 * version 2 for more details (a copy is included in the LICENSE file that
ddehaven@8148 15 * accompanied this code).
ddehaven@8148 16 *
ddehaven@8148 17 * You should have received a copy of the GNU General Public License version
ddehaven@8148 18 * 2 along with this work; if not, write to the Free Software Foundation,
ddehaven@8148 19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
ddehaven@8148 20 *
ddehaven@8148 21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
ddehaven@8148 22 * or visit www.oracle.com if you need additional information or have any
ddehaven@8148 23 * questions.
ddehaven@8148 24 */
ddehaven@8148 25
ddehaven@8148 26 #import "AVFMediaPlayer.h"
ddehaven@8148 27 #import <objc/runtime.h>
ddehaven@8148 28 #import "CVVideoFrame.h"
ddehaven@8148 29
ddehaven@8148 30 #import <PipelineManagement/NullAudioEqualizer.h>
ddehaven@8148 31 #import <PipelineManagement/NullAudioSpectrum.h>
ddehaven@8148 32
ddehaven@8148 33 #import "AVFAudioProcessor.h"
ddehaven@8148 34
ddehaven@8148 35 // "borrowed" from green screen player on ADC
ddehaven@8148 36 // These are used to reduce power consumption when there are no video frames
ddehaven@8148 37 // to be rendered, which is generally A Good Thing
ddehaven@8148 38 #define FREEWHEELING_PERIOD_IN_SECONDS 0.5
ddehaven@8148 39 #define ADVANCE_INTERVAL_IN_SECONDS 0.1
ddehaven@8148 40
ddehaven@8148 41 // set to 1 to debug track information
ddehaven@8148 42 #define DUMP_TRACK_INFO 0
ddehaven@8148 43
ddehaven@8148 44 // trick used by Apple in AVGreenScreenPlayer
ddehaven@8148 45 // This avoids calling [NSString isEqualTo:@"..."]
ddehaven@8148 46 // The actual value is meaningless, but needs to be unique
ddehaven@8148 47 static void *AVFMediaPlayerItemStatusContext = &AVFMediaPlayerItemStatusContext;
ddehaven@8148 48 static void *AVFMediaPlayerItemDurationContext = &AVFMediaPlayerItemDurationContext;
ddehaven@8148 49 static void *AVFMediaPlayerItemTracksContext = &AVFMediaPlayerItemTracksContext;
ddehaven@8148 50
ddehaven@8148 51 #define FORCE_VO_FORMAT 0
ddehaven@8148 52 #if FORCE_VO_FORMAT
ddehaven@8148 53 // #define FORCED_VO_FORMAT kCVPixelFormatType_32BGRA
ddehaven@8148 54 // #define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8
ddehaven@8148 55 // #define FORCED_VO_FORMAT kCVPixelFormatType_420YpCbCr8Planar
ddehaven@8148 56 #define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8_yuvs // Unsupported, use to test fallback
ddehaven@8148 57 #endif
ddehaven@8148 58
ddehaven@8148 59 // Apple really likes to output '2vuy', this should be the least expensive conversion
ddehaven@8148 60 #define FALLBACK_VO_FORMAT kCVPixelFormatType_422YpCbCr8
ddehaven@8148 61
ddehaven@9307 62 #define FOURCC_CHAR(f) ((f) & 0x7f) ? (char)((f) & 0x7f) : '?'
ddehaven@9307 63
// Renders a FourCharCode for logging. Values below 0x100 are not printable
// four-character codes, so they are shown as a decimal number; anything else
// is shown as its four ASCII characters, with any byte whose low 7 bits are
// zero replaced by '?'.
static inline NSString *FourCCToNSString(UInt32 fcc) {
    if (fcc < 0x100) {
        return [NSString stringWithFormat:@"%u", fcc];
    }
    char chars[5] = {0, 0, 0, 0, 0};
    for (int i = 0; i < 4; i++) {
        char c = (char)((fcc >> ((3 - i) * 8)) & 0x7f);
        chars[i] = c ? c : '?';
    }
    return [NSString stringWithFormat:@"%c%c%c%c",
            chars[0], chars[1], chars[2], chars[3]];
}
ddehaven@9307 74
ddehaven@9307 75 #if DUMP_TRACK_INFO
// Appends one printf-style formatted line (with a trailing newline) to the
// mutable log string 's'. Only compiled in when DUMP_TRACK_INFO is enabled;
// normally invoked through the TRACK_LOG macro.
static void append_log(NSMutableString *s, NSString *fmt, ...) {
    va_list ap;
    va_start(ap, fmt);
    NSString *line = [[NSString alloc] initWithFormat:fmt arguments:ap];
    va_end(ap);
    [s appendString:line];
    [s appendString:@"\n"];
}
ddehaven@9307 83 #define TRACK_LOG(fmt, ...) append_log(trackLog, fmt, ##__VA_ARGS__)
ddehaven@9307 84 #else
ddehaven@9307 85 #define TRACK_LOG(...) {}
ddehaven@9307 86 #endif
ddehaven@9307 87
ddehaven@8148 88 @implementation AVFMediaPlayer
ddehaven@8148 89
ddehaven@8148 90 static void SpectrumCallbackProc(void *context, double duration);
ddehaven@8148 91
ddehaven@8148 92 static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
ddehaven@8148 93 const CVTimeStamp *inNow,
ddehaven@8148 94 const CVTimeStamp *inOutputTime,
ddehaven@8148 95 CVOptionFlags flagsIn,
ddehaven@8148 96 CVOptionFlags *flagsOut,
ddehaven@8148 97 void *displayLinkContext);
ddehaven@8148 98
// Reports whether this AVFoundation-based player can run on the current OS.
// AVPlayerItemVideoOutput first shipped in 10.8; its absence means we are on
// 10.7 or earlier, which is no longer supported.
+ (BOOL) playerAvailable {
    return (objc_getClass("AVPlayerItemVideoOutput") != nil);
}
ddehaven@8148 105
// Designated initializer: creates an AVPlayer for the given URL and wires up
// all observation machinery (end-of-media notification, KVO on the current
// item, audio processor spectrum callback). Returns nil if the underlying
// AVPlayer cannot be created.
// NOTE(review): 'hdlr' is stored as a raw C++ pointer (not retained); the
// caller is assumed to own its lifetime -- confirm against the caller.
- (id) initWithURL:(NSURL *)source eventHandler:(CJavaPlayerEventDispatcher*)hdlr {
    if ((self = [super init]) != nil) {
        // Sentinels: no frame size seen yet, so the very first decoded frame
        // always triggers a frame-size-changed event (see sendPixelBuffer:).
        previousWidth = -1;
        previousHeight = -1;
        previousPlayerState = kPlayerState_UNKNOWN;

        eventHandler = hdlr;

        self.movieURL = source;
        _buggyHLSSupport = NO;
        _hlsBugResetCount = 0;

        // Create our own work queue; used as the video output delegate queue.
        playerQueue = dispatch_queue_create(NULL, NULL);

        // Create the player
        _player = [AVPlayer playerWithURL:source];
        if (!_player) {
            return nil;
        }
        _player.volume = 1.0f;
        _player.muted = NO;

        // Set the player item end action to NONE since we'll handle it internally
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

        /*
         * AVPlayerItem notifications we could listen for:
         * 10.7 AVPlayerItemTimeJumpedNotification -> the item's current time has changed discontinuously
         * 10.7 AVPlayerItemDidPlayToEndTimeNotification -> item has played to its end time
         * 10.7 AVPlayerItemFailedToPlayToEndTimeNotification (userInfo = NSError) -> item has failed to play to its end time
         * 10.9 AVPlayerItemPlaybackStalledNotification -> media did not arrive in time to continue playback
         */
        playerObservers = [[NSMutableArray alloc] init];
        id<NSObject> observer;
        __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        observer = [center addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                       object:_player.currentItem
                                        queue:[NSOperationQueue mainQueue]
                                   usingBlock:^(NSNotification *note) {
                                       // promote FINISHED state...
                                       [blockSelf setPlayerState:kPlayerState_FINISHED];
                                   }];
        if (observer) {
            // Recorded so dispose can removeObserver: each one later.
            [playerObservers addObject:observer];
        }

        // KVO on the current item; the static context pointers identify each
        // key path without string comparison (see observeValueForKeyPath:).
        keyPathsObserved = [[NSMutableArray alloc] init];
        [self observeKeyPath:@"self.player.currentItem.status"
                 withContext:AVFMediaPlayerItemStatusContext];

        [self observeKeyPath:@"self.player.currentItem.duration"
                 withContext:AVFMediaPlayerItemDurationContext];

        [self observeKeyPath:@"self.player.currentItem.tracks"
                 withContext:AVFMediaPlayerItemTracksContext];


        [self setPlayerState:kPlayerState_UNKNOWN];

        // filled out later
        _videoFormat = nil;
        _lastHostTime = 0LL;

        // Don't create video output until we know we have video
        _playerOutput = nil;
        _displayLink = NULL;

        // Route spectrum callbacks from the audio unit back into this object
        // (bridged, unretained -- dispose clears the callback before teardown).
        _audioProcessor = [[AVFAudioProcessor alloc] init];
        if (_audioProcessor.audioSpectrum != nullptr) {
            _audioProcessor.audioSpectrum->SetSpectrumCallbackProc(SpectrumCallbackProc, (__bridge void*)self);
        }

        isDisposed = NO;
    }
    return self;
}
ddehaven@8148 184
// ARC dealloc (no [super dealloc]): runs the full dispose path -- which is
// idempotent, guarded by isDisposed -- then drops the remaining strong
// references explicitly.
- (void) dealloc {
    [self dispose];

    self.movieURL = nil;
    self.player = nil;
    self.playerOutput = nil;
}
ddehaven@8148 192
// Returns the audio spectrum unit as the CAudioSpectrum interface the
// pipeline expects, or NULL when no spectrum unit exists.
// Fix: the previous implementation unconditionally evaluated &(*asPtr),
// which dereferences the smart pointer even when it is null (undefined
// behavior). A null value is a real possibility -- dispose explicitly checks
// this same pointer against nullptr -- so guard before dereferencing.
- (CAudioSpectrum*) audioSpectrum {
    AVFAudioSpectrumUnitPtr asPtr = _audioProcessor.audioSpectrum;
    if (asPtr == nullptr) {
        return NULL;
    }
    return static_cast<CAudioSpectrum*>(&(*asPtr));
}
ddehaven@8148 197
// Returns the audio equalizer as the CAudioEqualizer interface the pipeline
// expects, or NULL when no equalizer exists.
// Fix: &(*eqPtr) dereferenced the smart pointer without a null check
// (undefined behavior when the processor has no equalizer); guard first,
// mirroring the nullptr checks used elsewhere in this file.
- (CAudioEqualizer*) audioEqualizer {
    AVFAudioEqualizerPtr eqPtr = _audioProcessor.audioEqualizer;
    if (eqPtr == nullptr) {
        return NULL;
    }
    return static_cast<CAudioEqualizer*>(&(*eqPtr));
}
ddehaven@8148 202
// Registers self as a KVO observer for the given key path (new values only)
// and records the key path so dispose can unregister every observation.
- (void) observeKeyPath:(NSString*)keyPath withContext:(void*)context {
    [self addObserver:self
           forKeyPath:keyPath
              options:NSKeyValueObservingOptionNew
              context:context];
    [keyPathsObserved addObject:keyPath];
}
ddehaven@8148 207
// If we get an unsupported pixel format in the video output, call this to
// force it to output our fallback format
- (void) setFallbackVideoFormat {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
    dispatch_async(dispatch_get_main_queue(), ^{
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Falling back on video format: %@", FourCCToNSString(FALLBACK_VO_FORMAT)] UTF8String]));
        // Build a replacement output pinned to the fallback pixel format.
        AVPlayerItemVideoOutput *newOutput =
            [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FALLBACK_VO_FORMAT)}];

        if (newOutput) {
            // Swap outputs: stop frame pulls, detach the old output, then
            // attach the new one and rearm the data-ready notification.
            // NOTE(review): referencing _displayLink/_playerOutput ivars here
            // captures self strongly despite blockSelf -- harmless for this
            // one-shot dispatch_async, but worth confirming intent.
            CVDisplayLinkStop(_displayLink);
            [_player.currentItem removeOutput:_playerOutput];
            [_playerOutput setDelegate:nil queue:nil];

            self.playerOutput = newOutput;
            [_playerOutput setDelegate:blockSelf queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [_player.currentItem addOutput:_playerOutput];
        }
    });
}
ddehaven@8148 231
// Lazily creates the AVPlayerItemVideoOutput, CVDisplayLink and delegate
// wiring the first time a video track is discovered (see extractTrackInfo).
// @synchronized guards against double-creation from concurrent callers.
- (void) createVideoOutput {
    @synchronized(self) {
        // Skip if already created
        if (!_playerOutput) {
#if FORCE_VO_FORMAT
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Forcing VO format: %@", FourCCToNSString(FORCED_VO_FORMAT)] UTF8String]));
#endif
            // Create the player video output
            // kCVPixelFormatType_32ARGB comes out inverted, so don't use it
            // '2vuy' -> kCVPixelFormatType_422YpCbCr8 -> YCbCr_422 (uses less CPU too)
            // kCVPixelFormatType_420YpCbCr8Planar
            _playerOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
#if FORCE_VO_FORMAT
                             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FORCED_VO_FORMAT)}];
#else
                             @{}]; // let AVFoundation decide the format...
#endif
            if (!_playerOutput) {
                return;
            }
            // We pull frames ourselves via the display link, so keep the
            // player from also rendering them.
            _playerOutput.suppressesPlayerRendering = YES;

            // Set up the display link (do we need this??)
            // might need to create a display link context struct that retains us
            // rather than passing self as the context
            CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
            CVDisplayLinkSetOutputCallback(_displayLink, displayLinkCallback, (__bridge void *)self);
            // Pause display link to conserve power
            CVDisplayLinkStop(_displayLink);

            // Set up playerOutput delegate
            [_playerOutput setDelegate:self queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];

            [_player.currentItem addOutput:_playerOutput];
        }
    }
}
ddehaven@8148 270
// Records a player state transition and forwards it to the Java event
// dispatcher. Repeated notifications of the same state are suppressed.
- (void) setPlayerState:(int)newState {
    if (newState == previousPlayerState) {
        return; // no transition, nothing to report
    }
    // For now just send up to client
    eventHandler->SendPlayerStateEvent(newState, 0.0);
    previousPlayerState = newState;
}
ddehaven@8148 278
// KVO dispatch for the key paths registered in init. The unique static
// context pointers identify each key path without string comparison;
// anything else is passed up to super per KVO convention.
- (void) observeValueForKeyPath:(NSString *)keyPath
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
    if (context == AVFMediaPlayerItemStatusContext) {
        // According to docs change[NSKeyValueChangeNewKey] can be NSNull when player.currentItem is nil
        if (![change[NSKeyValueChangeNewKey] isKindOfClass:[NSNull class]]) {
            AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] longValue];
            if (status == AVPlayerStatusReadyToPlay) {
                if (!_movieReady) {
                    LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Setting player to READY state"] UTF8String]));
                    // Only send this once, though we'll receive notification a few times
                    [self setPlayerState:kPlayerState_READY];
                    _movieReady = true;
                }
            }
        }
    } else if (context == AVFMediaPlayerItemDurationContext) {
        // send update duration event
        // NOTE(review): if currentItem is nil, messaging nil yields a
        // zero-filled (invalid) CMTime and CMTimeGetSeconds returns NaN --
        // confirm the Java side tolerates a NaN duration before relying on it.
        double duration = CMTimeGetSeconds(_player.currentItem.duration);
        eventHandler->SendDurationUpdateEvent(duration);
    } else if (context == AVFMediaPlayerItemTracksContext) {
        [self extractTrackInfo];
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
ddehaven@8148 306
// Current playback position, in seconds.
- (double) currentTime
{
    CMTime now = [self.player currentTime];
    return CMTimeGetSeconds(now);
}
ddehaven@8148 311
// Seeks playback to the given position (in seconds).
// Fix: the previous code used CMTimeMakeWithSeconds(time, 1). A timescale of
// 1 can only represent whole seconds, so every seek was truncated to second
// granularity. Use a nanosecond timescale to preserve sub-second precision;
// the method's interface and observable success behavior are unchanged.
- (void) setCurrentTime:(double)time
{
    [self.player seekToTime:CMTimeMakeWithSeconds(time, NSEC_PER_SEC)];
}
ddehaven@8148 316
// Whether audio output is currently muted (delegates to the AVPlayer).
- (BOOL) mute {
    return [self.player isMuted];
}
ddehaven@8148 320
// Mutes or unmutes audio output (delegates to the AVPlayer).
- (void) setMute:(BOOL)state {
    [self.player setMuted:state];
}
ddehaven@8148 324
// Audio sync delay, forwarded from the audio processor.
- (int64_t) audioSyncDelay {
    return [_audioProcessor audioDelay];
}
ddehaven@10036 328
// Sets the audio sync delay, forwarded to the audio processor.
- (void) setAudioSyncDelay:(int64_t)audioSyncDelay {
    [_audioProcessor setAudioDelay:audioSyncDelay];
}
ddehaven@8148 332
// Stereo balance, forwarded from the audio processor.
- (float) balance {
    return [_audioProcessor balance];
}
ddehaven@8148 336
// Sets the stereo balance, forwarded to the audio processor.
- (void) setBalance:(float)balance {
    [_audioProcessor setBalance:balance];
}
ddehaven@8148 340
// Playback volume, forwarded from the audio processor (the AVPlayer itself
// is pinned at volume 1.0 -- see extractTrackInfo).
- (float) volume {
    return [_audioProcessor volume];
}
ddehaven@8148 344
// Sets the playback volume, forwarded to the audio processor.
- (void) setVolume:(float)volume {
    [_audioProcessor setVolume:volume];
}
ddehaven@8148 348
// Current playback rate (0.0 when paused), from the AVPlayer.
- (float) rate {
    return [self.player rate];
}
ddehaven@8148 352
// Sets the playback rate on the AVPlayer.
- (void) setRate:(float)rate {
    [self.player setRate:rate];
}
ddehaven@8148 356
// Media duration in seconds, or -1.0 while the item is not yet ready to play
// (the duration is unknown until then).
- (double) duration {
    AVPlayerItem *item = self.player.currentItem;
    if (item.status != AVPlayerItemStatusReadyToPlay) {
        return -1.0;
    }
    return CMTimeGetSeconds(item.duration);
}
ddehaven@8148 363
// Starts playback and reports the PLAYING state to the event handler.
- (void) play {
    [self.player play];
    [self setPlayerState:kPlayerState_PLAYING];
}
ddehaven@8148 368
// Pauses playback and reports the PAUSED state to the event handler.
- (void) pause {
    [self.player pause];
    [self setPlayerState:kPlayerState_PAUSED];
}
ddehaven@8148 373
// Stops playback: pause, rewind to the start, then report STOPPED.
- (void) stop {
    [self.player pause];
    [self.player seekToTime:kCMTimeZero];
    [self setPlayerState:kPlayerState_STOPPED];
}
ddehaven@8148 379
// Intentionally empty: end-of-media is handled by the
// AVPlayerItemDidPlayToEndTimeNotification observer installed in init, which
// promotes the FINISHED state directly.
- (void) finish {
}
ddehaven@8148 382
// Tears the player down. Idempotent (guarded by isDisposed) and also invoked
// from dealloc. Order matters here: stop playback and silence the spectrum
// callback first, then detach the video output, then unregister observers,
// and finally release the display link.
- (void) dispose {
    @synchronized(self) {
        if (!isDisposed) {
            if (_player != nil) {
                // stop the player
                _player.rate = 0.0;
                [_player cancelPendingPrerolls];
            }

            AVFAudioSpectrumUnitPtr asPtr = _audioProcessor.audioSpectrum;
            if (asPtr != nullptr) {
                // Prevent future spectrum callbacks
                asPtr->SetEnabled(FALSE);
                asPtr->SetSpectrumCallbackProc(NULL, NULL);
                asPtr->SetBands(0, NULL);
            }

            // Detach the video output and break its delegate link so the
            // player queue stops calling back into us.
            if (_playerOutput != nil) {
                [_player.currentItem removeOutput:_playerOutput];
                [_playerOutput setDelegate:nil queue:nil];
            }

            // Report HALTED while the event handler is still reachable.
            [self setPlayerState:kPlayerState_HALTED];

            // Remove the block-based notification observers added in init.
            NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
            for (id<NSObject> observer in playerObservers) {
                [center removeObserver:observer];
            }

            // Remove every KVO registration made via observeKeyPath:.
            for (NSString *keyPath in keyPathsObserved) {
                [self removeObserver:self forKeyPath:keyPath];
            }

            if (_displayLink) {
                CVDisplayLinkStop(_displayLink);
                CVDisplayLinkRelease(_displayLink);
                _displayLink = NULL;
            }
            isDisposed = YES;
        }
    }
}
ddehaven@8148 425
// Walks the player item's tracks and reports each to the Java side as a
// CVideoTrack / CAudioTrack / CSubtitleTrack. Invoked from KVO whenever the
// item's tracks array changes. Also attaches the audio processor's mixer to
// the first audio track seen and, if any video track exists, requests
// creation of the video output at the end.
- (void) extractTrackInfo {
#if DUMP_TRACK_INFO
    NSMutableString *trackLog = [[NSMutableString alloc] initWithFormat:
                                 @"Parsing tracks for player item %@:\n",
                                 _player.currentItem];
#endif
    NSArray *tracks = self.player.currentItem.tracks;
    // AVFoundation tracks have no names; these counters produce sequential
    // names like "Video Track 1", "Audio Track 1", etc.
    int videoIndex = 1;
    int audioIndex = 1;
    int textIndex = 1;
    BOOL createVideo = NO;

    for (AVPlayerItemTrack *trackObj in tracks) {
        AVAssetTrack *track = trackObj.assetTrack;
        NSString *type = track.mediaType;
        NSString *name = nil;
        NSString *lang = @"und"; // ISO 639-2 "undetermined" fallback
        CTrack::Encoding encoding = CTrack::CUSTOM;
        FourCharCode fcc = 0;

        // Map the first format description's media subtype FourCC onto the
        // JFX encoding enum; anything unrecognized stays CUSTOM.
        CMFormatDescriptionRef desc = NULL;
        NSArray *formatDescList = track.formatDescriptions;
        if (formatDescList && formatDescList.count > 0) {
            desc = (__bridge CMFormatDescriptionRef)[formatDescList objectAtIndex:0];
            if (!desc) {
                TRACK_LOG(@"Can't get format description, skipping track");
                continue;
            }
            fcc = CMFormatDescriptionGetMediaSubType(desc);
            switch (fcc) {
                case 'avc1':
                    encoding = CTrack::H264;
                    break;
                case kAudioFormatLinearPCM:
                    encoding = CTrack::PCM;
                    break;
                case kAudioFormatMPEG4AAC:
                    encoding = CTrack::AAC;
                    break;
                case kAudioFormatMPEGLayer1:
                case kAudioFormatMPEGLayer2:
                    encoding = CTrack::MPEG1AUDIO;
                    break;
                case kAudioFormatMPEGLayer3:
                    encoding = CTrack::MPEG1LAYER3;
                    break;
                default:
                    // Everything else will show up as custom
                    break;
            }
        }

        if (track.languageCode) {
            lang = track.languageCode;
        }

        // FIXME(review): 'index' is not declared anywhere in this method; with
        // DUMP_TRACK_INFO enabled this resolves to the libc index() function
        // and logs garbage. Probably meant track.trackID or a loop counter.
        TRACK_LOG(@"Track %d (%@)", index, track.mediaType);
        TRACK_LOG(@"  enabled: %s", track.enabled ? "YES" : "NO");
        TRACK_LOG(@"  track ID: %d", track.trackID);
        TRACK_LOG(@"  language code: %@ (%sprovided)", lang, track.languageCode ? "" : "NOT ");
        TRACK_LOG(@"  encoding (FourCC): '%@' (JFX encoding %d)",
                  FourCCToNSString(fcc),
                  (int)encoding);

        // Tracks in AVFoundation don't have names, so we'll need to give them
        // sequential names based on their type, e.g., "Video Track 1"
        if ([type isEqualTo:AVMediaTypeVideo]) {
            int width = -1;
            int height = -1;
            float frameRate = -1.0;
            if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
                width = (int)track.naturalSize.width;
                height = (int)track.naturalSize.height;
                frameRate = track.nominalFrameRate;
            }
            name = [NSString stringWithFormat:@"Video Track %d", videoIndex++];
            CVideoTrack *outTrack = new CVideoTrack((int64_t)track.trackID,
                                                    [name UTF8String],
                                                    encoding,
                                                    (bool)track.enabled,
                                                    width,
                                                    height,
                                                    frameRate,
                                                    false);

            TRACK_LOG(@"  track name: %@", name);
            TRACK_LOG(@"  video attributes:");
            TRACK_LOG(@"    width: %d", width);
            TRACK_LOG(@"    height: %d", height);
            TRACK_LOG(@"    frame rate: %2.2f", frameRate);

            // Event handler copies what it needs; we own and delete the track.
            eventHandler->SendVideoTrackEvent(outTrack);
            delete outTrack;

            // signal to create the video output when we're done
            createVideo = YES;
        } else if ([type isEqualTo:AVMediaTypeAudio]) {
            name = [NSString stringWithFormat:@"Audio Track %d", audioIndex++];
            TRACK_LOG(@"  track name: %@", name);

            // Set up audio processing
            if (_audioProcessor) {
                // Make sure the players volume is set to 1.0
                self.player.volume = 1.0;

                // set up the mixer
                _audioProcessor.audioTrack = track;
                self.player.currentItem.audioMix = _audioProcessor.mixer;
            }

            // We have to get the audio information from the format description
            const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
            size_t layoutSize;
            const AudioChannelLayout *layout = CMAudioFormatDescriptionGetChannelLayout(desc, &layoutSize);
            // Defaults used when the format description lacks the detail:
            // stereo front left/right at 44.1 kHz.
            int channels = 2;
            int channelMask = CAudioTrack::FRONT_LEFT | CAudioTrack::FRONT_RIGHT;
            float sampleRate = 44100.0;

            TRACK_LOG(@"  audio attributes:");
            if (asbd) {
                sampleRate = (float)asbd->mSampleRate;
                TRACK_LOG(@"    sample rate: %2.2f", sampleRate);
            }
            if (layout) {
                // NOTE(review): only the channel count is derived from the
                // layout; channelMask stays the stereo default even for
                // multichannel audio -- confirm whether that is intentional.
                channels = (int)AudioChannelLayoutTag_GetNumberOfChannels(layout->mChannelLayoutTag);

                TRACK_LOG(@"    channel count: %d", channels);
                TRACK_LOG(@"    channel mask: %02x", channelMask);
            }

            CAudioTrack *audioTrack = new CAudioTrack((int64_t)track.trackID,
                                                      [name UTF8String],
                                                      encoding,
                                                      (bool)track.enabled,
                                                      [lang UTF8String],
                                                      channels, channelMask, sampleRate);
            eventHandler->SendAudioTrackEvent(audioTrack);
            delete audioTrack;
        } else if ([type isEqualTo:AVMediaTypeClosedCaption]) {
            name = [NSString stringWithFormat:@"Subtitle Track %d", textIndex++];
            TRACK_LOG(@"  track name: %@", name);
            CSubtitleTrack *subTrack = new CSubtitleTrack((int64_t)track.trackID,
                                                          [name UTF8String],
                                                          encoding,
                                                          (bool)track.enabled,
                                                          [lang UTF8String]);
            eventHandler->SendSubtitleTrackEvent(subTrack);
            delete subTrack;
        }
    }

#if DUMP_TRACK_INFO
    LOGGER_INFOMSG([trackLog UTF8String]);
#endif

    if (createVideo) {
        [self createVideoOutput];
    }
}
ddehaven@8148 585
// AVPlayerItemOutputPullDelegate: new media data is about to arrive, so
// restart the display link that was paused to save power and reset the
// HLS-stall bookkeeping.
- (void) outputMediaDataWillChange:(AVPlayerItemOutput *)sender {
    _lastHostTime = CVGetCurrentHostTime();
    CVDisplayLinkStart(_displayLink);
    _hlsBugResetCount = 0;
}
ddehaven@8148 591
// AVPlayerItemOutputPullDelegate: the output was flushed (e.g. after a seek),
// so reset the HLS-stall counter and the idle timer baseline.
- (void) outputSequenceWasFlushed:(AVPlayerItemOutput *)output {
    _hlsBugResetCount = 0;
    _lastHostTime = CVGetCurrentHostTime();
}
ddehaven@8148 596
// Wraps a CVPixelBuffer in a CVVideoFrame and hands it to the Java side,
// emitting a frame-size-changed event first whenever the dimensions differ
// from the previously seen frame. Called from displayLinkCallback.
- (void) sendPixelBuffer:(CVPixelBufferRef)buf frameTime:(double)frameTime hostTime:(int64_t)hostTime {
    _lastHostTime = hostTime;
    CVVideoFrame *frame = NULL;
    // CVVideoFrame throws a C string message when it rejects the buffer.
    try {
        frame = new CVVideoFrame(buf, frameTime, _lastHostTime);
    } catch (const char *message) {
        // Check if the video format is supported, if not try our fallback format
        OSType format = CVPixelBufferGetPixelFormatType(buf);
        if (format == 0) {
            // Bad pixel format, possibly a bad frame or ???
            // This seems to happen when the stream is corrupt, so let's ignore
            // it and hope things recover
            return;
        }
        if (!CVVideoFrame::IsFormatSupported(format)) {
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Bad pixel format: '%@'",
                              FourCCToNSString(format)] UTF8String]));
            [self setFallbackVideoFormat];
            return;
        }
        // Can't use this frame, report an error and ignore it
        LOGGER_DEBUGMSG(message);
        return;
    }

    // previousWidth/Height start at -1, so the very first frame always
    // produces a frame-size event.
    if (previousWidth < 0 || previousHeight < 0
        || previousWidth != frame->GetWidth() || previousHeight != frame->GetHeight())
    {
        // Send/Queue frame size changed event
        previousWidth = frame->GetWidth();
        previousHeight = frame->GetHeight();
        eventHandler->SendFrameSizeChangedEvent(previousWidth, previousHeight);
    }
    // NOTE(review): 'frame' is not deleted on this path, so ownership appears
    // to transfer to the event handler -- confirm against SendNewFrameEvent's
    // contract.
    eventHandler->SendNewFrameEvent(frame);
}
ddehaven@8148 632
// Forwards an audio spectrum update to the Java event dispatcher, stamped
// with the current playback position. No-op if there is no event handler.
- (void) sendSpectrumEventDuration:(double)duration {
    if (!eventHandler) {
        return;
    }
    double timestamp = self.currentTime;
    eventHandler->SendAudioSpectrumEvent(timestamp, duration);
}
ddehaven@8148 639
ddehaven@8148 640 @end
ddehaven@8148 641
// C trampoline for the audio spectrum unit: bridges the unretained context
// pointer (set up in init) back to the player object. Tolerates NULL, which
// dispose installs to silence further callbacks.
static void SpectrumCallbackProc(void *context, double duration) {
    if (context != NULL) {
        [(__bridge AVFMediaPlayer*)context sendSpectrumEventDuration:duration];
    }
}
ddehaven@8148 648
// CVDisplayLink callback, fired on every vsync. If the video output has a
// pixel buffer ready for the next vsync it is forwarded via sendPixelBuffer:.
// Otherwise, after FREEWHEELING_PERIOD_IN_SECONDS of idleness it first tries
// the HLS remove/re-add workaround (every 10th idle tick while playing) and
// then stops the link to save power, asking the output for a wakeup
// notification when data resumes.
static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext)
{
    // Context is the player itself, bridged unretained (see createVideoOutput).
    AVFMediaPlayer *self = (__bridge AVFMediaPlayer *)displayLinkContext;
    AVPlayerItemVideoOutput *playerItemVideoOutput = self.playerOutput;

    // The displayLink calls back at every vsync (screen refresh)
    // Compute itemTime for the next vsync
    CMTime outputItemTime = [playerItemVideoOutput itemTimeForCVTimeStamp:*inOutputTime];
    if ([playerItemVideoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
        CVPixelBufferRef pixBuff = [playerItemVideoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        // Copy the pixel buffer to be displayed next and add it to AVSampleBufferDisplayLayer for display
        double frameTime = CMTimeGetSeconds(outputItemTime);
        [self sendPixelBuffer:pixBuff frameTime:frameTime hostTime:inOutputTime->hostTime];
        self.hlsBugResetCount = 0;

        // copyPixelBufferForItemTime returns a +1 reference; balance it.
        CVBufferRelease(pixBuff);
    } else {
        // No frame ready: measure how long we have been idle.
        CMTime delta = CMClockMakeHostTimeFromSystemUnits(inNow->hostTime - self.lastHostTime);
        NSTimeInterval elapsedTime = CMTimeGetSeconds(delta);

        if (elapsedTime > FREEWHEELING_PERIOD_IN_SECONDS) {
            if (self.player.rate != 0.0) {
                if (self.hlsBugResetCount > 9) {
                    /*
                     * There is a bug in AVFoundation where if we're playing a HLS
                     * stream and it switches to a different bitrate, the video
                     * output will stop receiving frames. So far, the only workaround
                     * for this has been to remove then re-add the video output
                     * This causes the video to pause for a bit, but it's better
                     * than not playing at all, and this should not happen once
                     * the bug is fixed in AVFoundation.
                     */
                    [self.player.currentItem removeOutput:playerItemVideoOutput];
                    [self.player.currentItem addOutput:playerItemVideoOutput];
                    self.hlsBugResetCount = 0;
                    self.lastHostTime = inNow->hostTime;
                    // fall through to allow it to stop the display link
                } else {
                    self.hlsBugResetCount++;
                    self.lastHostTime = inNow->hostTime;
                    return kCVReturnSuccess;
                }
            }
            // No new images for a while. Shut down the display link to conserve
            // power, but request a wakeup call if new images are coming.
            CVDisplayLinkStop(displayLink);
            [playerItemVideoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
        }
    }

    return kCVReturnSuccess;
}