/*
 * Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#import "AVFMediaPlayer.h"
#import <objc/runtime.h>
#import "CVVideoFrame.h"

#import <PipelineManagement/NullAudioEqualizer.h>
#import <PipelineManagement/NullAudioSpectrum.h>

#import "AVFAudioProcessor.h"

// "borrowed" from green screen player on ADC
// These are used to reduce power consumption when there are no video frames
// to be rendered, which is generally A Good Thing
#define FREEWHEELING_PERIOD_IN_SECONDS 0.5
#define ADVANCE_INTERVAL_IN_SECONDS 0.1
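
// FREEWHEELING_PERIOD_IN_SECONDS is how long displayLinkCallback lets the
// display link run without receiving a new frame before shutting it down;
// ADVANCE_INTERVAL_IN_SECONDS is the lead time passed to
// requestNotificationOfMediaDataChangeWithAdvanceInterval: so the link can be
// restarted shortly before new media data arrives.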

// set to 1 to debug track information
#define DUMP_TRACK_INFO 0

// trick used by Apple in AVGreenScreenPlayer
// This avoids calling [NSString isEqualTo:@"..."]
// The actual value is meaningless, but needs to be unique
static void *AVFMediaPlayerItemStatusContext = &AVFMediaPlayerItemStatusContext;
static void *AVFMediaPlayerItemDurationContext = &AVFMediaPlayerItemDurationContext;
static void *AVFMediaPlayerItemTracksContext = &AVFMediaPlayerItemTracksContext;
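// Each context is compared by pointer identity in
// -observeValueForKeyPath:ofObject:change:context: rather than by key path
// string, e.g.:
//     if (context == AVFMediaPlayerItemStatusContext) { ... }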

#define FORCE_VO_FORMAT 0
#if FORCE_VO_FORMAT
// #define FORCED_VO_FORMAT kCVPixelFormatType_32BGRA
// #define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8
// #define FORCED_VO_FORMAT kCVPixelFormatType_420YpCbCr8Planar
#define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8_yuvs // Unsupported, use to test fallback
#endif

// Apple really likes to output '2vuy'; this should be the least expensive conversion
#define FALLBACK_VO_FORMAT kCVPixelFormatType_422YpCbCr8

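// Extracts one byte of a FourCC code as a printable 7-bit character,
// substituting '?' when the masked byte is zero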
#define FOURCC_CHAR(f) (((f) & 0x7f) ? (char)((f) & 0x7f) : '?')

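// For example, FourCCToNSString(kCVPixelFormatType_422YpCbCr8) returns @"2vuy";
// codes below 0x100, such as kCVPixelFormatType_32ARGB (0x20), are printed in
// decimal rather than being misinterpreted as control characters.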
static inline NSString *FourCCToNSString(UInt32 fcc) {
    if (fcc < 0x100) {
        return [NSString stringWithFormat:@"%u", fcc];
    }
    return [NSString stringWithFormat:@"%c%c%c%c",
            FOURCC_CHAR(fcc >> 24),
            FOURCC_CHAR(fcc >> 16),
            FOURCC_CHAR(fcc >> 8),
            FOURCC_CHAR(fcc)];
}

#if DUMP_TRACK_INFO
static void append_log(NSMutableString *s, NSString *fmt, ...) {
    va_list args;
    va_start(args, fmt);
    NSString *appString = [[NSString alloc] initWithFormat:fmt arguments:args];
    [s appendFormat:@"%@\n", appString];
    va_end(args);
}
#define TRACK_LOG(fmt, ...) append_log(trackLog, fmt, ##__VA_ARGS__)
#else
#define TRACK_LOG(...) {}
#endif

@implementation AVFMediaPlayer

static void SpectrumCallbackProc(void *context, double duration);

static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
                                    const CVTimeStamp *inNow,
                                    const CVTimeStamp *inOutputTime,
                                    CVOptionFlags flagsIn,
                                    CVOptionFlags *flagsOut,
                                    void *displayLinkContext);

+ (BOOL) playerAvailable {
    // Check if AVPlayerItemVideoOutput exists; if not, we're running on 10.7 or
    // earlier and have to fall back on QTKit
    Class klass = objc_getClass("AVPlayerItemVideoOutput");
    return (klass != nil);
}

- (id) initWithURL:(NSURL *)source eventHandler:(CJavaPlayerEventDispatcher*)hdlr {
    if ((self = [super init]) != nil) {
        _audioSyncDelay = 0LL;
        _volume = 1.0f;
        _balance = 0.0f;

        previousWidth = -1;
        previousHeight = -1;
        previousPlayerState = kPlayerState_UNKNOWN;

        eventHandler = hdlr;

        self.movieURL = source;
        _buggyHLSSupport = NO;
        _hlsBugResetCount = 0;

        // Create our own work queue
        playerQueue = dispatch_queue_create(NULL, NULL);

        // Create the player
        _player = [[AVPlayer alloc] init];
        if (!_player) {
            return nil;
        }
        _player.volume = 1.0f;
        _player.muted = NO;

        _playerItem = [AVPlayerItem playerItemWithURL:_movieURL];
        if (!_playerItem) {
            return nil;
        }
        [_player replaceCurrentItemWithPlayerItem:_playerItem];

        // Set the player item end action to NONE since we'll handle it internally
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

        /*
         * AVPlayerItem notifications we could listen for:
         * 10.7 AVPlayerItemTimeJumpedNotification -> the item's current time has changed discontinuously
         * 10.7 AVPlayerItemDidPlayToEndTimeNotification -> item has played to its end time
         * 10.7 AVPlayerItemFailedToPlayToEndTimeNotification (userInfo = NSError) -> item has failed to play to its end time
         * 10.9 AVPlayerItemPlaybackStalledNotification -> media did not arrive in time to continue playback
         */
        playerObservers = [[NSMutableArray alloc] init];
        id<NSObject> observer;
        __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        observer = [center addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                       object:_playerItem
                                        queue:[NSOperationQueue mainQueue]
                                   usingBlock:^(NSNotification *note) {
                                       // promote FINISHED state...
                                       [blockSelf setPlayerState:kPlayerState_FINISHED];
                                   }];
        if (observer) {
            [playerObservers addObject:observer];
        }

        keyPathsObserved = [[NSMutableArray alloc] init];
        [self observeKeyPath:@"self.playerItem.status"
                 withContext:AVFMediaPlayerItemStatusContext];

        [self observeKeyPath:@"self.playerItem.duration"
                 withContext:AVFMediaPlayerItemDurationContext];

        [self observeKeyPath:@"self.playerItem.tracks"
                 withContext:AVFMediaPlayerItemTracksContext];

        [self setPlayerState:kPlayerState_UNKNOWN];

        // filled out later
        _videoFormat = nil;
        _lastHostTime = 0LL;

        // Don't create video output until we know we have video
        _playerOutput = nil;
        _displayLink = NULL;

        _audioSpectrum = new AVFAudioSpectrumUnit();
        _audioSpectrum->SetSpectrumCallbackProc(SpectrumCallbackProc, (__bridge void*)self);

        _audioEqualizer = new AVFAudioEqualizer();
    }
    return self;
}

- (void) dealloc {
    [self dispose];

    self.movieURL = nil;
    self.player = nil;
    self.playerItem = nil;
    self.playerOutput = nil;

    if (_audioSpectrum) {
        delete _audioSpectrum;
        _audioSpectrum = NULL;
    }

    if (_audioEqualizer) {
        delete _audioEqualizer;
        _audioEqualizer = NULL;
    }
}

- (CAudioSpectrum*) audioSpectrum {
    return _audioSpectrum;
}

- (CAudioEqualizer*) audioEqualizer {
    return _audioEqualizer;
}

- (void) observeKeyPath:(NSString*)keyPath withContext:(void*)context {
    [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:context];
    [keyPathsObserved addObject:keyPath];
}

// If we get an unsupported pixel format in the video output, call this to
// force it to output our fallback format
- (void) setFallbackVideoFormat {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
    dispatch_async(dispatch_get_main_queue(), ^{
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Falling back on video format: %@", FourCCToNSString(FALLBACK_VO_FORMAT)] UTF8String]));
        AVPlayerItemVideoOutput *newOutput =
            [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FALLBACK_VO_FORMAT)}];

        if (newOutput) {
            CVDisplayLinkStop(_displayLink);
            [_playerItem removeOutput:_playerOutput];
            [_playerOutput setDelegate:nil queue:nil];

            self.playerOutput = newOutput;
            [_playerOutput setDelegate:blockSelf queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [_playerItem addOutput:_playerOutput];
        }
    });
}

- (void) createVideoOutput {
    @synchronized(self) {
        // Skip if already created
        if (!_playerOutput) {
#if FORCE_VO_FORMAT
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Forcing VO format: %@", FourCCToNSString(FORCED_VO_FORMAT)] UTF8String]));
#endif
            // Create the player video output
            // kCVPixelFormatType_32ARGB comes out inverted, so don't use it
            // '2vuy' -> kCVPixelFormatType_422YpCbCr8 -> YCbCr_422 (uses less CPU too)
            // kCVPixelFormatType_420YpCbCr8Planar
            _playerOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
#if FORCE_VO_FORMAT
                             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FORCED_VO_FORMAT)}];
#else
                             @{}]; // let AVFoundation decide the format...
#endif
            if (!_playerOutput) {
                return;
            }
            _playerOutput.suppressesPlayerRendering = YES;

            // Set up the display link (do we need this??)
            // might need to create a display link context struct that retains us
            // rather than passing self as the context
            CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
            CVDisplayLinkSetOutputCallback(_displayLink, displayLinkCallback, (__bridge void *)self);
            // Pause display link to conserve power
            CVDisplayLinkStop(_displayLink);

            // Set up playerOutput delegate
            [_playerOutput setDelegate:self queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];

            [_playerItem addOutput:_playerOutput];
        }
    }
}

- (void) setPlayerState:(int)newState {
    if (newState != previousPlayerState) {
        // For now just send up to client
        eventHandler->SendPlayerStateEvent(newState, 0.0);
        previousPlayerState = newState;
    }
}

- (void) observeValueForKeyPath:(NSString *)keyPath
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
    if (context == AVFMediaPlayerItemStatusContext) {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] longValue];
        if (status == AVPlayerStatusReadyToPlay) {
            if (!_movieReady) {
                // Only send this once, though we'll receive notification a few times
                [self setPlayerState:kPlayerState_READY];
                _movieReady = true;
            }
        }
    } else if (context == AVFMediaPlayerItemDurationContext) {
        // send update duration event
        double duration = CMTimeGetSeconds(_playerItem.duration);
        eventHandler->SendDurationUpdateEvent(duration);
    } else if (context == AVFMediaPlayerItemTracksContext) {
        [self extractTrackInfo];
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

- (double) currentTime
{
    return CMTimeGetSeconds([self.player currentTime]);
}

- (void) setCurrentTime:(double)time
{
    // use a finer timescale so seek positions are not truncated to whole seconds
    [self.player seekToTime:CMTimeMakeWithSeconds(time, 600)];
}

- (BOOL) mute {
    return self.player.muted;
}

- (void) setMute:(BOOL)state {
    self.player.muted = state;
}

- (void) setAudioSyncDelay:(int64_t)audioSyncDelay {
    _audioSyncDelay = audioSyncDelay;
    if (_audioProcessor) {
        _audioProcessor.audioDelay = audioSyncDelay;
    }
}

- (float) balance {
    return _balance;
}

- (void) setBalance:(float)balance {
    _balance = balance;
    if (_audioProcessor) {
        _audioProcessor.balance = balance;
    }
}

- (float) volume {
    return _volume;
}

- (void) setVolume:(float)volume {
    _volume = volume;
    if (_audioProcessor) {
        _audioProcessor.volume = volume;
    }
}

- (float) rate {
    return self.player.rate;
}

- (void) setRate:(float)rate {
    self.player.rate = rate;
}

- (double) duration {
    if (self.playerItem.status == AVPlayerItemStatusReadyToPlay) {
        return CMTimeGetSeconds(self.playerItem.duration);
    }
    return -1.0;
}

- (void) play {
    [self.player play];
    [self setPlayerState:kPlayerState_PLAYING];
}

- (void) pause {
    [self.player pause];
    [self setPlayerState:kPlayerState_PAUSED];
}

- (void) stop {
    [self.player pause];
    [self.player seekToTime:kCMTimeZero];
    [self setPlayerState:kPlayerState_STOPPED];
}

- (void) finish {
}

- (void) dispose {
    @synchronized(self) {
        if (!isDisposed) {
            if (_player != nil) {
                // this should stop and dealloc the audio processor
                _player.currentItem.audioMix = nil;
            }

            if (_playerOutput != nil) {
                [_playerItem removeOutput:_playerOutput];
                [_playerOutput setDelegate:nil queue:nil];
            }

            [self setPlayerState:kPlayerState_HALTED];

            NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
            for (id<NSObject> observer in playerObservers) {
                [center removeObserver:observer];
            }

            for (NSString *keyPath in keyPathsObserved) {
                [self removeObserver:self forKeyPath:keyPath];
            }

            if (_displayLink) {
                CVDisplayLinkStop(_displayLink);
                CVDisplayLinkRelease(_displayLink);
                _displayLink = NULL;
            }
            isDisposed = YES;
        }
    }
}

- (void) extractTrackInfo {
#if DUMP_TRACK_INFO
    NSMutableString *trackLog = [[NSMutableString alloc] initWithFormat:
                                 @"Parsing tracks for player item %@:\n",
                                 _playerItem];
#endif
    NSArray *tracks = self.playerItem.tracks;
    int videoIndex = 1;
    int audioIndex = 1;
    int textIndex = 1;
    BOOL createVideo = NO;

    for (AVPlayerItemTrack *trackObj in tracks) {
        AVAssetTrack *track = trackObj.assetTrack;
        NSString *type = track.mediaType;
        NSString *name = nil;
        NSString *lang = @"und";
        CTrack::Encoding encoding = CTrack::CUSTOM;
        FourCharCode fcc = 0;

        CMFormatDescriptionRef desc = NULL;
        NSArray *formatDescList = track.formatDescriptions;
        if (formatDescList && formatDescList.count > 0) {
            desc = (__bridge CMFormatDescriptionRef)[formatDescList objectAtIndex:0];
            if (!desc) {
                TRACK_LOG(@"Can't get format description, skipping track");
                continue;
            }
            fcc = CMFormatDescriptionGetMediaSubType(desc);
            switch (fcc) {
                case 'avc1':
                    encoding = CTrack::H264;
                    break;
                case kAudioFormatLinearPCM:
                    encoding = CTrack::PCM;
                    break;
                case kAudioFormatMPEG4AAC:
                    encoding = CTrack::AAC;
                    break;
                case kAudioFormatMPEGLayer1:
                case kAudioFormatMPEGLayer2:
                    encoding = CTrack::MPEG1AUDIO;
                    break;
                case kAudioFormatMPEGLayer3:
                    encoding = CTrack::MPEG1LAYER3;
                    break;
                default:
                    // Everything else will show up as custom
                    break;
            }
        }

        if (track.languageCode) {
            lang = track.languageCode;
        }

 491         TRACK_LOG(@"Track %d (%@)", index, track.mediaType);
 492         TRACK_LOG(@"  enabled: %s", track.enabled ? "YES" : "NO");
 493         TRACK_LOG(@"  track ID: %d", track.trackID);
 494         TRACK_LOG(@"  language code: %@ (%sprovided)", lang, track.languageCode ? "" : "NOT ");
 495         TRACK_LOG(@"  encoding (FourCC): '%@' (JFX encoding %d)",
 496                   FourCCToNSString(fcc),
 497                   (int)encoding);
 498 
 499         // Tracks in AVFoundation don't have names, so we'll need to give them
 500         // sequential names based on their type, e.g., "Video Track 1"
 501         if ([type isEqualTo:AVMediaTypeVideo]) {
 502             int width = -1;
 503             int height = -1;
 504             float frameRate = -1.0;
 505             if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
 506                 width = (int)track.naturalSize.width;
 507                 height = (int)track.naturalSize.height;
 508                 frameRate = track.nominalFrameRate;
 509             }
 510             name = [NSString stringWithFormat:@"Video Track %d", videoIndex++];
 511             CVideoTrack *outTrack = new CVideoTrack((int64_t)track.trackID,
 512                                                    [name UTF8String],
 513                                                    encoding,
 514                                                    (bool)track.enabled,
 515                                                    width,
 516                                                    height,
 517                                                    frameRate,
 518                                                    false);
 519 
 520             TRACK_LOG(@"  track name: %@", name);
 521             TRACK_LOG(@"  video attributes:");
 522             TRACK_LOG(@"    width: %d", width);
 523             TRACK_LOG(@"    height: %d", height);
 524             TRACK_LOG(@"    frame rate: %2.2f", frameRate);
 525 
 526             eventHandler->SendVideoTrackEvent(outTrack);
 527             delete outTrack;
 528 
 529             // signal to create the video output when we're done
 530             createVideo = YES;
 531         } else if ([type isEqualTo:AVMediaTypeAudio]) {
 532             name = [NSString stringWithFormat:@"Audio Track %d", audioIndex++];
 533             TRACK_LOG(@"  track name: %@", name);
 534 
 535             // Create audio processor
 536             if (!_audioProcessor) {
 537                 _audioProcessor = [[AVFAudioProcessor alloc] initWithPlayer:self
 538                                                                  assetTrack:track];
 539                 _audioProcessor.volume = _volume;
 540                 _audioProcessor.balance = _balance;
                // Make sure the player's volume is set to 1.0
                self.player.volume = 1.0;

                // Set up EQ and spectrum
                _audioProcessor.audioSpectrum = _audioSpectrum;
                _audioProcessor.audioEqualizer = _audioEqualizer;
            }

            // We have to get the audio information from the format description
            const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
            size_t layoutSize;
            const AudioChannelLayout *layout = CMAudioFormatDescriptionGetChannelLayout(desc, &layoutSize);
            int channels = 2;
            int channelMask = CAudioTrack::FRONT_LEFT | CAudioTrack::FRONT_RIGHT;
            float sampleRate = 44100.0;

            TRACK_LOG(@"  audio attributes:");
            if (asbd) {
                sampleRate = (float)asbd->mSampleRate;
                TRACK_LOG(@"    sample rate: %2.2f", sampleRate);
            }
            if (layout) {
                channels = (int)AudioChannelLayoutTag_GetNumberOfChannels(layout->mChannelLayoutTag);
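                // Note: only the channel count is derived from the layout tag;
                // channelMask keeps its stereo (FRONT_LEFT | FRONT_RIGHT)
                // default, so the mask logged below is the default value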

                TRACK_LOG(@"    channel count: %d", channels);
                TRACK_LOG(@"    channel mask: %02x", channelMask);
            }

            CAudioTrack *audioTrack = new CAudioTrack((int64_t)track.trackID,
                                                      [name UTF8String],
                                                      encoding,
                                                      (bool)track.enabled,
                                                      [lang UTF8String],
                                                      channels, channelMask, sampleRate);
            eventHandler->SendAudioTrackEvent(audioTrack);
            delete audioTrack;
        } else if ([type isEqualTo:AVMediaTypeClosedCaption]) {
            name = [NSString stringWithFormat:@"Subtitle Track %d", textIndex++];
            TRACK_LOG(@"  track name: %@", name);
            CSubtitleTrack *subTrack = new CSubtitleTrack((int64_t)track.trackID,
                                                          [name UTF8String],
                                                          encoding,
                                                          (bool)track.enabled,
                                                          [lang UTF8String]);
            eventHandler->SendSubtitleTrackEvent(subTrack);
            delete subTrack;
        }
    }

#if DUMP_TRACK_INFO
    LOGGER_INFOMSG([trackLog UTF8String]);
#endif

    if (createVideo) {
        [self createVideoOutput];
    }
}

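// AVPlayerItemOutputPullDelegate callback: AVFoundation is about to have new
// media data available (requested earlier via
// requestNotificationOfMediaDataChangeWithAdvanceInterval:), so restart the
// display link to resume pulling frames.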
- (void) outputMediaDataWillChange:(AVPlayerItemOutput *)sender {
    _lastHostTime = CVGetCurrentHostTime();
    CVDisplayLinkStart(_displayLink);
    _hlsBugResetCount = 0;
}

- (void) outputSequenceWasFlushed:(AVPlayerItemOutput *)output {
    _hlsBugResetCount = 0;
    _lastHostTime = CVGetCurrentHostTime();
}

- (void) sendPixelBuffer:(CVPixelBufferRef)buf frameTime:(double)frameTime hostTime:(int64_t)hostTime {
    _lastHostTime = hostTime;
    CVVideoFrame *frame = NULL;
    try {
        frame = new CVVideoFrame(buf, frameTime, _lastHostTime);
    } catch (const char *message) {
        // Check if the video format is supported; if not, try our fallback format
        OSType format = CVPixelBufferGetPixelFormatType(buf);
        if (format == 0) {
            // Bad pixel format, possibly a bad frame or ???
            // This seems to happen when the stream is corrupt, so let's ignore
            // it and hope things recover
            return;
        }
        if (!CVVideoFrame::IsFormatSupported(format)) {
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Bad pixel format: '%@'",
                               FourCCToNSString(format)] UTF8String]));
            [self setFallbackVideoFormat];
            return;
        }
        // Can't use this frame, report an error and ignore it
        LOGGER_DEBUGMSG(message);
        return;
    }

    if (previousWidth < 0 || previousHeight < 0
        || previousWidth != frame->GetWidth() || previousHeight != frame->GetHeight())
    {
        // Send/Queue frame size changed event
        previousWidth = frame->GetWidth();
        previousHeight = frame->GetHeight();
        eventHandler->SendFrameSizeChangedEvent(previousWidth, previousHeight);
    }
    eventHandler->SendNewFrameEvent(frame);
}

- (void) sendSpectrumEventDuration:(double)duration {
    if (eventHandler) {
        double timestamp = self.currentTime;
        eventHandler->SendAudioSpectrumEvent(timestamp, duration);
    }
}

@end

static void SpectrumCallbackProc(void *context, double duration) {
    if (context) {
        AVFMediaPlayer *player = (__bridge AVFMediaPlayer*)context;
        [player sendSpectrumEventDuration:duration];
    }
}

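// Note: CVDisplayLink invokes this callback on its own high-priority thread,
// not on the main thread, so everything touched here must be thread-safe.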
static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
                                    const CVTimeStamp *inNow,
                                    const CVTimeStamp *inOutputTime,
                                    CVOptionFlags flagsIn,
                                    CVOptionFlags *flagsOut,
                                    void *displayLinkContext)
{
    AVFMediaPlayer *self = (__bridge AVFMediaPlayer *)displayLinkContext;
    AVPlayerItemVideoOutput *playerItemVideoOutput = self.playerOutput;

    // The displayLink calls back at every vsync (screen refresh)
    // Compute itemTime for the next vsync
    CMTime outputItemTime = [playerItemVideoOutput itemTimeForCVTimeStamp:*inOutputTime];
    if ([playerItemVideoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
        CVPixelBufferRef pixBuff = [playerItemVideoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        // Copy the pixel buffer for this vsync and forward it to the event
        // handler as a new frame (via sendPixelBuffer:frameTime:hostTime:)
        double frameTime = CMTimeGetSeconds(outputItemTime);
        [self sendPixelBuffer:pixBuff frameTime:frameTime hostTime:inOutputTime->hostTime];
        self.hlsBugResetCount = 0;

        CVBufferRelease(pixBuff);
    } else {
        CMTime delta = CMClockMakeHostTimeFromSystemUnits(inNow->hostTime - self.lastHostTime);
        NSTimeInterval elapsedTime = CMTimeGetSeconds(delta);

        if (elapsedTime > FREEWHEELING_PERIOD_IN_SECONDS) {
            if (self.player.rate != 0.0) {
                if (self.hlsBugResetCount > 9) {
                    /*
                     * There is a bug in AVFoundation where, if we're playing an HLS
                     * stream and it switches to a different bitrate, the video
                     * output will stop receiving frames. So far, the only workaround
                     * for this has been to remove and then re-add the video output.
                     * This causes the video to pause for a bit, but it's better
                     * than not playing at all, and this should not happen once
                     * the bug is fixed in AVFoundation.
                     */
                    [self.playerItem removeOutput:playerItemVideoOutput];
                    [self.playerItem addOutput:playerItemVideoOutput];
                    self.hlsBugResetCount = 0;
                    self.lastHostTime = inNow->hostTime;
                    // fall through to allow it to stop the display link
                } else {
                    self.hlsBugResetCount++;
                    self.lastHostTime = inNow->hostTime;
                    return kCVReturnSuccess;
                }
            }
            // No new images for a while.  Shut down the display link to conserve
            // power, but request a wakeup call if new images are coming.
            CVDisplayLinkStop(displayLink);
            [playerItemVideoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
        }
    }

    return kCVReturnSuccess;
}