1 /*
2 * Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. Oracle designates this
8 * particular file as subject to the "Classpath" exception as provided
9 * by Oracle in the LICENSE file that accompanied this code.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
88 @implementation AVFMediaPlayer
89
90 static void SpectrumCallbackProc(void *context, double duration);
91
92 static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
93 const CVTimeStamp *inNow,
94 const CVTimeStamp *inOutputTime,
95 CVOptionFlags flagsIn,
96 CVOptionFlags *flagsOut,
97 void *displayLinkContext);
98
+ (BOOL) playerAvailable {
    // AVPlayerItemVideoOutput only exists on OS X 10.8+; when the runtime
    // cannot resolve the class we are on 10.7 or earlier and the caller must
    // fall back on QTKit.
    return (objc_getClass("AVPlayerItemVideoOutput") != nil);
}
105
// Designated initializer. Builds the AVPlayer/AVPlayerItem pair for source,
// registers an end-of-media notification observer and KVO on the item's
// status/duration/tracks, and creates the native spectrum/equalizer objects.
// Returns nil if the player or player item cannot be created.
// hdlr is a raw C++ pointer; the caller retains ownership and must keep it
// alive for this player's lifetime.
- (id) initWithURL:(NSURL *)source eventHandler:(CJavaPlayerEventDispatcher*)hdlr {
    if ((self = [super init]) != nil) {
        _audioSyncDelay = 0LL;
        _volume = 1.0f;
        _balance = 0.0f;

        previousWidth = -1;
        previousHeight = -1;
        previousPlayerState = kPlayerState_UNKNOWN;

        eventHandler = hdlr;

        self.movieURL = source;
        _buggyHLSSupport = NO;
        _hlsBugResetCount = 0;

        // Create our own work queue (serial; used for the video output delegate)
        playerQueue = dispatch_queue_create(NULL, NULL);

        // Create the player
        _player = [[AVPlayer alloc] init];
        if (!_player) {
            return nil;
        }
        _player.volume = 1.0f;
        _player.muted = NO;

        _playerItem = [AVPlayerItem playerItemWithURL:_movieURL];
        if (!_playerItem) {
            return nil;
        }
        [_player replaceCurrentItemWithPlayerItem:_playerItem];

        // Set the player item end action to NONE since we'll handle it internally
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

        /*
         * AVPlayerItem notifications we could listen for:
         * 10.7 AVPlayerItemTimeJumpedNotification -> the item's current time has changed discontinuously
         * 10.7 AVPlayerItemDidPlayToEndTimeNotification -> item has played to its end time
         * 10.7 AVPlayerItemFailedToPlayToEndTimeNotification (userInfo = NSError) -> item has failed to play to its end time
         * 10.9 AVPlayerItemPlaybackStalledNotification -> media did not arrive in time to continue playback
         */
        // Observers are recorded in playerObservers so dispose can remove them.
        playerObservers = [[NSMutableArray alloc] init];
        id<NSObject> observer;
        __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        observer = [center addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                       object:_playerItem
                                        queue:[NSOperationQueue mainQueue]
                                   usingBlock:^(NSNotification *note) {
                                       // promote FINISHED state...
                                       [blockSelf setPlayerState:kPlayerState_FINISHED];
                                   }];
        if (observer) {
            [playerObservers addObject:observer];
        }

        // KVO registrations are recorded in keyPathsObserved so dispose can
        // balance them with removeObserver:forKeyPath:.
        keyPathsObserved = [[NSMutableArray alloc] init];
        [self observeKeyPath:@"self.playerItem.status"
                 withContext:AVFMediaPlayerItemStatusContext];

        [self observeKeyPath:@"self.playerItem.duration"
                 withContext:AVFMediaPlayerItemDurationContext];

        [self observeKeyPath:@"self.playerItem.tracks"
                 withContext:AVFMediaPlayerItemTracksContext];


        [self setPlayerState:kPlayerState_UNKNOWN];

        // filled out later
        _videoFormat = nil;
        _lastHostTime = 0LL;

        // Don't create video output until we know we have video
        _playerOutput = nil;
        _displayLink = NULL;

        // Plain C++ objects owned by this player; deleted in dealloc.
        _audioSpectrum = new AVFAudioSpectrumUnit();
        _audioSpectrum->SetSpectrumCallbackProc(SpectrumCallbackProc, (__bridge void*)self);

        _audioEqualizer = new AVFAudioEqualizer();
    }
    return self;
}
192
- (void) dealloc {
    // Tear everything down first (KVO, notifications, display link) so no
    // callback can fire while the C++ objects below are being freed.
    [self dispose];

    self.movieURL = nil;
    self.player = nil;
    self.playerItem = nil;
    self.playerOutput = nil;

    // The spectrum and equalizer are plain C++ objects created in init.
    if (_audioSpectrum) {
        delete _audioSpectrum;
        _audioSpectrum = NULL;
    }

    if (_audioEqualizer) {
        delete _audioEqualizer;
        _audioEqualizer = NULL;
    }
}
211
// Returns the native audio spectrum object (owned by this player; valid until
// dealloc). The AVFAudioSpectrumUnit* is upcast to its CAudioSpectrum base.
- (CAudioSpectrum*) audioSpectrum {
    return _audioSpectrum;
}
215
// Returns the native audio equalizer object (owned by this player; valid until
// dealloc). The AVFAudioEqualizer* is upcast to its CAudioEqualizer base.
- (CAudioEqualizer*) audioEqualizer {
    return _audioEqualizer;
}
219
// Registers self as a KVO observer for keyPath and records the key path so
// dispose can remove the observation later. context distinguishes the key
// paths in observeValueForKeyPath:ofObject:change:context:.
- (void) observeKeyPath:(NSString*)keyPath withContext:(void*)context {
    [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:context];
    [keyPathsObserved addObject:keyPath];
}
224
225 // If we get an unsupported pixel format in the video output, call this to
226 // force it to output our fallback format
- (void) setFallbackVideoFormat {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
    dispatch_async(dispatch_get_main_queue(), ^{
        // Re-establish a strong reference for the duration of the block. The
        // previous implementation referenced ivars (_displayLink, _playerItem,
        // playerQueue) directly inside the block, which captures self strongly
        // and silently defeats the weak reference above.
        AVFMediaPlayer *strongSelf = blockSelf;
        if (!strongSelf) {
            return; // player was deallocated before the block ran
        }
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Falling back on video format: %@", FourCCToNSString(FALLBACK_VO_FORMAT)] UTF8String]));
        AVPlayerItemVideoOutput *newOutput =
            [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
                @{(id)kCVPixelBufferPixelFormatTypeKey: @(FALLBACK_VO_FORMAT)}];

        if (newOutput) {
            // Swap the old output for one locked to the fallback pixel format.
            CVDisplayLinkStop(strongSelf->_displayLink);
            [strongSelf->_playerItem removeOutput:strongSelf->_playerOutput];
            [strongSelf->_playerOutput setDelegate:nil queue:nil];

            strongSelf.playerOutput = newOutput;
            [strongSelf->_playerOutput setDelegate:strongSelf queue:strongSelf->playerQueue];
            [strongSelf->_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [strongSelf->_playerItem addOutput:strongSelf->_playerOutput];
        }
    });
}
248
// Lazily creates the AVPlayerItemVideoOutput, display link and output
// delegate. Called once video tracks are known to exist; a no-op if the
// output has already been created. Synchronized against dispose.
- (void) createVideoOutput {
    @synchronized(self) {
        // Skip if already created
        if (!_playerOutput) {
#if FORCE_VO_FORMAT
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Forcing VO format: %@", FourCCToNSString(FORCED_VO_FORMAT)] UTF8String]));
#endif
            // Create the player video output
            // kCVPixelFormatType_32ARGB comes out inverted, so don't use it
            // '2vuy' -> kCVPixelFormatType_422YpCbCr8 -> YCbCr_422 (uses less CPU too)
            // kCVPixelFormatType_420YpCbCr8Planar
            _playerOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
#if FORCE_VO_FORMAT
                             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FORCED_VO_FORMAT)}];
#else
                             @{}]; // let AVFoundation decide the format...
#endif
            if (!_playerOutput) {
                return;
            }
            // We pull frames ourselves via the display link, so don't let the
            // player render them too.
            _playerOutput.suppressesPlayerRendering = YES;

            // Set up the display link (do we need this??)
            // might need to create a display link context struct that retains us
            // rather than passing self as the context
            CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
            CVDisplayLinkSetOutputCallback(_displayLink, displayLinkCallback, (__bridge void *)self);
            // Pause display link to conserve power
            CVDisplayLinkStop(_displayLink);

            // Set up playerOutput delegate
            [_playerOutput setDelegate:self queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];

            [_playerItem addOutput:_playerOutput];
        }
    }
}
287
// Reports a player state transition to the Java event dispatcher, suppressing
// duplicate notifications of the current state.
- (void) setPlayerState:(int)newState {
    if (newState == previousPlayerState) {
        return; // no transition, nothing to report
    }
    // For now just send up to client
    eventHandler->SendPlayerStateEvent(newState, 0.0);
    previousPlayerState = newState;
}
295
// KVO dispatch for the key paths registered in init, distinguished by
// context pointer. Unrecognized contexts are forwarded to super as KVO
// convention requires.
- (void) observeValueForKeyPath:(NSString *)keyPath
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
    if (context == AVFMediaPlayerItemStatusContext) {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] longValue];
        if (status == AVPlayerStatusReadyToPlay) {
            if (!_movieReady) {
                // Only send this once, though we'll receive notification a few times
                [self setPlayerState:kPlayerState_READY];
                _movieReady = true;
            }
        }
    } else if (context == AVFMediaPlayerItemDurationContext) {
        // send update duration event
        double duration = CMTimeGetSeconds(_playerItem.duration);
        eventHandler->SendDurationUpdateEvent(duration);
    } else if (context == AVFMediaPlayerItemTracksContext) {
        [self extractTrackInfo];
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
319
// Current playback position in seconds.
- (double) currentTime
{
    return CMTimeGetSeconds([self.player currentTime]);
}
324
// Seeks playback to the given position (in seconds).
- (void) setCurrentTime:(double)time
{
    // Use a millisecond timescale: the previous timescale of 1 rounded the
    // target to whole seconds, making sub-second seeks impossible.
    [self.player seekToTime:CMTimeMakeWithSeconds(time, 1000)];
}
329
// YES when audio output is muted; delegates to the underlying AVPlayer.
- (BOOL) mute {
    return self.player.muted;
}
333
// Mutes or unmutes audio output on the underlying AVPlayer.
- (void) setMute:(BOOL)state {
    self.player.muted = state;
}
337
// Stores the audio sync delay and forwards it to the audio processor if one
// exists yet (the processor is created lazily when an audio track appears).
- (void) setAudioSyncDelay:(int64_t)audioSyncDelay {
    _audioSyncDelay = audioSyncDelay;
    if (_audioProcessor) {
        _audioProcessor.audioDelay = audioSyncDelay;
    }
}
344
// Last balance value set on this player (cached locally, not read back from
// the audio processor).
- (float) balance {
    return _balance;
}
348
// Caches the balance and forwards it to the audio processor if one exists yet.
- (void) setBalance:(float)balance {
    _balance = balance;
    if (_audioProcessor) {
        _audioProcessor.balance = balance;
    }
}
355
// Last volume value set on this player (cached locally, not read back from
// the audio processor).
- (float) volume {
    return _volume;
}
359
// Caches the volume and forwards it to the audio processor if one exists yet.
- (void) setVolume:(float)volume {
    _volume = volume;
    if (_audioProcessor) {
        _audioProcessor.volume = volume;
    }
}
366
// Current playback rate, straight from the underlying AVPlayer.
- (float) rate {
    return self.player.rate;
}
370
// Sets the playback rate on the underlying AVPlayer (0.0 pauses).
- (void) setRate:(float)rate {
    self.player.rate = rate;
}
374
// Media duration in seconds, or -1.0 while the item is not yet ready to play
// (duration is only meaningful once status is ReadyToPlay).
- (double) duration {
    if (self.playerItem.status != AVPlayerItemStatusReadyToPlay) {
        return -1.0; // unknown until the item is ready
    }
    return CMTimeGetSeconds(self.playerItem.duration);
}
381
// Starts playback and reports the PLAYING state to the client.
- (void) play {
    [self.player play];
    [self setPlayerState:kPlayerState_PLAYING];
}
386
// Pauses playback and reports the PAUSED state to the client.
- (void) pause {
    [self.player pause];
    [self setPlayerState:kPlayerState_PAUSED];
}
391
// Stops playback: pause, rewind to the start, and report STOPPED.
- (void) stop {
    [self.player pause];
    [self.player seekToTime:kCMTimeZero];
    [self setPlayerState:kPlayerState_STOPPED];
}
397
// Intentionally empty: end-of-media is handled by the
// AVPlayerItemDidPlayToEndTimeNotification observer registered in init.
- (void) finish {
}
400
// Idempotent teardown: detaches the audio mix and video output, reports
// HALTED, unregisters all notification and KVO observers recorded in init,
// and releases the display link. Safe to call more than once; guarded by
// isDisposed under @synchronized.
- (void) dispose {
    @synchronized(self) {
        if (!isDisposed) {
            if (_player != nil) {
                // this should stop and dealloc the audio processor
                _player.currentItem.audioMix = nil;
            }

            if (_playerOutput != nil) {
                [_playerItem removeOutput:_playerOutput];
                [_playerOutput setDelegate:nil queue:nil];
            }

            [self setPlayerState:kPlayerState_HALTED];

            // Balance the addObserverForName: registrations from init.
            NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
            for (id<NSObject> observer in playerObservers) {
                [center removeObserver:observer];
            }

            // Balance the observeKeyPath:withContext: registrations from init.
            for (NSString *keyPath in keyPathsObserved) {
                [self removeObserver:self forKeyPath:keyPath];
            }

            if (_displayLink) {
                CVDisplayLinkStop(_displayLink);
                CVDisplayLinkRelease(_displayLink);
                _displayLink = NULL;
            }
            isDisposed = YES;
        }
    }
}
434
435 - (void) extractTrackInfo {
436 #if DUMP_TRACK_INFO
437 NSMutableString *trackLog = [[NSMutableString alloc] initWithFormat:
438 @"Parsing tracks for player item %@:\n",
439 _playerItem];
440 #endif
441 NSArray *tracks = self.playerItem.tracks;
442 int videoIndex = 1;
443 int audioIndex = 1;
444 int textIndex = 1;
445 BOOL createVideo = NO;
446
447 for (AVPlayerItemTrack *trackObj in tracks) {
448 AVAssetTrack *track = trackObj.assetTrack;
449 NSString *type = track.mediaType;
450 NSString *name = nil;
451 NSString *lang = @"und";
452 CTrack::Encoding encoding = CTrack::CUSTOM;
453 FourCharCode fcc = 0;
454
455 CMFormatDescriptionRef desc = NULL;
456 NSArray *formatDescList = track.formatDescriptions;
457 if (formatDescList && formatDescList.count > 0) {
458 desc = (__bridge CMFormatDescriptionRef)[formatDescList objectAtIndex:0];
459 if (!desc) {
460 TRACK_LOG(@"Can't get format description, skipping track");
461 continue;
515 width,
516 height,
517 frameRate,
518 false);
519
520 TRACK_LOG(@" track name: %@", name);
521 TRACK_LOG(@" video attributes:");
522 TRACK_LOG(@" width: %d", width);
523 TRACK_LOG(@" height: %d", height);
524 TRACK_LOG(@" frame rate: %2.2f", frameRate);
525
526 eventHandler->SendVideoTrackEvent(outTrack);
527 delete outTrack;
528
529 // signal to create the video output when we're done
530 createVideo = YES;
531 } else if ([type isEqualTo:AVMediaTypeAudio]) {
532 name = [NSString stringWithFormat:@"Audio Track %d", audioIndex++];
533 TRACK_LOG(@" track name: %@", name);
534
535 // Create audio processor
536 if (!_audioProcessor) {
537 _audioProcessor = [[AVFAudioProcessor alloc] initWithPlayer:self
538 assetTrack:track];
539 _audioProcessor.volume = _volume;
540 _audioProcessor.balance = _balance;
541 // Make sure the players volume is set to 1.0
542 self.player.volume = 1.0;
543
544 // Set up EQ and spectrum
545 _audioProcessor.audioSpectrum = _audioSpectrum;
546 _audioProcessor.audioEqualizer = _audioEqualizer;
547 }
548
549 // We have to get the audio information from the format description
550 const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
551 size_t layoutSize;
552 const AudioChannelLayout *layout = CMAudioFormatDescriptionGetChannelLayout(desc, &layoutSize);
553 int channels = 2;
554 int channelMask = CAudioTrack::FRONT_LEFT | CAudioTrack::FRONT_RIGHT;
555 float sampleRate = 44100.0;
556
557 TRACK_LOG(@" audio attributes:");
558 if (asbd) {
559 sampleRate = (float)asbd->mSampleRate;
560 TRACK_LOG(@" sample rate: %2.2f", sampleRate);
561 }
562 if (layout) {
563 channels = (int)AudioChannelLayoutTag_GetNumberOfChannels(layout->mChannelLayoutTag);
564
565 TRACK_LOG(@" channel count: %d", channels);
566 TRACK_LOG(@" channel mask: %02x", channelMask);
674 [self sendPixelBuffer:pixBuff frameTime:frameTime hostTime:inOutputTime->hostTime];
675 self.hlsBugResetCount = 0;
676
677 CVBufferRelease(pixBuff);
678 } else {
679 CMTime delta = CMClockMakeHostTimeFromSystemUnits(inNow->hostTime - self.lastHostTime);
680 NSTimeInterval elapsedTime = CMTimeGetSeconds(delta);
681
682 if (elapsedTime > FREEWHEELING_PERIOD_IN_SECONDS) {
683 if (self.player.rate != 0.0) {
684 if (self.hlsBugResetCount > 9) {
685 /*
686 * There is a bug in AVFoundation where if we're playing a HLS
687 * stream and it switches to a different bitrate, the video
688 * output will stop receiving frames. So far, the only workaround
689 * for this has been to remove then re-add the video output
690 * This causes the video to pause for a bit, but it's better
691 * than not playing at all, and this should not happen once
692 * the bug is fixed in AVFoundation.
693 */
694 [self.playerItem removeOutput:playerItemVideoOutput];
695 [self.playerItem addOutput:playerItemVideoOutput];
696 self.hlsBugResetCount = 0;
697 self.lastHostTime = inNow->hostTime;
698 // fall through to allow it to stop the display link
699 } else {
700 self.hlsBugResetCount++;
701 self.lastHostTime = inNow->hostTime;
702 return kCVReturnSuccess;
703 }
704 }
705 // No new images for a while. Shut down the display link to conserve
706 // power, but request a wakeup call if new images are coming.
707 CVDisplayLinkStop(displayLink);
708 [playerItemVideoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
709 }
710 }
711
712 return kCVReturnSuccess;
713 }
|
1 /*
2 * Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. Oracle designates this
8 * particular file as subject to the "Classpath" exception as provided
9 * by Oracle in the LICENSE file that accompanied this code.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
88 @implementation AVFMediaPlayer
89
90 static void SpectrumCallbackProc(void *context, double duration);
91
92 static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
93 const CVTimeStamp *inNow,
94 const CVTimeStamp *inOutputTime,
95 CVOptionFlags flagsIn,
96 CVOptionFlags *flagsOut,
97 void *displayLinkContext);
98
+ (BOOL) playerAvailable {
    // AVPlayerItemVideoOutput only exists on OS X 10.8+; when the runtime
    // cannot resolve the class we are on 10.7 or earlier and the caller must
    // fall back on QTKit.
    return (objc_getClass("AVPlayerItemVideoOutput") != nil);
}
105
// Designated initializer. Builds an AVPlayer directly from the URL, registers
// an end-of-media notification observer and KVO on the current item's
// status/duration/tracks, and creates the audio processor whose spectrum unit
// calls back into SpectrumCallbackProc. Returns nil if the player cannot be
// created. hdlr is a raw C++ pointer; the caller retains ownership and must
// keep it alive for this player's lifetime.
- (id) initWithURL:(NSURL *)source eventHandler:(CJavaPlayerEventDispatcher*)hdlr {
    if ((self = [super init]) != nil) {
        previousWidth = -1;
        previousHeight = -1;
        previousPlayerState = kPlayerState_UNKNOWN;

        eventHandler = hdlr;

        self.movieURL = source;
        _buggyHLSSupport = NO;
        _hlsBugResetCount = 0;

        // Create our own work queue (serial; used for the video output delegate)
        playerQueue = dispatch_queue_create(NULL, NULL);

        // Create the player
        _player = [AVPlayer playerWithURL:source];
        if (!_player) {
            return nil;
        }
        _player.volume = 1.0f;
        _player.muted = NO;

        // Set the player item end action to NONE since we'll handle it internally
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

        /*
         * AVPlayerItem notifications we could listen for:
         * 10.7 AVPlayerItemTimeJumpedNotification -> the item's current time has changed discontinuously
         * 10.7 AVPlayerItemDidPlayToEndTimeNotification -> item has played to its end time
         * 10.7 AVPlayerItemFailedToPlayToEndTimeNotification (userInfo = NSError) -> item has failed to play to its end time
         * 10.9 AVPlayerItemPlaybackStalledNotification -> media did not arrive in time to continue playback
         */
        // Observers are recorded in playerObservers so dispose can remove them.
        playerObservers = [[NSMutableArray alloc] init];
        id<NSObject> observer;
        __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        observer = [center addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                       object:_player.currentItem
                                        queue:[NSOperationQueue mainQueue]
                                   usingBlock:^(NSNotification *note) {
                                       // promote FINISHED state...
                                       [blockSelf setPlayerState:kPlayerState_FINISHED];
                                   }];
        if (observer) {
            [playerObservers addObject:observer];
        }

        // KVO registrations are recorded in keyPathsObserved so dispose can
        // balance them with removeObserver:forKeyPath:.
        keyPathsObserved = [[NSMutableArray alloc] init];
        [self observeKeyPath:@"self.player.currentItem.status"
                 withContext:AVFMediaPlayerItemStatusContext];

        [self observeKeyPath:@"self.player.currentItem.duration"
                 withContext:AVFMediaPlayerItemDurationContext];

        [self observeKeyPath:@"self.player.currentItem.tracks"
                 withContext:AVFMediaPlayerItemTracksContext];


        [self setPlayerState:kPlayerState_UNKNOWN];

        // filled out later
        _videoFormat = nil;
        _lastHostTime = 0LL;

        // Don't create video output until we know we have video
        _playerOutput = nil;
        _displayLink = NULL;

        // The processor owns the spectrum/equalizer (smart pointers); hook up
        // the spectrum callback so analysis results reach this player.
        _audioProcessor = [[AVFAudioProcessor alloc] init];
        if (_audioProcessor.audioSpectrum != nullptr) {
            _audioProcessor.audioSpectrum->SetSpectrumCallbackProc(SpectrumCallbackProc, (__bridge void*)self);
        }

        isDisposed = NO;
    }
    return self;
}
184
- (void) dealloc {
    // Tear everything down first (KVO, notifications, display link, spectrum
    // callback) so no callback can fire during deallocation.
    [self dispose];

    self.movieURL = nil;
    self.player = nil;
    self.playerOutput = nil;
}
192
// Returns the audio processor's spectrum unit as its CAudioSpectrum base.
// NOTE(review): dereferences the smart pointer without a null check; if the
// processor has no spectrum this is undefined — callers appear to assume it
// always exists. Confirm against AVFAudioProcessor.
- (CAudioSpectrum*) audioSpectrum {
    AVFAudioSpectrumUnitPtr asPtr = _audioProcessor.audioSpectrum;
    return static_cast<CAudioSpectrum*>(&(*asPtr));
}
197
// Returns the audio processor's equalizer as its CAudioEqualizer base.
// NOTE(review): dereferences the smart pointer without a null check, same
// caveat as audioSpectrum above.
- (CAudioEqualizer*) audioEqualizer {
    AVFAudioEqualizerPtr eqPtr = _audioProcessor.audioEqualizer;
    return static_cast<CAudioEqualizer*>(&(*eqPtr));
}
202
// Registers self as a KVO observer for keyPath and records the key path so
// dispose can remove the observation later. context distinguishes the key
// paths in observeValueForKeyPath:ofObject:change:context:.
- (void) observeKeyPath:(NSString*)keyPath withContext:(void*)context {
    [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:context];
    [keyPathsObserved addObject:keyPath];
}
207
208 // If we get an unsupported pixel format in the video output, call this to
209 // force it to output our fallback format
- (void) setFallbackVideoFormat {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
    dispatch_async(dispatch_get_main_queue(), ^{
        // Re-establish a strong reference for the duration of the block. The
        // previous implementation referenced ivars (_displayLink, _player,
        // playerQueue) directly inside the block, which captures self strongly
        // and silently defeats the weak reference above.
        AVFMediaPlayer *strongSelf = blockSelf;
        if (!strongSelf) {
            return; // player was deallocated before the block ran
        }
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Falling back on video format: %@", FourCCToNSString(FALLBACK_VO_FORMAT)] UTF8String]));
        AVPlayerItemVideoOutput *newOutput =
            [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
                @{(id)kCVPixelBufferPixelFormatTypeKey: @(FALLBACK_VO_FORMAT)}];

        if (newOutput) {
            // Swap the old output for one locked to the fallback pixel format.
            CVDisplayLinkStop(strongSelf->_displayLink);
            [strongSelf->_player.currentItem removeOutput:strongSelf->_playerOutput];
            [strongSelf->_playerOutput setDelegate:nil queue:nil];

            strongSelf.playerOutput = newOutput;
            [strongSelf->_playerOutput setDelegate:strongSelf queue:strongSelf->playerQueue];
            [strongSelf->_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [strongSelf->_player.currentItem addOutput:strongSelf->_playerOutput];
        }
    });
}
231
// Lazily creates the AVPlayerItemVideoOutput, display link and output
// delegate. Called once video tracks are known to exist; a no-op if the
// output has already been created. Synchronized against dispose.
- (void) createVideoOutput {
    @synchronized(self) {
        // Skip if already created
        if (!_playerOutput) {
#if FORCE_VO_FORMAT
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Forcing VO format: %@", FourCCToNSString(FORCED_VO_FORMAT)] UTF8String]));
#endif
            // Create the player video output
            // kCVPixelFormatType_32ARGB comes out inverted, so don't use it
            // '2vuy' -> kCVPixelFormatType_422YpCbCr8 -> YCbCr_422 (uses less CPU too)
            // kCVPixelFormatType_420YpCbCr8Planar
            _playerOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
#if FORCE_VO_FORMAT
                             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FORCED_VO_FORMAT)}];
#else
                             @{}]; // let AVFoundation decide the format...
#endif
            if (!_playerOutput) {
                return;
            }
            // We pull frames ourselves via the display link, so don't let the
            // player render them too.
            _playerOutput.suppressesPlayerRendering = YES;

            // Set up the display link (do we need this??)
            // might need to create a display link context struct that retains us
            // rather than passing self as the context
            CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
            CVDisplayLinkSetOutputCallback(_displayLink, displayLinkCallback, (__bridge void *)self);
            // Pause display link to conserve power
            CVDisplayLinkStop(_displayLink);

            // Set up playerOutput delegate
            [_playerOutput setDelegate:self queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];

            [_player.currentItem addOutput:_playerOutput];
        }
    }
}
270
// Reports a player state transition to the Java event dispatcher, suppressing
// duplicate notifications of the current state.
- (void) setPlayerState:(int)newState {
    if (newState == previousPlayerState) {
        return; // no transition, nothing to report
    }
    // For now just send up to client
    eventHandler->SendPlayerStateEvent(newState, 0.0);
    previousPlayerState = newState;
}
278
// KVO dispatch for the key paths registered in init, distinguished by
// context pointer. Unrecognized contexts are forwarded to super as KVO
// convention requires.
- (void) observeValueForKeyPath:(NSString *)keyPath
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
    if (context == AVFMediaPlayerItemStatusContext) {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] longValue];
        if (status == AVPlayerStatusReadyToPlay) {
            if (!_movieReady) {
                // Only send this once, though we'll receive notification a few times
                [self setPlayerState:kPlayerState_READY];
                _movieReady = true;
            }
        }
    } else if (context == AVFMediaPlayerItemDurationContext) {
        // send update duration event
        double duration = CMTimeGetSeconds(_player.currentItem.duration);
        eventHandler->SendDurationUpdateEvent(duration);
    } else if (context == AVFMediaPlayerItemTracksContext) {
        [self extractTrackInfo];
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
302
// Current playback position in seconds.
- (double) currentTime
{
    return CMTimeGetSeconds([self.player currentTime]);
}
307
// Seeks playback to the given position (in seconds).
- (void) setCurrentTime:(double)time
{
    // Use a millisecond timescale: the previous timescale of 1 rounded the
    // target to whole seconds, making sub-second seeks impossible.
    [self.player seekToTime:CMTimeMakeWithSeconds(time, 1000)];
}
312
// YES when audio output is muted; delegates to the underlying AVPlayer.
- (BOOL) mute {
    return self.player.muted;
}
316
// Mutes or unmutes audio output on the underlying AVPlayer.
- (void) setMute:(BOOL)state {
    self.player.muted = state;
}
320
// Audio sync delay, read straight from the audio processor.
- (int64_t) audioSyncDelay {
    return _audioProcessor.audioDelay;
}
324
// Forwards the audio sync delay to the audio processor (created in init).
- (void) setAudioSyncDelay:(int64_t)audioSyncDelay {
    _audioProcessor.audioDelay = audioSyncDelay;
}
328
// Stereo balance, read straight from the audio processor.
- (float) balance {
    return _audioProcessor.balance;
}
332
// Forwards the stereo balance to the audio processor.
- (void) setBalance:(float)balance {
    _audioProcessor.balance = balance;
}
336
// Volume, read straight from the audio processor.
- (float) volume {
    return _audioProcessor.volume;
}
340
// Forwards the volume to the audio processor.
- (void) setVolume:(float)volume {
    _audioProcessor.volume = volume;
}
344
// Current playback rate, straight from the underlying AVPlayer.
- (float) rate {
    return self.player.rate;
}
348
// Sets the playback rate on the underlying AVPlayer (0.0 pauses).
- (void) setRate:(float)rate {
    self.player.rate = rate;
}
352
// Media duration in seconds, or -1.0 while the item is not yet ready to play
// (duration is only meaningful once status is ReadyToPlay).
- (double) duration {
    if (self.player.currentItem.status != AVPlayerItemStatusReadyToPlay) {
        return -1.0; // unknown until the item is ready
    }
    return CMTimeGetSeconds(self.player.currentItem.duration);
}
359
// Starts playback and reports the PLAYING state to the client.
- (void) play {
    [self.player play];
    [self setPlayerState:kPlayerState_PLAYING];
}
364
// Pauses playback and reports the PAUSED state to the client.
- (void) pause {
    [self.player pause];
    [self setPlayerState:kPlayerState_PAUSED];
}
369
// Stops playback: pause, rewind to the start, and report STOPPED.
- (void) stop {
    [self.player pause];
    [self.player seekToTime:kCMTimeZero];
    [self setPlayerState:kPlayerState_STOPPED];
}
375
// Intentionally empty: end-of-media is handled by the
// AVPlayerItemDidPlayToEndTimeNotification observer registered in init.
- (void) finish {
}
378
// Idempotent teardown: halts playback, silences the spectrum callback,
// detaches the video output, reports HALTED, unregisters all notification
// and KVO observers recorded in init, and releases the display link. Safe to
// call more than once; guarded by isDisposed under @synchronized.
- (void) dispose {
    @synchronized(self) {
        if (!isDisposed) {
            if (_player != nil) {
                // stop the player
                _player.rate = 0.0;
                [_player cancelPendingPrerolls];
            }

            AVFAudioSpectrumUnitPtr asPtr = _audioProcessor.audioSpectrum;
            if (asPtr != nullptr) {
                // Prevent future spectrum callbacks
                asPtr->SetEnabled(FALSE);
                asPtr->SetSpectrumCallbackProc(NULL, NULL);
                asPtr->SetBands(0, NULL);
            }

            if (_playerOutput != nil) {
                [_player.currentItem removeOutput:_playerOutput];
                [_playerOutput setDelegate:nil queue:nil];
            }

            [self setPlayerState:kPlayerState_HALTED];

            // Balance the addObserverForName: registrations from init.
            NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
            for (id<NSObject> observer in playerObservers) {
                [center removeObserver:observer];
            }

            // Balance the observeKeyPath:withContext: registrations from init.
            for (NSString *keyPath in keyPathsObserved) {
                [self removeObserver:self forKeyPath:keyPath];
            }

            if (_displayLink) {
                CVDisplayLinkStop(_displayLink);
                CVDisplayLinkRelease(_displayLink);
                _displayLink = NULL;
            }
            isDisposed = YES;
        }
    }
}
421
422 - (void) extractTrackInfo {
423 #if DUMP_TRACK_INFO
424 NSMutableString *trackLog = [[NSMutableString alloc] initWithFormat:
425 @"Parsing tracks for player item %@:\n",
426 _player.currentItem];
427 #endif
428 NSArray *tracks = self.player.currentItem.tracks;
429 int videoIndex = 1;
430 int audioIndex = 1;
431 int textIndex = 1;
432 BOOL createVideo = NO;
433
434 for (AVPlayerItemTrack *trackObj in tracks) {
435 AVAssetTrack *track = trackObj.assetTrack;
436 NSString *type = track.mediaType;
437 NSString *name = nil;
438 NSString *lang = @"und";
439 CTrack::Encoding encoding = CTrack::CUSTOM;
440 FourCharCode fcc = 0;
441
442 CMFormatDescriptionRef desc = NULL;
443 NSArray *formatDescList = track.formatDescriptions;
444 if (formatDescList && formatDescList.count > 0) {
445 desc = (__bridge CMFormatDescriptionRef)[formatDescList objectAtIndex:0];
446 if (!desc) {
447 TRACK_LOG(@"Can't get format description, skipping track");
448 continue;
502 width,
503 height,
504 frameRate,
505 false);
506
507 TRACK_LOG(@" track name: %@", name);
508 TRACK_LOG(@" video attributes:");
509 TRACK_LOG(@" width: %d", width);
510 TRACK_LOG(@" height: %d", height);
511 TRACK_LOG(@" frame rate: %2.2f", frameRate);
512
513 eventHandler->SendVideoTrackEvent(outTrack);
514 delete outTrack;
515
516 // signal to create the video output when we're done
517 createVideo = YES;
518 } else if ([type isEqualTo:AVMediaTypeAudio]) {
519 name = [NSString stringWithFormat:@"Audio Track %d", audioIndex++];
520 TRACK_LOG(@" track name: %@", name);
521
522 // Set up audio processing
523 if (_audioProcessor) {
524 // Make sure the players volume is set to 1.0
525 self.player.volume = 1.0;
526
527 // set up the mixer
528 _audioProcessor.audioTrack = track;
529 self.player.currentItem.audioMix = _audioProcessor.mixer;
530 }
531
532 // We have to get the audio information from the format description
533 const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
534 size_t layoutSize;
535 const AudioChannelLayout *layout = CMAudioFormatDescriptionGetChannelLayout(desc, &layoutSize);
536 int channels = 2;
537 int channelMask = CAudioTrack::FRONT_LEFT | CAudioTrack::FRONT_RIGHT;
538 float sampleRate = 44100.0;
539
540 TRACK_LOG(@" audio attributes:");
541 if (asbd) {
542 sampleRate = (float)asbd->mSampleRate;
543 TRACK_LOG(@" sample rate: %2.2f", sampleRate);
544 }
545 if (layout) {
546 channels = (int)AudioChannelLayoutTag_GetNumberOfChannels(layout->mChannelLayoutTag);
547
548 TRACK_LOG(@" channel count: %d", channels);
549 TRACK_LOG(@" channel mask: %02x", channelMask);
657 [self sendPixelBuffer:pixBuff frameTime:frameTime hostTime:inOutputTime->hostTime];
658 self.hlsBugResetCount = 0;
659
660 CVBufferRelease(pixBuff);
661 } else {
662 CMTime delta = CMClockMakeHostTimeFromSystemUnits(inNow->hostTime - self.lastHostTime);
663 NSTimeInterval elapsedTime = CMTimeGetSeconds(delta);
664
665 if (elapsedTime > FREEWHEELING_PERIOD_IN_SECONDS) {
666 if (self.player.rate != 0.0) {
667 if (self.hlsBugResetCount > 9) {
668 /*
669 * There is a bug in AVFoundation where if we're playing a HLS
670 * stream and it switches to a different bitrate, the video
671 * output will stop receiving frames. So far, the only workaround
672 * for this has been to remove then re-add the video output
673 * This causes the video to pause for a bit, but it's better
674 * than not playing at all, and this should not happen once
675 * the bug is fixed in AVFoundation.
676 */
677 [self.player.currentItem removeOutput:playerItemVideoOutput];
678 [self.player.currentItem addOutput:playerItemVideoOutput];
679 self.hlsBugResetCount = 0;
680 self.lastHostTime = inNow->hostTime;
681 // fall through to allow it to stop the display link
682 } else {
683 self.hlsBugResetCount++;
684 self.lastHostTime = inNow->hostTime;
685 return kCVReturnSuccess;
686 }
687 }
688 // No new images for a while. Shut down the display link to conserve
689 // power, but request a wakeup call if new images are coming.
690 CVDisplayLinkStop(displayLink);
691 [playerItemVideoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
692 }
693 }
694
695 return kCVReturnSuccess;
696 }
|