1 /*
  2  * Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.  Oracle designates this
  8  * particular file as subject to the "Classpath" exception as provided
  9  * by Oracle in the LICENSE file that accompanied this code.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 
 26 #import "AVFMediaPlayer.h"
 27 #import <objc/runtime.h>
 28 #import "CVVideoFrame.h"
 29 
 30 #import <PipelineManagement/NullAudioEqualizer.h>
 31 #import <PipelineManagement/NullAudioSpectrum.h>
 32 
 33 #import "AVFAudioProcessor.h"
 34 
 35 // "borrowed" from green screen player on ADC
 36 // These are used to reduce power consumption when there are no video frames
 37 // to be rendered, which is generally A Good Thing
 38 #define FREEWHEELING_PERIOD_IN_SECONDS 0.5
 39 #define ADVANCE_INTERVAL_IN_SECONDS 0.1
 40 
 41 // set to 1 to debug track information
 42 #define DUMP_TRACK_INFO 0
 43 
 44 // trick used by Apple in AVGreenScreenPlayer
 45 // This avoids calling [NSString isEqualTo:@"..."]
 46 // The actual value is meaningless, but needs to be unique
 47 static void *AVFMediaPlayerItemStatusContext = &AVFMediaPlayerItemStatusContext;
 48 static void *AVFMediaPlayerItemDurationContext = &AVFMediaPlayerItemDurationContext;
 49 static void *AVFMediaPlayerItemTracksContext = &AVFMediaPlayerItemTracksContext;
 50 
 51 #define FORCE_VO_FORMAT 0
 52 #if FORCE_VO_FORMAT
 53 // #define FORCED_VO_FORMAT kCVPixelFormatType_32BGRA
 54 // #define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8
 55 // #define FORCED_VO_FORMAT kCVPixelFormatType_420YpCbCr8Planar
 56  #define FORCED_VO_FORMAT kCVPixelFormatType_422YpCbCr8_yuvs // Unsupported, use to test fallback
 57 #endif
 58 
 59 // Apple really likes to output '2vuy', this should be the least expensive conversion
 60 #define FALLBACK_VO_FORMAT kCVPixelFormatType_422YpCbCr8
 61 
 62 #define FOURCC_CHAR(f) ((f) & 0x7f) ? (char)((f) & 0x7f) : '?'
 63 
 64 static inline NSString *FourCCToNSString(UInt32 fcc) {
 65     if (fcc < 0x100) {
 66         return [NSString stringWithFormat:@"%u", fcc];
 67     }
 68     return [NSString stringWithFormat:@"%c%c%c%c",
 69             FOURCC_CHAR(fcc >> 24),
 70             FOURCC_CHAR(fcc >> 16),
 71             FOURCC_CHAR(fcc >> 8),
 72             FOURCC_CHAR(fcc)];
 73 }
 74 
 75 #if DUMP_TRACK_INFO
// Appends one formatted line (plus a trailing newline) to the given log
// accumulator string. Only compiled in when DUMP_TRACK_INFO is enabled;
// used via the TRACK_LOG macro in extractTrackInfo.
static void append_log(NSMutableString *s, NSString *fmt, ...) {
    va_list args;
    va_start(args, fmt);
    NSString *appString = [[NSString alloc] initWithFormat:fmt arguments:args];
    [s appendFormat:@"%@\n", appString];
    va_end(args);
}
 83 #define TRACK_LOG(fmt, ...) append_log(trackLog, fmt, ##__VA_ARGS__)
 84 #else
 85 #define TRACK_LOG(...) {}
 86 #endif
 87 
 88 @implementation AVFMediaPlayer
 89 
 90 static void SpectrumCallbackProc(void *context, double duration, double timestamp);
 91 
 92 static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
 93                                     const CVTimeStamp *inNow,
 94                                     const CVTimeStamp *inOutputTime,
 95                                     CVOptionFlags flagsIn,
 96                                     CVOptionFlags *flagsOut,
 97                                     void *displayLinkContext);
 98 
 99 + (BOOL) playerAvailable {
100     // Check if AVPlayerItemVideoOutput exists, if not we're running on 10.7 or
101     // earlier which is no longer supported
102     Class klass = objc_getClass("AVPlayerItemVideoOutput");
103     return (klass != nil);
104 }
105 
// Designated initializer. Builds an AVPlayer for the given media URL,
// registers for end-of-media notifications, observes the player item's
// status/duration/tracks via KVO, and wires the audio processor's
// spectrum callback back into this instance. Returns nil if the
// underlying AVPlayer cannot be created.
- (id) initWithURL:(NSURL *)source eventHandler:(CJavaPlayerEventDispatcher*)hdlr {
    if ((self = [super init]) != nil) {
        // -1 sentinels: no frame size has been reported yet (see sendPixelBuffer:)
        previousWidth = -1;
        previousHeight = -1;
        previousPlayerState = kPlayerState_UNKNOWN;

        eventHandler = hdlr;

        self.movieURL = source;
        _buggyHLSSupport = NO;
        _hlsBugResetCount = 0;

        // Create our own work queue; used as the video output's delegate queue
        playerQueue = dispatch_queue_create(NULL, NULL);

        // Create the player
        _player = [AVPlayer playerWithURL:source];
        if (!_player) {
            return nil;
        }
        _player.volume = 1.0f;
        _player.muted = NO;

        // Set the player item end action to NONE since we'll handle it internally
        _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

        /*
         * AVPlayerItem notifications we could listen for:
         * 10.7 AVPlayerItemTimeJumpedNotification -> the item's current time has changed discontinuously
         * 10.7 AVPlayerItemDidPlayToEndTimeNotification -> item has played to its end time
         * 10.7 AVPlayerItemFailedToPlayToEndTimeNotification (userInfo = NSError) -> item has failed to play to its end time
         * 10.9 AVPlayerItemPlaybackStalledNotification -> media did not arrive in time to continue playback
         */
        // Observers are collected so dispose can remove them again
        playerObservers = [[NSMutableArray alloc] init];
        id<NSObject> observer;
        __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        observer = [center addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                       object:_player.currentItem
                                        queue:[NSOperationQueue mainQueue]
                                   usingBlock:^(NSNotification *note) {
                                       // promote FINISHED state...
                                       [blockSelf setPlayerState:kPlayerState_FINISHED];
                                   }];
        if (observer) {
            [playerObservers addObject:observer];
        }

        // KVO registrations, balanced by removeObserver calls in dispose
        keyPathsObserved = [[NSMutableArray alloc] init];
        [self observeKeyPath:@"self.player.currentItem.status"
                 withContext:AVFMediaPlayerItemStatusContext];

        [self observeKeyPath:@"self.player.currentItem.duration"
                 withContext:AVFMediaPlayerItemDurationContext];

        [self observeKeyPath:@"self.player.currentItem.tracks"
                 withContext:AVFMediaPlayerItemTracksContext];


        [self setPlayerState:kPlayerState_UNKNOWN];

        // filled out later
        _videoFormat = nil;
        _lastHostTime = 0LL;

        // Don't create video output until we know we have video
        _playerOutput = nil;
        _displayLink = NULL;

        _audioProcessor = [[AVFAudioProcessor alloc] init];
        if (_audioProcessor.audioSpectrum != nullptr) {
            // Route native spectrum callbacks back into this instance
            _audioProcessor.audioSpectrum->SetSpectrumCallbackProc(SpectrumCallbackProc, (__bridge void*)self);
        }

        isDisposed = NO;
    }
    return self;
}
184 
// Under ARC this performs only non-memory cleanup: dispose tears down
// observers, the display link and the audio chain; then the retained
// properties are cleared explicitly.
- (void) dealloc {
    [self dispose];

    self.movieURL = nil;
    self.player = nil;
    self.playerOutput = nil;
}
192 
// Exposes the audio processor's spectrum unit through the
// platform-independent CAudioSpectrum interface.
- (CAudioSpectrum*) audioSpectrum {
    AVFAudioSpectrumUnitPtr spectrum = _audioProcessor.audioSpectrum;
    CAudioSpectrum *native = static_cast<CAudioSpectrum*>(&(*spectrum));
    return native;
}
197 
// Exposes the audio processor's equalizer through the
// platform-independent CAudioEqualizer interface.
- (CAudioEqualizer*) audioEqualizer {
    AVFAudioEqualizerPtr equalizer = _audioProcessor.audioEqualizer;
    CAudioEqualizer *native = static_cast<CAudioEqualizer*>(&(*equalizer));
    return native;
}
202 
// Registers self as a KVO observer of keyPath (new-value changes only)
// and records the key path so dispose can unregister it later.
- (void) observeKeyPath:(NSString*)keyPath withContext:(void*)context {
    [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:context];
    [keyPathsObserved addObject:keyPath];
}
207 
208 // If we get an unsupported pixel format in the video output, call this to
209 // force it to output our fallback format
// Replaces the current video output with one pinned to FALLBACK_VO_FORMAT.
// Called from sendPixelBuffer: when AVFoundation hands us a pixel format
// CVVideoFrame cannot handle.
- (void) setFallbackVideoFormat {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    // NOTE(review): blockSelf only covers the delegate reference below; the
    // direct ivar accesses (_displayLink, _player, _playerOutput, playerQueue)
    // still capture self strongly. That merely extends self's lifetime here
    // (the block is not stored by self), but the comment "retain cycle
    // avoidance" overstates what blockSelf achieves -- confirm intent.
    __weak AVFMediaPlayer *blockSelf = self; // retain cycle avoidance
    dispatch_async(dispatch_get_main_queue(), ^{
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Falling back on video format: %@", FourCCToNSString(FALLBACK_VO_FORMAT)] UTF8String]));
        AVPlayerItemVideoOutput *newOutput =
        [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
         @{(id)kCVPixelBufferPixelFormatTypeKey: @(FALLBACK_VO_FORMAT)}];

        if (newOutput) {
            newOutput.suppressesPlayerRendering = YES;

            // Detach the old output before swapping in the new one
            CVDisplayLinkStop(_displayLink);
            [_player.currentItem removeOutput:_playerOutput];
            [_playerOutput setDelegate:nil queue:nil];

            self.playerOutput = newOutput;
            [_playerOutput setDelegate:blockSelf queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [_player.currentItem addOutput:_playerOutput];
        }
    });
}
233 
// Lazily creates the AVPlayerItemVideoOutput and its paused CVDisplayLink.
// Invoked from extractTrackInfo once a video track is discovered; does
// nothing if the output already exists. Guarded by @synchronized(self).
- (void) createVideoOutput {
    @synchronized(self) {
        // Skip if already created
        if (!_playerOutput) {
#if FORCE_VO_FORMAT
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Forcing VO format: %@", FourCCToNSString(FORCED_VO_FORMAT)] UTF8String]));
#endif
            // Create the player video output
            // kCVPixelFormatType_32ARGB comes out inverted, so don't use it
            // '2vuy' -> kCVPixelFormatType_422YpCbCr8 -> YCbCr_422 (uses less CPU too)
            // kCVPixelFormatType_420YpCbCr8Planar
            _playerOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
#if FORCE_VO_FORMAT
                             @{(id)kCVPixelBufferPixelFormatTypeKey: @(FORCED_VO_FORMAT)}];
#else
                             @{}]; // let AVFoundation decide the format...
#endif
            if (!_playerOutput) {
                return;
            }
            // We pull frames ourselves; don't let AVPlayer render them too
            _playerOutput.suppressesPlayerRendering = YES;

            // Set up the display link (do we need this??)
            // might need to create a display link context struct that retains us
            // rather than passing self as the context
            CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
            CVDisplayLinkSetOutputCallback(_displayLink, displayLinkCallback, (__bridge void *)self);
            // Pause display link to conserve power; restarted from
            // outputMediaDataWillChange: when frames become available
            CVDisplayLinkStop(_displayLink);

            // Set up playerOutput delegate
            [_playerOutput setDelegate:self queue:playerQueue];
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];

            [_player.currentItem addOutput:_playerOutput];
        }
    }
}
272 
// Reports a player state transition to the Java event handler, suppressing
// duplicate notifications of the state we already reported.
- (void) setPlayerState:(int)newState {
    if (newState == previousPlayerState) {
        return;
    }
    // For now just send up to client
    eventHandler->SendPlayerStateEvent(newState, 0.0);
    previousPlayerState = newState;
}
280 
// Workaround for an AVFoundation bug where the video output stops
// receiving frames after an HLS bitrate switch: detach and re-attach the
// video output to kick it back into action (see displayLinkCallback).
- (void) hlsBugReset {
    // schedule this to be done when we're not buried inside the AVPlayer callback
    dispatch_async(dispatch_get_main_queue(), ^{
        LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"hlsBugReset()"] UTF8String]));

        if (_playerOutput) {
            _playerOutput.suppressesPlayerRendering = YES;

            // Stop pulling frames while the output is detached
            CVDisplayLinkStop(_displayLink);
            [_player.currentItem removeOutput:_playerOutput];

            // Ask to be woken up (outputMediaDataWillChange:) once data flows again
            [_playerOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
            [_player.currentItem addOutput:_playerOutput];

            self.hlsBugResetCount = 0;
        }
    });
}
299 
// KVO dispatch for the key paths registered in initWithURL:eventHandler:.
// Contexts are compared by pointer identity (unique static addresses),
// avoiding key-path string comparisons.
- (void) observeValueForKeyPath:(NSString *)keyPath
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
    if (context == AVFMediaPlayerItemStatusContext) {
        // According to docs change[NSKeyValueChangeNewKey] can be NSNull when player.currentItem is nil
        if (![change[NSKeyValueChangeNewKey] isKindOfClass:[NSNull class]]) {
            AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] longValue];
            if (status == AVPlayerStatusReadyToPlay) {
                if (!_movieReady) {
                    LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Setting player to READY state"] UTF8String]));
                    // Only send this once, though we'll receive notification a few times
                    [self setPlayerState:kPlayerState_READY];
                    _movieReady = true;
                }
            }
        }
    } else if (context == AVFMediaPlayerItemDurationContext) {
        // send update duration event
        double duration = CMTimeGetSeconds(_player.currentItem.duration);
        eventHandler->SendDurationUpdateEvent(duration);
    } else if (context == AVFMediaPlayerItemTracksContext) {
        [self extractTrackInfo];
    } else {
        // Not one of our contexts; forward to super as KVO requires
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
327 
// Current playback position, in seconds.
- (double) currentTime
{
    CMTime now = [self.player currentTime];
    return CMTimeGetSeconds(now);
}
332 
// Seeks to the given position, in seconds.
// FIX: the previous code used CMTimeMakeWithSeconds(time, 1); a
// preferredTimescale of 1 rounds the seek target to whole seconds,
// making sub-second seeks impossible. 600 is the conventional media
// timescale (it evenly divides the common frame rates 24/25/30/60).
- (void) setCurrentTime:(double)time
{
    [self.player seekToTime:CMTimeMakeWithSeconds(time, 600)];
}
337 
// Whether audio output is currently muted.
- (BOOL) mute {
    return [self.player isMuted];
}
341 
// Mutes or unmutes audio output.
- (void) setMute:(BOOL)state {
    [self.player setMuted:state];
}
345 
// Audio/video sync delay, delegated to the audio processor.
- (int64_t) audioSyncDelay {
    return [_audioProcessor audioDelay];
}
349 
// Sets the audio/video sync delay on the audio processor.
- (void) setAudioSyncDelay:(int64_t)audioSyncDelay {
    [_audioProcessor setAudioDelay:audioSyncDelay];
}
353 
// Stereo balance, delegated to the audio processor.
- (float) balance {
    return [_audioProcessor balance];
}
357 
// Sets the stereo balance on the audio processor.
- (void) setBalance:(float)balance {
    [_audioProcessor setBalance:balance];
}
361 
// Playback volume, delegated to the audio processor (the AVPlayer's own
// volume is pinned to 1.0 in extractTrackInfo).
- (float) volume {
    return [_audioProcessor volume];
}
365 
// Sets the playback volume on the audio processor.
- (void) setVolume:(float)volume {
    [_audioProcessor setVolume:volume];
}
369 
// Current playback rate (0.0 when paused).
- (float) rate {
    return [self.player rate];
}
373 
// Sets the playback rate on the underlying AVPlayer.
- (void) setRate:(float)rate {
    [self.player setRate:rate];
}
377 
// Duration of the current item in seconds, or -1.0 until the item has
// reached AVPlayerItemStatusReadyToPlay.
- (double) duration {
    AVPlayerItem *item = self.player.currentItem;
    if (item.status != AVPlayerItemStatusReadyToPlay) {
        return -1.0;
    }
    return CMTimeGetSeconds(item.duration);
}
384 
// Starts playback and reports the PLAYING state to the event handler.
- (void) play {
    AVPlayer *player = self.player;
    [player play];
    [self setPlayerState:kPlayerState_PLAYING];
}
389 
// Pauses playback and reports the PAUSED state to the event handler.
- (void) pause {
    AVPlayer *player = self.player;
    [player pause];
    [self setPlayerState:kPlayerState_PAUSED];
}
394 
// Stops playback: pause, rewind to the start, then report STOPPED.
- (void) stop {
    AVPlayer *player = self.player;
    [player pause];
    [player seekToTime:kCMTimeZero];
    [self setPlayerState:kPlayerState_STOPPED];
}
400 
// Intentionally empty: end-of-media is reported via the
// AVPlayerItemDidPlayToEndTimeNotification observer installed in
// initWithURL:eventHandler:, which promotes the FINISHED state.
- (void) finish {
}
403 
// Tears the player down: stops playback, silences spectrum callbacks,
// detaches the video output, removes notification and KVO observers and
// releases the display link. Idempotent (guarded by isDisposed) and
// thread-safe via @synchronized(self). Also called from dealloc.
- (void) dispose {
    @synchronized(self) {
        if (!isDisposed) {
            if (_player != nil) {
                // stop the player
                _player.rate = 0.0;
                [_player cancelPendingPrerolls];
            }

            AVFAudioSpectrumUnitPtr asPtr = _audioProcessor.audioSpectrum;
            if (asPtr != nullptr) {
                // Prevent future spectrum callbacks
                asPtr->SetEnabled(FALSE);
                asPtr->SetSpectrumCallbackProc(NULL, NULL);
                asPtr->SetBands(0, NULL);
            }

            if (_playerOutput != nil) {
                [_player.currentItem removeOutput:_playerOutput];
                [_playerOutput setDelegate:nil queue:nil];
            }

            [self setPlayerState:kPlayerState_HALTED];

            // Balance the block-based observers added in init
            NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
            for (id<NSObject> observer in playerObservers) {
                [center removeObserver:observer];
            }

            // Balance the KVO registrations made via observeKeyPath:withContext:
            for (NSString *keyPath in keyPathsObserved) {
                [self removeObserver:self forKeyPath:keyPath];
            }

            if (_displayLink) {
                CVDisplayLinkStop(_displayLink);
                CVDisplayLinkRelease(_displayLink);
                _displayLink = NULL;
            }
            isDisposed = YES;
        }
    }
}
446 
// Walks the current player item's tracks and reports each recognized
// track (video, audio, closed caption) to the Java event handler.
// Invoked via KVO when currentItem.tracks changes. Discovering a video
// track triggers lazy creation of the video output at the end.
- (void) extractTrackInfo {
#if DUMP_TRACK_INFO
    NSMutableString *trackLog = [[NSMutableString alloc] initWithFormat:
                                 @"Parsing tracks for player item %@:\n",
                                 _player.currentItem];
#endif
    NSArray *tracks = self.player.currentItem.tracks;
    int trackIndex = 0; // 1-based position in the tracks array (logging only)
    int videoIndex = 1;
    int audioIndex = 1;
    int textIndex = 1;
    BOOL createVideo = NO;

    for (AVPlayerItemTrack *trackObj in tracks) {
        AVAssetTrack *track = trackObj.assetTrack;
        NSString *type = track.mediaType;
        NSString *name = nil;
        NSString *lang = @"und"; // ISO 639-2 "undetermined"
        CTrack::Encoding encoding = CTrack::CUSTOM;
        FourCharCode fcc = 0;

        trackIndex++;

        // Map the media subtype FourCC of the first format description
        // (if any) onto the JFX encoding enum
        CMFormatDescriptionRef desc = NULL;
        NSArray *formatDescList = track.formatDescriptions;
        if (formatDescList && formatDescList.count > 0) {
            desc = (__bridge CMFormatDescriptionRef)[formatDescList objectAtIndex:0];
            if (!desc) {
                TRACK_LOG(@"Can't get format description, skipping track");
                continue;
            }
            fcc = CMFormatDescriptionGetMediaSubType(desc);
            switch (fcc) {
                case 'avc1':
                    encoding = CTrack::H264;
                    break;
                case kAudioFormatLinearPCM:
                    encoding = CTrack::PCM;
                    break;
                case kAudioFormatMPEG4AAC:
                    encoding = CTrack::AAC;
                    break;
                case kAudioFormatMPEGLayer1:
                case kAudioFormatMPEGLayer2:
                    encoding = CTrack::MPEG1AUDIO;
                    break;
                case kAudioFormatMPEGLayer3:
                    encoding = CTrack::MPEG1LAYER3;
                    break;
                default:
                    // Everything else will show up as custom
                    break;
            }
        }

        if (track.languageCode) {
            lang = track.languageCode;
        }

        // FIX: this log line previously referenced an undeclared `index`
        // symbol, which silently resolved to the C library function
        // index() (printed as a pointer via %d) when DUMP_TRACK_INFO was
        // enabled. Use the explicit loop counter instead.
        TRACK_LOG(@"Track %d (%@)", trackIndex, track.mediaType);
        TRACK_LOG(@"  enabled: %s", track.enabled ? "YES" : "NO");
        TRACK_LOG(@"  track ID: %d", track.trackID);
        TRACK_LOG(@"  language code: %@ (%sprovided)", lang, track.languageCode ? "" : "NOT ");
        TRACK_LOG(@"  encoding (FourCC): '%@' (JFX encoding %d)",
                  FourCCToNSString(fcc),
                  (int)encoding);

        // Tracks in AVFoundation don't have names, so we'll need to give them
        // sequential names based on their type, e.g., "Video Track 1"
        if ([type isEqualTo:AVMediaTypeVideo]) {
            int width = -1;
            int height = -1;
            float frameRate = -1.0;
            if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
                width = (int)track.naturalSize.width;
                height = (int)track.naturalSize.height;
                frameRate = track.nominalFrameRate;
            }
            name = [NSString stringWithFormat:@"Video Track %d", videoIndex++];
            CVideoTrack *outTrack = new CVideoTrack((int64_t)track.trackID,
                                                   [name UTF8String],
                                                   encoding,
                                                   (bool)track.enabled,
                                                   width,
                                                   height,
                                                   frameRate,
                                                   false);

            TRACK_LOG(@"  track name: %@", name);
            TRACK_LOG(@"  video attributes:");
            TRACK_LOG(@"    width: %d", width);
            TRACK_LOG(@"    height: %d", height);
            TRACK_LOG(@"    frame rate: %2.2f", frameRate);

            eventHandler->SendVideoTrackEvent(outTrack);
            delete outTrack;

            // signal to create the video output when we're done
            createVideo = YES;
        } else if ([type isEqualTo:AVMediaTypeAudio]) {
            name = [NSString stringWithFormat:@"Audio Track %d", audioIndex++];
            TRACK_LOG(@"  track name: %@", name);

            // Set up audio processing
            if (_audioProcessor) {
                // Make sure the players volume is set to 1.0
                self.player.volume = 1.0;

                // set up the mixer
                _audioProcessor.audioTrack = track;
                self.player.currentItem.audioMix = _audioProcessor.mixer;
            }

            // We have to get the audio information from the format description.
            // FIX: guard against a missing format description -- previously a
            // NULL desc (empty formatDescriptions array) was passed straight
            // into the CoreMedia getters, which require a valid description.
            const AudioStreamBasicDescription *asbd = NULL;
            const AudioChannelLayout *layout = NULL;
            size_t layoutSize;
            if (desc) {
                asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
                layout = CMAudioFormatDescriptionGetChannelLayout(desc, &layoutSize);
            }
            // Stereo / 44.1 kHz defaults used when the description is absent
            int channels = 2;
            int channelMask = CAudioTrack::FRONT_LEFT | CAudioTrack::FRONT_RIGHT;
            float sampleRate = 44100.0;

            TRACK_LOG(@"  audio attributes:");
            if (asbd) {
                sampleRate = (float)asbd->mSampleRate;
                TRACK_LOG(@"    sample rate: %2.2f", sampleRate);
            }
            if (layout) {
                channels = (int)AudioChannelLayoutTag_GetNumberOfChannels(layout->mChannelLayoutTag);

                TRACK_LOG(@"    channel count: %d", channels);
                TRACK_LOG(@"    channel mask: %02x", channelMask);
            }

            CAudioTrack *audioTrack = new CAudioTrack((int64_t)track.trackID,
                                   [name UTF8String],
                                   encoding,
                                   (bool)track.enabled,
                                   [lang UTF8String],
                                   channels, channelMask, sampleRate);
            eventHandler->SendAudioTrackEvent(audioTrack);
            delete audioTrack;
        } else if ([type isEqualTo:AVMediaTypeClosedCaption]) {
            name = [NSString stringWithFormat:@"Subtitle Track %d", textIndex++];
            TRACK_LOG(@"  track name: %@", name);
            CSubtitleTrack *subTrack = new CSubtitleTrack((int64_t)track.trackID,
                                                         [name UTF8String],
                                                         encoding,
                                                         (bool)track.enabled,
                                                         [lang UTF8String]);
            eventHandler->SendSubtitleTrackEvent(subTrack);
            delete subTrack;
        }
    }

#if DUMP_TRACK_INFO
    LOGGER_INFOMSG([trackLog UTF8String]);
#endif

    if (createVideo) {
        [self createVideoOutput];
    }
}
606 
// AVPlayerItemOutputPullDelegate: media data is about to become
// available, so restart the display link (paused to save power) and
// reset the freewheeling clock and HLS workaround counter.
- (void) outputMediaDataWillChange:(AVPlayerItemOutput *)sender {
    _lastHostTime = CVGetCurrentHostTime();
    CVDisplayLinkStart(_displayLink);
    _hlsBugResetCount = 0;
}
612 
// AVPlayerItemOutputPullDelegate: the output was flushed (discontinuity),
// so reset the HLS workaround counter and the freewheeling clock.
- (void) outputSequenceWasFlushed:(AVPlayerItemOutput *)output {
    _hlsBugResetCount = 0;
    _lastHostTime = CVGetCurrentHostTime();
}
617 
// Wraps the pixel buffer in a CVVideoFrame and forwards it to the Java
// layer, first emitting a frame-size-changed event whenever the frame's
// dimensions differ from the previously reported ones. Unsupported pixel
// formats trigger the fallback video format path instead.
- (void) sendPixelBuffer:(CVPixelBufferRef)buf frameTime:(double)frameTime hostTime:(int64_t)hostTime {
    _lastHostTime = hostTime;
    CVVideoFrame *frame = NULL;
    try {
        // CVVideoFrame's constructor throws a const char* message on failure
        frame = new CVVideoFrame(buf, frameTime, _lastHostTime);
    } catch (const char *message) {
        // Check if the video format is supported, if not try our fallback format
        OSType format = CVPixelBufferGetPixelFormatType(buf);
        if (format == 0) {
            // Bad pixel format, possibly a bad frame or ???
            // This seems to happen when the stream is corrupt, so let's ignore
            // it and hope things recover
            return;
        }
        if (!CVVideoFrame::IsFormatSupported(format)) {
            LOGGER_DEBUGMSG(([[NSString stringWithFormat:@"Bad pixel format: '%@'",
                               FourCCToNSString(format)] UTF8String]));
            [self setFallbackVideoFormat];
            return;
        }
        // Can't use this frame, report an error and ignore it
        LOGGER_DEBUGMSG(message);
        return;
    }

    if (previousWidth < 0 || previousHeight < 0
        || previousWidth != frame->GetWidth() || previousHeight != frame->GetHeight())
    {
        // Send/Queue frame size changed event
        previousWidth = frame->GetWidth();
        previousHeight = frame->GetHeight();
        eventHandler->SendFrameSizeChangedEvent(previousWidth, previousHeight);
    }
    // NOTE(review): frame is not deleted here; SendNewFrameEvent appears to
    // take ownership of it -- confirm against the dispatcher implementation.
    eventHandler->SendNewFrameEvent(frame);
}
653 
// Delivers an audio spectrum event to the Java layer. A negative
// timestamp means "now", i.e. the player's current playback position.
- (void) sendSpectrumEventDuration:(double)duration timestamp:(double)timestamp {
    if (!eventHandler) {
        return;
    }
    double eventTime = (timestamp < 0) ? self.currentTime : timestamp;
    eventHandler->SendAudioSpectrumEvent(eventTime, duration);
}
662 
663 @end
664 
// Trampoline from the native spectrum unit back into Objective-C. The
// context pointer is the AVFMediaPlayer registered in
// initWithURL:eventHandler: (unretained __bridge cast).
static void SpectrumCallbackProc(void *context, double duration, double timestamp) {
    if (context == NULL) {
        return;
    }
    AVFMediaPlayer *player = (__bridge AVFMediaPlayer*)context;
    [player sendSpectrumEventDuration:duration timestamp:timestamp];
}
671 
// CVDisplayLink render callback, invoked on the display link's thread at
// every vsync. Pulls a frame from the video output if one is ready;
// otherwise, after FREEWHEELING_PERIOD_IN_SECONDS without frames, either
// applies the HLS workaround (if the player is still playing) or stops
// the link to conserve power. displayLinkContext is the owning
// AVFMediaPlayer (unretained __bridge pointer set in createVideoOutput).
static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext)
{
    AVFMediaPlayer *self = (__bridge AVFMediaPlayer *)displayLinkContext;
    AVPlayerItemVideoOutput *playerItemVideoOutput = self.playerOutput;

    // The displayLink calls back at every vsync (screen refresh)
    // Compute itemTime for the next vsync
    CMTime outputItemTime = [playerItemVideoOutput itemTimeForCVTimeStamp:*inOutputTime];
    if ([playerItemVideoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
        CVPixelBufferRef pixBuff = [playerItemVideoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        // Copy the pixel buffer to be displayed next and add it to AVSampleBufferDisplayLayer for display
        double frameTime = CMTimeGetSeconds(outputItemTime);
        [self sendPixelBuffer:pixBuff frameTime:frameTime hostTime:inOutputTime->hostTime];
        self.hlsBugResetCount = 0;

        // copyPixelBufferForItemTime follows the copy rule; release it here
        CVBufferRelease(pixBuff);
    } else {
        // No frame ready: measure how long we've been starved of frames
        CMTime delta = CMClockMakeHostTimeFromSystemUnits(inNow->hostTime - self.lastHostTime);
        NSTimeInterval elapsedTime = CMTimeGetSeconds(delta);

        if (elapsedTime > FREEWHEELING_PERIOD_IN_SECONDS) {
            if (self.player.rate != 0.0) {
                // Still playing but no frames: count starved callbacks
                // before triggering the HLS workaround (threshold of 10)
                if (self.hlsBugResetCount > 9) {
                    /*
                     * There is a bug in AVFoundation where if we're playing a HLS
                     * stream and it switches to a different bitrate, the video
                     * output will stop receiving frames. So far, the only workaround
                     * for this has been to remove then re-add the video output
                     * This causes the video to pause for a bit, but it's better
                     * than not playing at all, and this should not happen once
                     * the bug is fixed in AVFoundation.
                     */
                    [self hlsBugReset];
                    self.lastHostTime = inNow->hostTime;
                    return kCVReturnSuccess; // hlsBugReset() will stop display link
                } else {
                    self.hlsBugResetCount++;
                    self.lastHostTime = inNow->hostTime;
                    return kCVReturnSuccess;
                }
            }
            // No new images for a while.  Shut down the display link to conserve
            // power, but request a wakeup call if new images are coming.
            CVDisplayLinkStop(displayLink);
            [playerItemVideoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:ADVANCE_INTERVAL_IN_SECONDS];
        }
    }

    return kCVReturnSuccess;
}