// NOTE: extraction artifact — the original paste residue here read
// "framework2" / "1449 строк · 35.3 Кб" (page furniture: "1449 lines · 35.3 KB").
1//
2// ofAVFoundationVideoPlayer.m
3// Created by Lukasz Karluk on 06/07/14.
4// Merged with code by Sam Kronick, James George and Elie Zananiri.
5//
6
7#import "ofAVFoundationVideoPlayer.h"
8
9#define IS_OS_6_OR_LATER ([[[UIDevice currentDevice] systemVersion] floatValue] >= 6.0)
10
11
12
13static NSString * const kTracksKey = @"tracks";
14static NSString * const kStatusKey = @"status";
15static NSString * const kRateKey = @"rate";
16
17//---------------------------------------------------------- video player.
18@implementation ofAVFoundationVideoPlayer
19
// Unique KVO context markers: each static points at its own address so that
// -observeValueForKeyPath:... can tell the two observations apart.
static const void *ItemStatusContext = &ItemStatusContext;
// BUG FIX: this was initialised to &ItemStatusContext, which made both
// contexts compare equal — player-rate KVO callbacks were routed into the
// item-status branch (object != playerItem) and silently dropped, so
// bPlaying never tracked the player's actual rate.
static const void *PlayerRateContext = &PlayerRateContext;
22
23
/// Designated initializer: zeroes all playback state.
/// No AVPlayer is created here — that happens in -loadWithURL:async:
/// once an asset is available.
- (instancetype)init {
    self = [super init];
    if(self) {

        // create avplayer
        _player = nil;

        // serialises load/unload work across threads.
        asyncLock = [[NSLock alloc] init];
        deallocCond = nil;

#if defined(USE_VIDEO_OUTPUT)
        // create videooutput
        _videoOutput = nil;
        _videoInfo = nil;
#endif


        timeObserver = nil;

        videoSampleBuffer = nil;
        audioSampleBuffer = nil;
        // -infinity so the first decoded sample always compares "newer".
        videoSampleTime = kCMTimeNegativeInfinity;//kCMTimeZero;
        audioSampleTime = kCMTimeNegativeInfinity;//kCMTimeZero;
        synchSampleTime = kCMTimeInvalid;
        duration = kCMTimeZero;
        currentTime = kCMTimeZero;
        volume = 1;
        speed = 1;
        frameRate = 0;

        videoWidth = 0;
        videoHeight = 0;

        bWillBeUpdatedExternally = NO;
        bReady = NO;
        bLoaded = NO;
        bPlayStateBeforeLoad = NO;
        bUpdateFirstFrame = YES;
        bNewFrame = NO;
        bPlaying = NO;
        bWasPlayingBackwards = NO;
        bFinished = NO;
        bAutoPlayOnLoad = NO;
        loop = LOOP_NONE;
        bSeeking = NO;
        bSampleVideo = YES;
        bIsUnloaded = NO;
        frameBeforeReady = 0;
        positionBeforeReady = 0.F;

        // do not sample audio by default
        // we are lacking interfaces for audiodata
        bSampleAudio = NO;

        bStream = NO;
    }
    return self;
}
82
#if defined(USE_VIDEO_OUTPUT)
/// Lazily creates the AVPlayerItemVideoOutput used to pull decoded pixel
/// buffers when paused / scrubbing / playing backwards. Pixel format matches
/// the platform convention (BGRA on iOS, ARGB on OSX).
- (void)createVideoOutput
{
#ifdef TARGET_IOS
    NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
#elif defined(TARGET_OSX)
    NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB)};
#endif

    self.videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
    if (!self.videoOutput) {
        NSLog(@"error creating video output");
        return;
    }

    // frames are rendered by us, not by the player's own layer.
    self.videoOutput.suppressesPlayerRendering = YES;
}
#endif
101
102
103//---------------------------------------------------------- cleanup / dispose.
/// Tears down the player synchronously. -unloadVideo blocks until the async
/// cleanup in -unloadVideoAsync has signalled completion, so it is safe to
/// deallocate afterwards.
- (void)dealloc
{
    if (_player != nil){
        [self unloadVideo];
    }

    // lock/unlock barrier: ensures no other thread is still inside a
    // critical section guarded by asyncLock before we go away.
    [asyncLock lock];

    [asyncLock unlock];

    // release locks
    if (deallocCond != nil) {
        deallocCond = nil;
    }
}
119
120
121
122//---------------------------------------------------------- load / unload.
/// Loads a movie that lives inside the app bundle.
/// @param file   filename with extension, e.g. @"movie.mp4".
/// @param bAsync YES to load asynchronously (see -loadWithURL:async:).
/// @return NO on immediate failure; otherwise forwards the URL loader's result.
- (BOOL)loadWithFile:(NSString*)file async:(BOOL)bAsync{
    // BUG FIX: the previous implementation split on every "." which broke
    // filenames containing extra dots (e.g. "intro.v2.mp4") and crashed on
    // names without any extension. NSString's path helpers split on the
    // last dot only and are safe on extension-less names.
    NSString * resourceName = [file stringByDeletingPathExtension];
    NSString * resourceExtension = [file pathExtension];
    NSURL * fileURL = [[NSBundle mainBundle] URLForResource:resourceName
                                              withExtension:resourceExtension];

    return [self loadWithURL:fileURL async:bAsync stream:NO];
}
130
/// Loads a movie from an absolute filesystem path (non-streaming).
- (BOOL)loadWithPath:(NSString*)path async:(BOOL)bAsync{
    // wrap the path in a file URL and hand off to the URL-based loader.
    return [self loadWithURL:[NSURL fileURLWithPath:path] async:bAsync stream:NO];
}
135
/// Same as -loadWithURL:async: but records whether the URL is a stream.
/// Streams skip the asset-reader creation and the video-track presence check.
- (BOOL)loadWithURL:(NSURL*)url async:(BOOL)bAsync stream:(BOOL)isStream {
    bStream = isStream;
    return [self loadWithURL:url async:bAsync];
}
140
/**
 * Core loader. Creates an AVURLAsset, loads its tracks asynchronously, then
 * (on success) builds the asset reader, player item, AVPlayer and observers.
 *
 * bAsync == YES : track loading completes on the main queue; returns YES as
 *                 soon as loading has been kicked off.
 * bAsync == NO  : blocks on a semaphore until loading finished (or failed)
 *                 and returns whether the video actually loaded.
 */
- (BOOL)loadWithURL:(NSURL*)url async:(BOOL)bAsync {


    NSDictionary *options = @{(id)AVURLAssetPreferPreciseDurationAndTimingKey:@(YES)};
    AVURLAsset* asset = [AVURLAsset URLAssetWithURL:url options:options];

    if(asset == nil) {
        NSLog(@"error loading asset: %@", [url description]);
        return NO;
    }


    // store state, to be restored if loading fails.
    BOOL _bReady = bReady;
    BOOL _bLoaded = bLoaded;
    BOOL _bPlayStateBeforeLoad = bPlayStateBeforeLoad;

    // set internal state
    bIsUnloaded = NO;
    bReady = NO;
    bLoaded = NO;
    bIsStopped = YES;
    bPlayStateBeforeLoad = NO;
    frameBeforeReady = 0;
    positionBeforeReady = 0.F;

    // going to load
    dispatch_semaphore_t sema = dispatch_semaphore_create(0);
    dispatch_queue_t queue;
    if(bAsync == YES){
        queue = dispatch_get_main_queue();
    } else {
        queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    }

    dispatch_async(queue, ^{
        // BUG FIX: the @autoreleasepool was empty (closed immediately) so it
        // drained nothing; it now wraps the loading work as intended.
        @autoreleasepool {

            [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:kTracksKey] completionHandler:^{

                NSError * error = nil;
                AVKeyValueStatus status = [asset statusOfValueForKey:kTracksKey error:&error];

                if(status != AVKeyValueStatusLoaded) {
                    NSLog(@"error loading asset tracks: %@", [error localizedDescription]);
                    // reset
                    self->bReady = _bReady;
                    self->bLoaded = _bLoaded;
                    self->bPlayStateBeforeLoad = _bPlayStateBeforeLoad;
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    return;
                }

                CMTime _duration = [asset duration];

                if(CMTimeCompare(_duration, kCMTimeZero) == 0) {
                    NSLog(@"track loaded with zero duration.");
                    // reset
                    self->bReady = _bReady;
                    self->bLoaded = _bLoaded;
                    self->bPlayStateBeforeLoad = _bPlayStateBeforeLoad;
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    return;
                }

                // TODO
                // why not reading infinite media?
                // how about playing back HLS streams?
                // BUG FIX: check the freshly loaded _duration, not
                // self->duration — the ivar still holds the previous video's
                // duration (often kCMTimeZero) at this point, so the check
                // never caught infinite media.
                if(isfinite(CMTimeGetSeconds(_duration)) == NO) {
                    NSLog(@"track loaded with infinite duration.");
                    // reset
                    self->bReady = _bReady;
                    self->bLoaded = _bLoaded;
                    self->bPlayStateBeforeLoad = _bPlayStateBeforeLoad;
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    return;
                }

                NSArray * videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
                if(!self->bStream && [videoTracks count] == 0) {
                    NSLog(@"no video tracks found.");
                    // reset
                    self->bReady = _bReady;
                    self->bLoaded = _bLoaded;
                    self->bPlayStateBeforeLoad = _bPlayStateBeforeLoad;
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    return;
                }

                //------------------------------------------------------------
                //------------------------------------------------------------ use asset
                // good to go
                [self->asyncLock lock];

                if (self->bIsUnloaded) {
                    // player was unloaded before we could load everything
                    self->bIsUnloaded = NO;
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    [self->asyncLock unlock];
                    return;
                }

                // clean up
                [self unloadVideoAsync]; // unload video if one is already loaded.

                self->bIsUnloaded = NO;

                // set asset
                self.asset = asset;
                self->duration = _duration;

                if (!self->bStream) {
                    // create asset reader
                    BOOL bOk = [self createAssetReaderWithTimeRange:CMTimeRangeMake(kCMTimeZero, self->duration)];
                    if(bOk == NO) {
                        NSLog(@"problem with creating asset reader.");
                        if(bAsync == NO){
                            dispatch_semaphore_signal(sema);
                        }
                        [self->asyncLock unlock];
                        return;
                    }

                    AVAssetTrack * videoTrack = [videoTracks objectAtIndex:0];
                    self->frameRate = videoTrack.nominalFrameRate;
                    self->videoWidth = [videoTrack naturalSize].width;
                    self->videoHeight = [videoTrack naturalSize].height;

                    NSLog(@"video file loaded at %li x %li @ %f fps", (long)self->videoWidth, (long)self->videoHeight, self->frameRate);
                }


                // currentTime = CMTimeMakeWithSeconds((1.0/frameRate), NSEC_PER_SEC);//kCMTimeZero;
                self->currentTime = CMTimeMakeWithSeconds(0.0, NSEC_PER_SEC);//kCMTimeZero;


                //------------------------------------------------------------ create player item.
                AVPlayerItem* playerItem = [AVPlayerItem playerItemWithAsset:self.asset];

                if (!playerItem) {
                    NSLog(@"could not create AVPlayerItem");
                    if(bAsync == NO){
                        dispatch_semaphore_signal(sema);
                    }
                    [self->asyncLock unlock];
                    return;
                }

                //------------------------------------------------------------ player item.
                self.playerItem = playerItem;
                [self.playerItem addObserver:self
                                  forKeyPath:kStatusKey
                                     options:0
                                     context:&ItemStatusContext];

                NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
                [notificationCenter addObserver:self
                                       selector:@selector(playerItemDidReachEnd)
                                           name:AVPlayerItemDidPlayToEndTimeNotification
                                         object:self.playerItem];

                //AVPlayerItemPlaybackStalledNotification only exists from OS X 10.9 or iOS 6.0 and up
#if (__MAC_OS_X_VERSION_MIN_REQUIRED >= 1090) || (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
                [notificationCenter addObserver:self
                                       selector:@selector(playerItemDidStall)
                                           name:AVPlayerItemPlaybackStalledNotification
                                         object:self.playerItem];
#endif

#if defined(USE_VIDEO_OUTPUT)
                // safety
                if (self.videoOutput == nil) {
                    [self createVideoOutput];
                }

                // add video output
                [self.playerItem addOutput:self.videoOutput];
#endif


                //------------------------------------------------------------ recreate player.
                // destroy player if any - should never be the case!!
                if(self.player != nil) {
                    [self removeTimeObserverFromPlayer];
                    [self.player removeObserver:self forKeyPath:kRateKey context:&PlayerRateContext];
                    self.player = nil;
                }

                // create new player
                self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
                [self.player addObserver:self
                              forKeyPath:kRateKey
                                 options:NSKeyValueObservingOptionNew
                                 context:&PlayerRateContext];
                // add timeobserver?
                [self addTimeObserverToPlayer];

                self->_player.volume = self->volume;

                // loaded
                self->bLoaded = true;

                if(bAsync == NO){
                    dispatch_semaphore_signal(sema);
                }

                [self->asyncLock unlock];

            }];
        }
    });

    // Wait for the dispatch semaphore signal
    if(bAsync == NO){
        dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
        return bLoaded;
    } else {
        return YES;
    }
}
371
372
373#pragma mark - unload video
/// Resets playback state immediately, then releases the player, player item,
/// asset reader and sample buffers on a background queue. Signals
/// deallocCond (if set) once cleanup is complete so -unloadVideo can block
/// until everything is torn down.
- (void)unloadVideoAsync {

    bIsUnloaded = YES;
    bReady = NO;
    bLoaded = NO;
    // bPlayStateBeforeLoad = NO;
    bUpdateFirstFrame = YES;
    bNewFrame = NO;
    bPlaying = NO;
    bFinished = NO;
    bWasPlayingBackwards = NO;

    videoSampleTime = kCMTimeNegativeInfinity;
    audioSampleTime = kCMTimeNegativeInfinity;
    synchSampleTime = kCMTimeInvalid;
    duration = kCMTimeZero;
    currentTime = kCMTimeZero;

    videoWidth = 0;
    videoHeight = 0;


    // a reference to all the variables for the block
    // (the block below takes ownership of these objects; the ivars are
    // nil-ed right away so the player looks unloaded immediately.)
    __block AVAsset* currentAsset = _asset;
    __block AVAssetReader* currentReader = _assetReader;
    __block AVAssetReaderTrackOutput* currentVideoTrack = _assetReaderVideoTrackOutput;
    __block AVAssetReaderTrackOutput* currentAudioTrack = _assetReaderAudioTrackOutput;
    __block AVPlayerItem* currentItem = _playerItem;
    __block AVPlayer* currentPlayer = _player;
    __block id currentTimeObserver = timeObserver;

    __block CMSampleBufferRef currentVideoSampleBuffer = videoSampleBuffer;
    __block CMSampleBufferRef currentAudioSampleBuffer = audioSampleBuffer;

#if defined(USE_VIDEO_OUTPUT)
    __block AVPlayerItemVideoOutput* currentVideoOutput = _videoOutput;
    __block CMVideoFormatDescriptionRef currentVideoInfo = _videoInfo;

    _videoOutput = nil;
    self.videoOutput = nil;

    _videoInfo = nil;
#endif

    // set all to nil
    // cleanup happens in the block
    // (both the ivar and the property are cleared; redundant but harmless.)
    _asset = nil;
    self.asset = nil;

    _assetReader = nil;
    self.assetReader = nil;

    _assetReaderVideoTrackOutput = nil;
    self.assetReaderVideoTrackOutput = nil;

    _assetReaderAudioTrackOutput = nil;
    self.assetReaderAudioTrackOutput = nil;

    _playerItem = nil;
    self.playerItem = nil;

    _player = nil;
    self.player = nil;
    timeObserver = nil;

    videoSampleBuffer = nil;
    audioSampleBuffer = nil;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{

        @autoreleasepool {

            [self->asyncLock lock];

            // relase assetreader
            if (currentReader != nil) {
                [currentReader cancelReading];
                currentReader = nil;

                if (currentVideoTrack != nil) {
                    currentVideoTrack = nil;
                }

                if (currentAudioTrack != nil) {
                    currentAudioTrack = nil;
                }
            }

            // release asset
            if (currentAsset != nil) {
                [currentAsset cancelLoading];
                currentAsset = nil;
            }


            // release current player item
            if(currentItem != nil) {

                [currentItem cancelPendingSeeks];
                // balance the addObserver from -loadWithURL:async:.
                [currentItem removeObserver:self forKeyPath:kStatusKey context:&ItemStatusContext];

                NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
                [notificationCenter removeObserver:self
                                              name:AVPlayerItemDidPlayToEndTimeNotification
                                            object:currentItem];

                //AVPlayerItemPlaybackStalledNotification only exists from OS X 10.9 or iOS 6.0 and up
#if (__MAC_OS_X_VERSION_MIN_REQUIRED >= 1090) || (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
                [notificationCenter removeObserver:self
                                              name:AVPlayerItemPlaybackStalledNotification
                                            object:currentItem];
#endif

#if defined(USE_VIDEO_OUTPUT)
                // remove output
                [currentItem removeOutput:currentVideoOutput];

                // release videouOutput
                if (currentVideoOutput != nil) {
                    currentVideoOutput = nil;
                }

                // destroy video info (CF object, needs an explicit release.)
                if (currentVideoInfo != nil) {
                    CFRelease(currentVideoInfo);
                    currentVideoInfo = nil;
                }
#endif

                currentItem = nil;
            }


            // destroy current player
            if (currentPlayer != nil) {
                [currentPlayer removeObserver:self forKeyPath:kRateKey context:&PlayerRateContext];

                if (currentTimeObserver != nil) {
                    [currentPlayer removeTimeObserver:currentTimeObserver];
                    currentTimeObserver = nil;
                }

                currentPlayer = nil;
            }


            if(currentVideoSampleBuffer) {
                CFRelease(currentVideoSampleBuffer);
                currentVideoSampleBuffer = nil;
            }

            if(currentAudioSampleBuffer) {
                CFRelease(currentAudioSampleBuffer);
                currentAudioSampleBuffer = nil;
            }

            [self->asyncLock unlock];

            // wake up -unloadVideo if it is waiting for us.
            if (self->deallocCond != nil) {
                [self->deallocCond lock];
                [self->deallocCond signal];
                [self->deallocCond unlock];
            }
        }
    });

}
541
/// Synchronous unload: starts the async cleanup and blocks on a condition
/// variable until the background block in -unloadVideoAsync signals that all
/// resources have been released. Used from -dealloc.
- (void)unloadVideo
{
    // create a condition
    deallocCond = [[NSCondition alloc] init];
    [deallocCond lock];

    // unload current video
    [self unloadVideoAsync];

    // wait for unloadVideoAsync to finish
    [deallocCond wait];
    [deallocCond unlock];

    deallocCond = nil;
}
557
/// Public teardown: kicks off the asynchronous unload. Taking asyncLock here
/// ensures we do not race a load that is currently inside its critical
/// section; the actual cleanup runs later on a background queue.
- (void)close
{
    [asyncLock lock];
    [self unloadVideoAsync];
    [asyncLock unlock];
}
564
565
566#pragma mark -
/**
 * (Re)creates the AVAssetReader plus the video/audio track outputs and starts
 * reading from `timeRange`.
 * @return NO if the reader itself could not be created. A startReading
 *         failure is only logged and still returns YES, matching the
 *         behaviour existing callers rely on.
 */
- (BOOL)createAssetReaderWithTimeRange:(CMTimeRange)timeRange {

    // reset sample clocks so the first copied sample always counts as new.
    videoSampleTime = videoSampleTimePrev = kCMTimeNegativeInfinity;
    audioSampleTime = kCMTimeNegativeInfinity;


    NSError *error = nil;

    // safety: drop any reader left over from a previous time range.
    if (self.assetReader != nil) {
        [self.assetReader cancelReading];
        self.assetReader = nil;
    }

    // create new asset reader.
    // BUG FIX: test the returned object rather than `if(error)` — Cocoa only
    // guarantees the error out-param is meaningful when the call fails.
    self.assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
    if(self.assetReader == nil) {
        NSLog(@"assetReader: error during initialisation: %@", [error localizedDescription]);
        return NO;
    }

    // set timerange
    self.assetReader.timeRange = timeRange;


    //------------------------------------------------------------ add video output.
    if (bSampleVideo) {
        NSMutableDictionary * videoOutputSettings = [[NSMutableDictionary alloc] init];
#ifdef TARGET_IOS
        [videoOutputSettings setObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
#elif defined(TARGET_OSX)
        [videoOutputSettings setObject:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB]
                                forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
#endif


        NSArray * videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
        if([videoTracks count] > 0) {
            AVAssetTrack * videoTrack = [videoTracks objectAtIndex:0];
            self.assetReaderVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                          outputSettings:videoOutputSettings];

            if(self.assetReaderVideoTrackOutput != nil) {

                // dont copy sample data (might improve performance)
                self.assetReaderVideoTrackOutput.alwaysCopiesSampleData = NO;

                if([self.assetReader canAddOutput:self.assetReaderVideoTrackOutput]) {
                    [self.assetReader addOutput:self.assetReaderVideoTrackOutput];
                } else {
                    NSLog(@"assetReaderVideoTrackOutput cannot be add to assetReader");
                }

            } else {
                NSLog(@"assetReaderVideoTrackOutput failed to load.");
            }
        }
    }

    //------------------------------------------------------------ add audio output.
    if (bSampleAudio) {
        // BUG FIX: the hardware sample rate was queried but the result
        // discarded; use it on iOS and fall back to 44100 elsewhere.
        double preferredHardwareSampleRate = 44100;
#ifdef TARGET_IOS
        preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#endif

        AudioChannelLayout channelLayout;
        bzero(&channelLayout, sizeof(channelLayout));
        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

        int numOfChannels = 1;
        if(channelLayout.mChannelLayoutTag == kAudioChannelLayoutTag_Stereo) {
            numOfChannels = 2;
        }

        NSDictionary * audioOutputSettings = nil;
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                               [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
#ifdef IS_OS_6_OR_LATER
                               // including AVNumberOfChannelsKey & AVChannelLayoutKey on iOS5 causes a crash,
                               // check if iOS6 or greater before including these.
                               // FIXME: IS_OS_6_OR_LATER expands to a runtime expression,
                               // so this #ifdef is always true and does NOT perform the
                               // intended iOS5 guard (compile-time vs runtime check).
                               [NSNumber numberWithInt:numOfChannels], AVNumberOfChannelsKey,
                               [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
#endif
                               [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                               [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                               [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                               [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                               nil];

        NSArray * audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
        if([audioTracks count] > 0) {
            AVAssetTrack * audioTrack = [audioTracks objectAtIndex:0];

            self.assetReaderAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                                                                          outputSettings:audioOutputSettings];

            if(self.assetReaderAudioTrackOutput != nil) {

                // dont copy sample data
                self.assetReaderAudioTrackOutput.alwaysCopiesSampleData = NO;

                if([self.assetReader canAddOutput:self.assetReaderAudioTrackOutput]) {
                    [self.assetReader addOutput:self.assetReaderAudioTrackOutput];
                } else {
                    NSLog(@"assetReaderAudioTrackOutput cannot be add to assetReader");
                }

            } else {
                NSLog(@"assetReaderAudioTrackOutput failed to load.");
            }
        }
    }

    //------------------------------------------------------------ start reading.
    BOOL bOk = [self.assetReader startReading];
    if(!bOk ) {
        NSLog(@"assetReader couldn't startReading: %@", [self.assetReader error]);
    }

    return YES;
}
692
693
694//---------------------------------------------------------- player callbacks.
/**
 * KVO callback for player-item status and player rate changes. The static
 * context pointers identify our own observations; anything else is forwarded
 * to super, as required by the NSKeyValueObserving contract.
 */
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {

    if(context == &ItemStatusContext) {

        if (object == self.playerItem) {

            if ([self.playerItem status] == AVPlayerItemStatusReadyToPlay) {

                if (bReady) {
                    return; // already handled a previous ready notification.
                }

                bReady = true;

                // set volume for current video
                [self setVolume:volume];

                // set speed for current video
                [self setSpeed:speed];

                // set start-frame
                if (frameBeforeReady > 0) {
                    [self setFrame:frameBeforeReady];
                }

                // set start-position
                if (positionBeforeReady > 0.F) {
                    [self setPosition:positionBeforeReady];
                }

                // auto-play or play if started before being ready
                if(bAutoPlayOnLoad || bPlayStateBeforeLoad) {
                    [self play];
                } else {
                    [self pause];
                }

                // update as soon as ready so pixels are loaded.
                [self update];


            } else if ([self.playerItem status] == AVPlayerItemStatusUnknown) {
                NSLog(@"AVPlayerItemStatusUnknown");
            } else if ([self.playerItem status] == AVPlayerItemStatusFailed) {
                NSLog(@"AVPlayerItemStatusFailed");
            } else {
                NSLog(@"AVPlayerItem: such status: %ld", (long)[self.playerItem status]);
            }

        } else {
            // ignore other objects
        }

        return;
    } else if (context == &PlayerRateContext) {

        if (object == self.player) {

            if (bReady &&
                [keyPath isEqualToString:kRateKey])
            {
                // FIX: use the NSKeyValueChangeNewKey constant instead of the
                // hard-coded @"new" string (same value, but type-checked and
                // documented by the KVO API).
                float rate = [[change objectForKey:NSKeyValueChangeNewKey] floatValue];
                bPlaying = (rate != 0);
            }
        } else {
            // ignore other object
        }

        return;
    }

    // push it up the observer chain
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
}
772
/// AVPlayerItemDidPlayToEndTimeNotification handler. Depending on the loop
/// mode, either rewinds to the opposite boundary (LOOP_NORMAL), reverses the
/// playback direction (LOOP_PALINDROME), or simply stops (LOOP_NONE).
- (void)playerItemDidReachEnd {

    bFinished = YES;
    bPlaying = NO;

    if (speed != 0.0) {
        BOOL playingForward = (speed > 0.0);

        if (loop == LOOP_NORMAL) {
            // jump back to the boundary we started from, then keep going.
            if (playingForward) {
                [self seekToStart];
            } else {
                [self seekToEnd];
            }
            [self play];
        } else if (loop == LOOP_PALINDROME) {
            // bounce: flip direction and continue from where we are.
            [self setSpeed:-speed];
            [self play];
        }
    }

    // while looping, the movie never counts as finished.
    if(loop > LOOP_NONE) {
        bFinished = NO;
    }
}
804
805
/// AVPlayerItemPlaybackStalledNotification handler: media data did not
/// arrive in time to keep playing (mostly relevant for streams).
- (void)playerItemDidStall {
    NSLog(@"playerItem did stall - samples did not arrive in time");
}
809
810
811//---------------------------------------------------------- update.
/// Per-frame tick: pulls the newest decoded sample and sets bNewFrame.
/// Driven either by the periodic time observer or externally when
/// bWillBeUpdatedExternally is set.
- (void)update {

    /**
     * return if,
     * video is not yet loaded,
     * video is finished playing.
     */
    if(!bReady || bFinished) {
        bNewFrame = NO;
        return;
    }



#if !defined(USE_VIDEO_OUTPUT)
    [self updateFromAssetReader];
#else
    // get new sample
    if (!bStream && self.player.rate > 0.0) {
        // playing forward
        // pull out frames from assetreader
        [self updateFromAssetReader];
    } else {
        // playing paused or playing backwards
        // get samples from videooutput
        [self updateFromVideoOutput];
        if (bNewFrame) {
            videoSampleTimePrev = videoSampleTime;
        }
    }
#endif
}
844
#if defined(USE_VIDEO_OUTPUT)
/// Pulls the newest pixel buffer from the AVPlayerItemVideoOutput (used when
/// paused, scrubbing, streaming, or playing backwards) and wraps it in a
/// CMSampleBuffer stored in videoSampleBuffer. Sets bNewFrame accordingly.
- (void)updateFromVideoOutput {
    OSStatus err = noErr;

    // get time from player
    CMTime time = [_player currentTime];

    if ([self.videoOutput hasNewPixelBufferForItemTime:time]) {

        bNewFrame = YES;
        currentTime = time;

        // get buffer (retained; released below via CVBufferRelease).
        // NOTE(review): copyPixelBufferForItemTime: can return NULL even after
        // hasNewPixelBufferForItemTime: returned YES — that case is not
        // handled here; confirm whether a NULL guard is needed.
        CVPixelBufferRef buffer = [self.videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL];

        // set videosize in case it is not set yet
        if (videoWidth == 0 || videoHeight == 0) {
            CGSize presentationSize = _playerItem.presentationSize;
            videoWidth = presentationSize.width;
            videoHeight = presentationSize.height;
        }

        // create or update video format description
        // (recreated whenever the cached one no longer matches the buffer.)
        if (!_videoInfo || !CMVideoFormatDescriptionMatchesImageBuffer(_videoInfo, buffer)) {
            if (_videoInfo) {
                CFRelease(_videoInfo);
                _videoInfo = nil;
            }
            err = CMVideoFormatDescriptionCreateForImageBuffer(NULL, buffer, &_videoInfo);
        }
        if (err) {
            NSLog(@"Error at CMVideoFormatDescriptionCreateForImageBuffer %ld", (long)err);
            bNewFrame = NO;
            // release temp buffer
            CVBufferRelease(buffer);
            return;
        }

        // decodeTimeStamp is set to kCMTimeInvalid since we already receive decoded frames
        CMSampleTimingInfo sampleTimingInfo = {
            .duration = kCMTimeInvalid,
            .presentationTimeStamp = time,
            .decodeTimeStamp = kCMTimeInvalid
        };


        // release old buffer.
        if(videoSampleBuffer) {
            CFRelease(videoSampleBuffer);
            videoSampleBuffer = nil;
        }

        // create new sampleBuffer
        err = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                 buffer,
                                                 true,
                                                 NULL,
                                                 NULL,
                                                 _videoInfo,
                                                 &sampleTimingInfo,
                                                 &videoSampleBuffer);
        if (err) {
            NSLog(@"Error at CMSampleBufferCreateForImageBuffer %ld", (long)err);
            bNewFrame = NO;
            // release temp buffer
            CVBufferRelease(buffer);
            return;
        }

        // release temp buffer (the sample buffer retains it).
        CVBufferRelease(buffer);

        videoSampleTime = time;


    } else {
        // no new frame for time
        bNewFrame = NO;
    }
}
#endif
926
/// Pulls decoded video (and optionally audio) samples from the AVAssetReader
/// until their timestamps catch up with the player's current time. Sets
/// bNewFrame when a newer video sample was copied into videoSampleBuffer.
- (void)updateFromAssetReader {

    /**
     * in most cases we check at what time the video player is up to,
     * and use the time for sampling buffers in the code below.
     * but if a synchSampleTime is provided then we use it instead of video player time.
     * synchSampleTime is used when synchronous time is needed (video player plays asynchronously),
     * such as when needing to access the video frames, frame by frame.
     * synchSampleTime is cleared (invalidated) on every frame so it must be set before calling update.
     */
    CMTime time = kCMTimeZero;
    if(CMTIME_IS_VALID(synchSampleTime)) {
        time = synchSampleTime;
        time = CMTimeMaximum(time, kCMTimeZero);
        time = CMTimeMinimum(time, duration);
        synchSampleTime = kCMTimeInvalid;
    } else {
        time = [_player currentTime];
    }

    BOOL bTimeChanged = CMTimeCompare(time, currentTime) != 0;
    currentTime = time;

    if(bUpdateFirstFrame) {

        // this forces the first frame to be updated.
        // here the values for time and currentTime are both zero.
        // so this is to get around the progress check below.

        bUpdateFirstFrame = NO;

    } else if(bTimeChanged == NO) {

        // current time has not changed,
        // so the video has not progressed.

        if(self.assetReader != nil) {

            // check that assetReader has been created.
            // if assetReader is nil, the video must still be seeking to a new position.
            // so even though the time has not changed, the assetReader needs to be created.

            bNewFrame = NO;
            return;
        }
    }

    if(self.assetReader == nil) {
        if(bSeeking == true) {

            // video player is seeking to new position.
            // asset reader can only be created when seeking has finished.

            bNewFrame = NO;
            return;
        }

        // recreate the reader starting at the current playhead position.
        [self createAssetReaderWithTimeRange:CMTimeRangeMake(currentTime, duration)];
    }

    if (self.assetReader.status == AVAssetReaderStatusFailed) {
        NSLog(@"assetReader error: %@", self.assetReader.error);
    }

    if(self.assetReader.status != AVAssetReaderStatusReading)
    {
        bNewFrame = NO;
        return;
    }

    //---------------------------------------------------------- video buffer.
    BOOL bCopiedNewSamples = NO;
    while(bSampleVideo == true && // video sampling is on.
          self.assetReaderVideoTrackOutput != nil && // asset has a video track.
          self.assetReader.status == AVAssetReaderStatusReading && // asset read is in reading state.
          ((CMTimeCompare(videoSampleTime, currentTime) == -1) )) // timestamp is less then currentTime.

    {
        CMSampleBufferRef videoBufferTemp = nil;

        @try {
            videoBufferTemp = [self.assetReaderVideoTrackOutput copyNextSampleBuffer];
        } @catch (NSException * e) {
            NSLog(@"error: %@", e);
            break;
        }

        if(videoBufferTemp) {
            if(videoSampleBuffer) { // release old buffer.
                CFRelease(videoSampleBuffer);
                videoSampleBuffer = nil;
            }
            videoSampleBuffer = videoBufferTemp; // save reference to new buffer.

            videoSampleTime = CMSampleBufferGetPresentationTimeStamp(videoSampleBuffer);

            bCopiedNewSamples = YES;
        } else {
            // reader ran out of samples: reset the sample clocks so the next
            // update starts over (e.g. after looping back to the start).
            bNewFrame = NO;
            videoSampleTime = videoSampleTimePrev = kCMTimeNegativeInfinity;
            bUpdateFirstFrame = YES;
            break;
        }
    }

    //---------------------------------------------------------- audio buffer.
    while(bSampleAudio == true && // audio sampling is on.
          self.assetReaderAudioTrackOutput != nil && // asset has a audio track.
          self.assetReader.status == AVAssetReaderStatusReading && // asset read is in reading state.
          ((CMTimeCompare(audioSampleTime, currentTime) == -1) || // timestamp is less then currentTime.
           (CMTimeCompare(audioSampleTime, currentTime) == 0))) // timestamp is equal currentTime.
    {
        CMSampleBufferRef audioBufferTemp;
        @try {
            audioBufferTemp = [self.assetReaderAudioTrackOutput copyNextSampleBuffer];
        } @catch (NSException * e) {
            break;
        }

        if(audioBufferTemp) {
            if(audioSampleBuffer) { // release old buffer.
                CFRelease(audioSampleBuffer);
                audioSampleBuffer = nil;
            }
            audioSampleBuffer = audioBufferTemp; // save reference to new buffer.

            audioSampleTime = CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer);
        } else {
            audioSampleTime = kCMTimeNegativeInfinity;
            break;
        }
    }

    if(bCopiedNewSamples == true) {
        // only report a new frame when the sample time actually advanced.
        bNewFrame = CMTimeCompare(videoSampleTime, videoSampleTimePrev) == 1;

        if(bNewFrame) {
            videoSampleTimePrev = videoSampleTime;
        }
    }
}
1068
1069
/// Installs a periodic time observer on the player that drives -update once
/// per video frame (unless the host app updates the player externally).
- (void)addTimeObserverToPlayer {
    if(bWillBeUpdatedExternally) {
        // the host app calls -update itself; no observer needed.
        return;
    }

    if(timeObserver != nil){
        return; // already installed.
    }

    // BUG FIX: guard against frameRate <= 0 (e.g. streams before metadata
    // arrives) which produced a 1/0 = inf observer interval; fall back to a
    // sane 30 fps cadence in that case.
    double fps = (frameRate > 0) ? (double)frameRate : 30.0;
    double interval = 1.0 / fps;

    // BUG FIX: capture self weakly. The player retains the observer block and
    // self retains the player, so the previous strong __block capture created
    // a retain cycle that kept the whole player object alive.
    __weak ofAVFoundationVideoPlayer* weakSelf = self;
    timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(interval, NSEC_PER_SEC)
                                                         queue:dispatch_get_main_queue()
                                                    usingBlock:^(CMTime time) {
                                                        [weakSelf update];
                                                    }];
}
1088
/// Detaches the periodic time observer, if one was installed.
- (void)removeTimeObserverFromPlayer {
    if (timeObserver == nil) {
        return;
    }
    [_player removeTimeObserver:timeObserver];
    timeObserver = nil;
}
1095
1096//---------------------------------------------------------- play / pause.
/// Starts playback. If the movie is not ready yet, remember the request so
/// playback starts automatically once loading completes.
- (void)play {
    if (![self isReady]) {
        bPlayStateBeforeLoad = YES;
        return;
    }
    if (![self isPlaying]) {
        [self togglePlayPause];
    }
}
1106
/// Pauses playback. If the movie is not ready yet, clear any pending
/// play-on-load request instead.
- (void)pause {
    if (![self isReady]) {
        bPlayStateBeforeLoad = NO;
        return;
    }
    if ([self isPlaying]) {
        [self togglePlayPause];
    }
}
1116
/// Flips the playing flag and applies it to the AVPlayer. Restarting after
/// the movie finished rewinds to the beginning first.
- (void)togglePlayPause {
    bIsStopped = NO;

    bPlaying = !bPlaying;
    if (!bPlaying) {
        [_player pause];
        return;
    }

    if ([self isFinished]) {
        [self seekToStart];
        bFinished = NO;
    }
    // setRate: both starts playback and applies the playback speed/direction.
    [_player setRate:speed];
}
1131
/// Stops playback: rewinds to position 0, pauses, and marks the player as
/// stopped (distinguishes a stop from a pause for -isPaused).
- (void)stop {
    [self setPosition:0];
    [self pause];
    bIsStopped = YES;
}
1137
/// Steps the player forwards/backwards by `frames` frames. Only valid once
/// ready. With USE_VIDEO_OUTPUT the player item steps natively; otherwise we
/// seek by one nominal frame duration relative to the current time.
/// NOTE(review): the non-USE_VIDEO_OUTPUT path steps exactly ONE frame
/// regardless of |frames| — confirm whether multi-frame stepping is intended.
- (void)stepByCount:(long)frames
{
    if(![self isReady]) {
        return;
    }

#if defined(USE_VIDEO_OUTPUT)
    [_player.currentItem stepByCount:frames];
#else
    if (frames < 0) {

        // step back: exact seek (zero tolerance); -seekToTime:withTolerance:
        // destroys the asset reader so it gets rebuilt at the new position.
        double timeSec = CMTimeGetSeconds(currentTime) - (1.0/frameRate);
        [self seekToTime:CMTimeMakeWithSeconds(timeSec, NSEC_PER_SEC) withTolerance:kCMTimeZero];

    } else if (![self isFinished] && frames > 0) {

        double timeSec = CMTimeGetSeconds(currentTime) + (1.0/frameRate);
        CMTime time = CMTimeMakeWithSeconds(timeSec, NSEC_PER_SEC);

        bSeeking = YES;
        // clamp to the movie's time range.
        time = CMTimeMaximum(time, kCMTimeZero);
        time = CMTimeMinimum(time, duration);

        // frames are preloaded, only seek player
        [_player seekToTime:time
            toleranceBefore:kCMTimePositiveInfinity
             toleranceAfter:kCMTimePositiveInfinity
          completionHandler:^(BOOL finished) {
              bSeeking = NO;
          }];
    }
#endif
}
1171
1172//---------------------------------------------------------- seek.
/// Seeks to the very beginning of the movie (exact, zero tolerance).
- (void)seekToStart {
    [self seekToTime:kCMTimeZero withTolerance:kCMTimeZero];
}

/// Seeks to the end of the movie (exact, zero tolerance).
- (void)seekToEnd {
    [self seekToTime:duration withTolerance:kCMTimeZero];
}

/// Seeks with default (infinite) tolerance — fastest, least precise.
- (void)seekToTime:(CMTime)time {
    [self seekToTime:time withTolerance:kCMTimePositiveInfinity];
}
1184
/// Seeks the AVPlayer to `time` (clamped to [0, duration]) with the given
/// tolerance. Destroys the current asset reader; a new one is created by
/// -updateFromAssetReader once seeking completes (bSeeking is cleared in the
/// completion handler).
- (void)seekToTime:(CMTime)time
     withTolerance:(CMTime)tolerance {

    if(![self isReady]) {
        return;
    }

    if([self isFinished]) {
        bFinished = NO;
    }

    // TODO?
    // expensive call?
    // destroy it on a thread?
    [self.assetReader cancelReading];
    self.assetReader = nil;
    self.assetReaderVideoTrackOutput = nil;
    self.assetReaderAudioTrackOutput = nil;

    bSeeking = YES;

    // restrict time
    time = CMTimeMaximum(time, kCMTimeZero);
    time = CMTimeMinimum(time, duration);

    // if (!bStream && (CMTimeCompare(time, videoSampleTime) < 0)) {
    // if jumping back in time
    //[self createAssetReaderWithTimeRange:CMTimeRangeMake(time, duration)];
    // }

    // set reader to real requested time
    [_player seekToTime:time
        toleranceBefore:tolerance
         toleranceAfter:tolerance
      completionHandler:^(BOOL finished) {
          self->bSeeking = NO;
      }];
}
1223
1224//---------------------------------------------------------- states.
// YES once the player/item reached the ready state.
- (BOOL)isReady {
    return bReady;
}
1228
// YES once the movie has been loaded.
- (BOOL)isLoaded {
    return bLoaded;
}
1232
// YES while playback is active (flag toggled by togglePlayPause).
- (BOOL)isPlaying {
    return bPlaying;
}
1236
/// Paused means "not playing but not explicitly stopped" — stop sets
/// bIsStopped, pause does not.
- (BOOL)isPaused {
    // was `!bPlaying & !bIsStopped`: bitwise AND on BOOLs. Same result for
    // 0/1 values, but logical && is the correct and intended operator.
    return !bPlaying && !bIsStopped;
}
1240
// YES when a new video frame has been sampled since the last check.
- (BOOL)isNewFrame {
    return bNewFrame;
}
1244
// YES once playback reached the end (cleared by seeks / restart).
- (BOOL)isFinished {
    return bFinished;
}
1248
1249//---------------------------------------------------------- sampling getters / setters.
// Enables/disables sampling of video frames from the asset reader.
- (void)setEnableVideoSampling:(BOOL)value {
    bSampleVideo = value;
}
// Enables/disables sampling of audio buffers (off by default, see -init).
- (void)setEnableAudioSampling:(BOOL)value {
    bSampleAudio = value;
}
1256
// Sets the time the sampler should synchronize to (kCMTimeInvalid = none).
- (void)setSynchSampleTime:(CMTime)time {
    synchSampleTime = time;
}
1260
// Seconds-based convenience wrapper around setSynchSampleTime:.
- (void)setSynchSampleTimeInSec:(double)time {
    [self setSynchSampleTime:CMTimeMakeWithSeconds(time, NSEC_PER_SEC)];
}
1264
// Presentation time of the most recently sampled video frame.
- (CMTime)getVideoSampleTime {
    return videoSampleTime;
}
1268
// Video sample time converted to seconds.
- (double)getVideoSampleTimeInSec {
    return CMTimeGetSeconds(videoSampleTime);
}
1272
// Presentation time of the most recently sampled audio buffer.
- (CMTime)getAudioSampleTime {
    return audioSampleTime;
}
1276
// Audio sample time converted to seconds.
- (double)getAudioSampleTimeInSec {
    return CMTimeGetSeconds(audioSampleTime);
}
1280
// Raw video sample buffer; may be nil before the first frame is sampled.
- (CMSampleBufferRef)getVideoSampleBuffer {
    return videoSampleBuffer;
}
1284
// Raw audio sample buffer; may be nil before the first buffer is sampled.
- (CMSampleBufferRef)getAudioSampleBuffer {
    return audioSampleBuffer;
}
1288
/// Image buffer of the most recently sampled video frame, or NULL if no
/// frame has been sampled yet.
- (CVImageBufferRef)getCurrentFrame {
    // CMSampleBufferGetImageBuffer requires a non-NULL sample buffer;
    // videoSampleBuffer is initialized to nil and stays nil until the first
    // frame arrives, so guard against calling through with NULL.
    if (videoSampleBuffer == nil) {
        return NULL;
    }
    return CMSampleBufferGetImageBuffer(videoSampleBuffer);
}
1292
1293//---------------------------------------------------------- getters / setters.
// Width of the video in pixels (0 until known).
- (NSInteger)getWidth {
    return videoWidth;
}
1297
// Height of the video in pixels (0 until known).
- (NSInteger)getHeight {
    return videoHeight;
}
1301
// Current playback time as a CMTime.
- (CMTime)getCurrentTime {
    return currentTime;
}
1305
// Current time in seconds.
// NOTE(review): this reads videoSampleTime (the displayed frame's time), not
// the `currentTime` ivar returned by getCurrentTime — presumably intentional
// so frame-number queries track the sampled frame; confirm before changing.
- (double)getCurrentTimeInSec {
    return CMTimeGetSeconds(videoSampleTime);
}
1309
// Total duration as a CMTime (kCMTimeZero until loaded).
- (CMTime)getDuration {
    return duration;
}
1313
// Total duration in seconds.
- (double)getDurationInSec {
    return CMTimeGetSeconds(duration);
}
1317
// Nominal frame rate of the video track (0 until known).
- (float)getFrameRate {
    return frameRate;
}
1321
// Total number of frames, truncated from duration * frameRate.
- (int)getDurationInFrames {
    return [self getDurationInSec] * [self getFrameRate];
}
1325
// Frame index at the current (sampled) time, truncated to int.
- (int)getCurrentFrameNum {
    return [self getCurrentTimeInSec] * [self getFrameRate];
}
1329
/// Seeks to a normalized position (0..1 of the duration). If the player is
/// not ready yet, the request is remembered and applied after loading.
- (void)setPosition:(float)position {
    if (![self isReady]) {
        // defer: applied once the asset becomes ready
        positionBeforeReady = position;
        frameBeforeReady = 0;
        return;
    }
    double targetSec = [self getDurationInSec] * position;
    [self seekToTime:CMTimeMakeWithSeconds(targetSec, NSEC_PER_SEC)];
}
1339
/// Seeks to a specific frame number by converting it to a normalized
/// position. If the player is not ready yet, the request is deferred.
- (void)setFrame:(int)frame {
    if (![self isReady]) {
        // defer: applied once the asset becomes ready
        frameBeforeReady = frame;
        positionBeforeReady = 0.F;
        return;
    }
    float normalized = frame / (float)[self getDurationInFrames];
    [self setPosition:normalized];
}
1349
/// Normalized playback position (current time / duration), or 0 when the
/// duration is not yet known.
- (float)getPosition {
    double durationSec = [self getDurationInSec];
    if (durationSec <= 0.0) {
        // before load, duration is kCMTimeZero — the old 0/0 returned NaN
        return 0.0f;
    }
    return (float)([self getCurrentTimeInSec] / durationSec);
}
1353
/// Stores the requested volume and, when the player item exists and is
/// ready, applies it to the AVPlayer immediately.
- (void)setVolume:(float)value {
    // always remember the value so it can be applied after loading
    volume = value;

    if ([self isReady] && self.playerItem != nil) {
        _player.volume = volume;
    }
}
1368
// Last requested volume (not necessarily yet applied to the player).
- (float)getVolume {
    return volume;
}
1372
// Sets the loop mode (LOOP_NONE by default, see -init).
- (void)setLoop:(playerLoopType)value {
    loop = value;
}
1376
// Current loop mode.
- (playerLoopType)getLoop {
    return loop;
}
1380
/// Sets the playback rate. Negative rates request reverse playback, which
/// requires the video-output path (USE_VIDEO_OUTPUT) and a codec/item that
/// supports it; unsupported reverse requests are logged and clamped to 0.
/// The speed ivar keeps the requested value even when not ready yet.
- (void)setSpeed:(float)value {

    speed = value;

    // remember only: applied later once the player is ready
    if (![self isReady]) {
        return;
    }

    if (_player == nil) {
        return;
    }

    if (!bStream && !bSeeking && bWasPlayingBackwards && value > 0.0) {
        // create assetReaders if we played backwards earlier
        [self createAssetReaderWithTimeRange:CMTimeRangeMake(currentTime, duration)];
        bWasPlayingBackwards = NO;
    }

    // entering reverse playback: validate that it is possible at all
    if (!bWasPlayingBackwards && value < 0.0) {

#if !defined(USE_VIDEO_OUTPUT)
        // not supported
        NSLog(@"ERROR: Backwards playback is not supported. Minimum requirement is OSX 10.8 or iOS 6.0");
        value = 0.0;
#else
        if (!self.playerItem.canPlayReverse) {
            if (!bStream) {
                NSLog(@"ERROR: can not play backwards: not supported (check your codec)");
            } else {
                NSLog(@"ERROR: can not play backwards a stream");
            }

            value = 0.0;
        }
        if (self.videoOutput == nil) {
            NSLog(@"ERROR: can not play backwards: no video output");
            value = 0.0;
        }
#endif
    }

    if (value < 0.0) {
        bWasPlayingBackwards = YES;
    }

    [_player setRate:value];
}
1428
// Last requested playback rate.
- (float)getSpeed {
    return speed;
}
1432
// When YES, playback starts automatically once loading completes.
- (void)setAutoplay:(BOOL)value {
    bAutoPlayOnLoad = value;
}
1436
// Current autoplay-on-load setting.
- (BOOL)getAutoplay {
    return bAutoPlayOnLoad;
}
1440
// When YES, an external owner drives the update loop instead of this class.
- (void)setWillBeUpdatedExternally:(BOOL)value {
    bWillBeUpdatedExternally = value;
}
1444
// Marks the source as a stream (disables asset-reader based features such
// as reverse playback — see setSpeed:).
- (void)setStreaming:(BOOL)value {
    bStream = value;
}
1448
1449@end
1450