framework2

Fork
0
/
ofAVFoundationVideoPlayer.m
1449 lines · 35.3 KB
1
//
2
//  ofAVFoundationVideoPlayer.m
3
//  Created by Lukasz Karluk on 06/07/14.
4
//	Merged with code by Sam Kronick, James George and Elie Zananiri.
5
//
6

7
#import "ofAVFoundationVideoPlayer.h"
8

9
#define IS_OS_6_OR_LATER    ([[[UIDevice currentDevice] systemVersion] floatValue] >= 6.0)
10

11

12

13
static NSString * const kTracksKey = @"tracks";
14
static NSString * const kStatusKey = @"status";
15
static NSString * const kRateKey = @"rate";
16

17
//---------------------------------------------------------- video player.
18
@implementation ofAVFoundationVideoPlayer
19

20
// KVO context pointers. Only the *addresses* of these variables are compared
// in -observeValueForKeyPath:..., so the stored values are never read.
static const void *ItemStatusContext = &ItemStatusContext;
// BUGFIX: was `= &ItemStatusContext` (copy-paste); each context should
// self-reference so the values are at least consistent with their intent.
static const void *PlayerRateContext = &PlayerRateContext;
22

23

24
- (instancetype)init {
	self = [super init];
	if(self) {
		// player & synchronization primitives
		_player = nil;
		asyncLock = [[NSLock alloc] init];
		deallocCond = nil;
		timeObserver = nil;

#if defined(USE_VIDEO_OUTPUT)
		// video output is created lazily in -createVideoOutput
		_videoOutput = nil;
		_videoInfo = nil;
#endif

		// sample buffers & timestamps
		videoSampleBuffer = nil;
		audioSampleBuffer = nil;
		videoSampleTime = kCMTimeNegativeInfinity;
		audioSampleTime = kCMTimeNegativeInfinity;
		synchSampleTime = kCMTimeInvalid;
		duration = kCMTimeZero;
		currentTime = kCMTimeZero;

		// playback parameters
		volume = 1;
		speed = 1;
		frameRate = 0;
		videoWidth = 0;
		videoHeight = 0;
		loop = LOOP_NONE;

		// state flags
		bWillBeUpdatedExternally = NO;
		bReady = NO;
		bLoaded = NO;
		bPlayStateBeforeLoad = NO;
		bUpdateFirstFrame = YES;
		bNewFrame = NO;
		bPlaying = NO;
		bWasPlayingBackwards = NO;
		bFinished = NO;
		bAutoPlayOnLoad = NO;
		bSeeking = NO;
		bSampleVideo = YES;
		bIsUnloaded = NO;
		frameBeforeReady = 0;
		positionBeforeReady = 0.F;

		// do not sample audio by default —
		// there is no interface exposing the audio data yet.
		bSampleAudio = NO;

		bStream = NO;
	}
	return self;
}
82

83
#if defined(USE_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull decoded pixel buffers.
// Pixel format differs per platform: BGRA on iOS, ARGB on macOS.
- (void)createVideoOutput
{
#ifdef TARGET_IOS
	NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
#elif defined(TARGET_OSX)
	NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB)};
#endif

	self.videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
	if (self.videoOutput == nil) {
		NSLog(@"error creating video output");
		return;
	}

	// frames are pulled manually in -updateFromVideoOutput,
	// so keep the player from rendering them on its own.
	self.videoOutput.suppressesPlayerRendering = YES;
}
#endif
101

102

103
//---------------------------------------------------------- cleanup / dispose.
104
// Tears the player down synchronously before the object goes away.
- (void)dealloc
{
	if (_player != nil){
		// blocks until the async cleanup block in -unloadVideoAsync has run
		[self unloadVideo];
	}
	
	// acquiring and immediately releasing the lock acts as a barrier:
	// it waits for any async block still holding asyncLock to finish.
	[asyncLock lock];

	[asyncLock unlock];
	
	// release the dealloc condition (if -unloadVideo left one behind)
	if (deallocCond != nil) {
		deallocCond = nil;
	}
}
119

120

121

122
//---------------------------------------------------------- load / unload.
123
// Loads a video bundled as an app resource, given "name.extension".
// Returns NO immediately on failure; for async loading see -loadWithURL:async:.
- (BOOL)loadWithFile:(NSString*)file async:(BOOL)bAsync{
	// BUGFIX: the previous implementation split on "." and used components
	// 0 and 1, which broke any file name containing extra dots
	// (e.g. "my.video.mp4" looked up resource "my" with extension "video").
	// NSString's path helpers split on the *last* dot only.
	NSString * resourceName = [file stringByDeletingPathExtension];
	NSString * resourceExtension = [file pathExtension];
	NSURL * fileURL = [[NSBundle mainBundle] URLForResource:resourceName
											  withExtension:resourceExtension];
	
	return [self loadWithURL:fileURL async:bAsync stream:NO];
}
130

131
// Loads a video from an absolute filesystem path (non-streaming).
- (BOOL)loadWithPath:(NSString*)path async:(BOOL)bAsync{
	return [self loadWithURL:[NSURL fileURLWithPath:path] async:bAsync stream:NO];
}
135

136
// Loads a video from a URL, remembering whether it is a network stream
// (streams skip the asset-reader pipeline) before delegating to the loader.
- (BOOL)loadWithURL:(NSURL*)url async:(BOOL)bAsync stream:(BOOL)isStream {
	bStream = isStream;
	return [self loadWithURL:url async:bAsync];
}
140

141
// Loads a video asset from a URL.
// When bAsync is NO this blocks (on a semaphore) until loading finishes and
// returns whether the asset loaded; when bAsync is YES it returns YES
// immediately and loading completes on the main queue.
- (BOOL)loadWithURL:(NSURL*)url async:(BOOL)bAsync {
	
	NSDictionary *options = @{(id)AVURLAssetPreferPreciseDurationAndTimingKey:@(YES)};
	AVURLAsset* asset = [AVURLAsset URLAssetWithURL:url options:options];
	
	if(asset == nil) {
		NSLog(@"error loading asset: %@", [url description]);
		return NO;
	}
	
	// store state so it can be restored if loading fails
	BOOL _bReady = bReady;
	BOOL _bLoaded = bLoaded;
	BOOL _bPlayStateBeforeLoad = bPlayStateBeforeLoad;
	
	// reset internal state for the new load
	bIsUnloaded = NO;
	bReady = NO;
	bLoaded = NO;
	bIsStopped = YES;
	bPlayStateBeforeLoad = NO;
	frameBeforeReady = 0;
	positionBeforeReady = 0.F;
	
	// sync loads wait on this semaphore; async loads run on the main queue.
	dispatch_semaphore_t sema = dispatch_semaphore_create(0);
	dispatch_queue_t queue;
	if(bAsync == YES){
		queue = dispatch_get_main_queue();
	} else {
		queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
	}
	
	dispatch_async(queue, ^{
		[asset loadValuesAsynchronouslyForKeys:@[kTracksKey] completionHandler:^{
			
			// restores the pre-load state and unblocks a synchronous caller;
			// shared by all the validation failure paths below.
			void (^failWithMessage)(NSString*) = ^(NSString * message) {
				NSLog(@"%@", message);
				self->bReady = _bReady;
				self->bLoaded = _bLoaded;
				self->bPlayStateBeforeLoad = _bPlayStateBeforeLoad;
				if(bAsync == NO){
					dispatch_semaphore_signal(sema);
				}
			};
			
			NSError * error = nil;
			AVKeyValueStatus status = [asset statusOfValueForKey:kTracksKey error:&error];
			
			if(status != AVKeyValueStatusLoaded) {
				failWithMessage([NSString stringWithFormat:@"error loading asset tracks: %@", [error localizedDescription]]);
				return;
			}
			
			CMTime _duration = [asset duration];
			
			if(CMTimeCompare(_duration, kCMTimeZero) == 0) {
				failWithMessage(@"track loaded with zero duration.");
				return;
			}
			
			// TODO: why not read infinite media? how about HLS streams?
			// BUGFIX: this previously tested the stale ivar self->duration
			// (still holding the previous video's value) instead of the
			// freshly loaded _duration.
			if(isfinite(CMTimeGetSeconds(_duration)) == NO) {
				failWithMessage(@"track loaded with infinite duration.");
				return;
			}
			
			NSArray * videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
			if(!self->bStream && [videoTracks count] == 0) {
				failWithMessage(@"no video tracks found.");
				return;
			}
			
			//------------------------------------------------------------ use asset
			[self->asyncLock lock];
			
			if (self->bIsUnloaded) {
				// player was unloaded before we could finish loading
				self->bIsUnloaded = NO;
				if(bAsync == NO){
					dispatch_semaphore_signal(sema);
				}
				[self->asyncLock unlock];
				return;
			}
			
			// unload video if one is already loaded.
			[self unloadVideoAsync];
			
			self->bIsUnloaded = NO;
			
			// adopt the new asset
			self.asset = asset;
			self->duration = _duration;
			
			if (!self->bStream) {
				// create asset reader (files only; streams have no local samples)
				BOOL bOk = [self createAssetReaderWithTimeRange:CMTimeRangeMake(kCMTimeZero, self->duration)];
				if(bOk == NO) {
					NSLog(@"problem with creating asset reader.");
					if(bAsync == NO){
						dispatch_semaphore_signal(sema);
					}
					[self->asyncLock unlock];
					return;
				}
				
				AVAssetTrack * videoTrack = [videoTracks objectAtIndex:0];
				self->frameRate = videoTrack.nominalFrameRate;
				self->videoWidth = [videoTrack naturalSize].width;
				self->videoHeight = [videoTrack naturalSize].height;
				
				NSLog(@"video file loaded at %li x %li @ %f fps", (long)self->videoWidth, (long)self->videoHeight, self->frameRate);
			}
			
			self->currentTime = CMTimeMakeWithSeconds(0.0, NSEC_PER_SEC);
			
			//------------------------------------------------------------ create player item.
			AVPlayerItem* playerItem = [AVPlayerItem playerItemWithAsset:self.asset];
			
			if (!playerItem) {
				NSLog(@"could not create AVPlayerItem");
				if(bAsync == NO){
					dispatch_semaphore_signal(sema);
				}
				[self->asyncLock unlock];
				return;
			}
			
			//------------------------------------------------------------ player item.
			self.playerItem = playerItem;
			[self.playerItem addObserver:self
							  forKeyPath:kStatusKey
								 options:0
								 context:&ItemStatusContext];
			
			NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
			[notificationCenter addObserver:self
								   selector:@selector(playerItemDidReachEnd)
									   name:AVPlayerItemDidPlayToEndTimeNotification
									 object:self.playerItem];
			
			//AVPlayerItemPlaybackStalledNotification only exists from OS X 10.9 or iOS 6.0 and up
#if (__MAC_OS_X_VERSION_MIN_REQUIRED >= 1090) || (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
			[notificationCenter addObserver:self
								   selector:@selector(playerItemDidStall)
									   name:AVPlayerItemPlaybackStalledNotification
									 object:self.playerItem];
#endif
			
#if defined(USE_VIDEO_OUTPUT)
			// safety: create the video output lazily, then attach it
			if (self.videoOutput == nil) {
				[self createVideoOutput];
			}
			[self.playerItem addOutput:self.videoOutput];
#endif
			
			//------------------------------------------------------------ recreate player.
			// destroy player if any - should never be the case!!
			if(self.player != nil) {
				[self removeTimeObserverFromPlayer];
				[self.player removeObserver:self forKeyPath:kRateKey context:&PlayerRateContext];
				self.player = nil;
			}
			
			// create new player
			self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
			[self.player addObserver:self
						  forKeyPath:kRateKey
							 options:NSKeyValueObservingOptionNew
							 context:&PlayerRateContext];
			[self addTimeObserverToPlayer];
			
			self->_player.volume = self->volume;
			
			// loaded
			self->bLoaded = true;
			
			if(bAsync == NO){
				dispatch_semaphore_signal(sema);
			}
			
			[self->asyncLock unlock];
		}];
	});
	
	// synchronous loads wait here for the completion handler to signal
	if(bAsync == NO){
		dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
		return bLoaded;
	} else {
		return YES;
	}
}
371

372

373
#pragma mark - unload video
374
// Resets all playback state immediately, then releases the heavyweight
// AVFoundation objects (reader, item, player, buffers) on a background queue.
// -unloadVideo waits on deallocCond for that background block to finish.
- (void)unloadVideoAsync {

	bIsUnloaded = YES;
	bReady = NO;
	bLoaded = NO;
//	bPlayStateBeforeLoad = NO;
	bUpdateFirstFrame = YES;
	bNewFrame = NO;
	bPlaying = NO;
	bFinished = NO;
	bWasPlayingBackwards = NO;
	
	videoSampleTime = kCMTimeNegativeInfinity;
	audioSampleTime = kCMTimeNegativeInfinity;
	synchSampleTime = kCMTimeInvalid;
	duration = kCMTimeZero;
	currentTime = kCMTimeZero;
	
	videoWidth = 0;
	videoHeight = 0;
	

	// capture strong references for the cleanup block, so the objects stay
	// alive until the block has had a chance to tear them down properly
	__block AVAsset* currentAsset = _asset;
	__block AVAssetReader* currentReader = _assetReader;
	__block AVAssetReaderTrackOutput* currentVideoTrack = _assetReaderVideoTrackOutput;
	__block AVAssetReaderTrackOutput* currentAudioTrack = _assetReaderAudioTrackOutput;
	__block AVPlayerItem* currentItem = _playerItem;
	__block AVPlayer* currentPlayer = _player;
	__block id currentTimeObserver = timeObserver;
	
	__block CMSampleBufferRef currentVideoSampleBuffer = videoSampleBuffer;
	__block CMSampleBufferRef currentAudioSampleBuffer = audioSampleBuffer;
	
#if defined(USE_VIDEO_OUTPUT)
	__block AVPlayerItemVideoOutput* currentVideoOutput = _videoOutput;
	__block CMVideoFormatDescriptionRef currentVideoInfo = _videoInfo;
	
	_videoOutput = nil;
	self.videoOutput = nil;
	
	_videoInfo = nil;
#endif
	
	// set all ivars to nil right away;
	// the actual cleanup happens in the block below
	_asset = nil;
	self.asset = nil;
	
	_assetReader = nil;
	self.assetReader = nil;
	
	_assetReaderVideoTrackOutput = nil;
	self.assetReaderVideoTrackOutput = nil;
	
	_assetReaderAudioTrackOutput = nil;
	self.assetReaderAudioTrackOutput = nil;
	
	_playerItem = nil;
	self.playerItem = nil;
	
	_player = nil;
	self.player = nil;
	timeObserver = nil;
	
	videoSampleBuffer = nil;
	audioSampleBuffer = nil;
	
	dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
		
		@autoreleasepool {
			
			[self->asyncLock lock];
			
			// release asset reader and its track outputs
			if (currentReader != nil) {
				[currentReader cancelReading];
				currentReader = nil;
				
				if (currentVideoTrack != nil) {
					currentVideoTrack = nil;
				}
				
				if (currentAudioTrack != nil) {
					currentAudioTrack = nil;
				}
			}
			
			// release asset
			if (currentAsset != nil) {
				[currentAsset cancelLoading];
				currentAsset = nil;
			}
			
			
			// release current player item:
			// remove KVO observer and notification registrations first,
			// so no callback fires on a half-dead item
			if(currentItem != nil) {
				
				[currentItem cancelPendingSeeks];
				[currentItem removeObserver:self forKeyPath:kStatusKey context:&ItemStatusContext];
				
				NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
				[notificationCenter removeObserver:self
											  name:AVPlayerItemDidPlayToEndTimeNotification
											object:currentItem];
				
				//AVPlayerItemPlaybackStalledNotification only exists from OS X 10.9 or iOS 6.0 and up
#if (__MAC_OS_X_VERSION_MIN_REQUIRED >= 1090) || (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
				[notificationCenter removeObserver:self
											  name:AVPlayerItemPlaybackStalledNotification
											object:currentItem];
#endif
				
#if defined(USE_VIDEO_OUTPUT)
				// remove output
				[currentItem removeOutput:currentVideoOutput];
				
				// release video output
				if (currentVideoOutput != nil) {
					currentVideoOutput = nil;
				}
				
				// destroy video format description (CF object, manual release)
				if (currentVideoInfo != nil) {
					CFRelease(currentVideoInfo);
					currentVideoInfo = nil;
				}
#endif
				
				currentItem = nil;
			}
			
			
			// destroy current player: remove rate KVO and time observer
			if (currentPlayer != nil) {
				[currentPlayer removeObserver:self forKeyPath:kRateKey context:&PlayerRateContext];

				if (currentTimeObserver != nil) {
					[currentPlayer removeTimeObserver:currentTimeObserver];
					currentTimeObserver = nil;
				}
				
				currentPlayer = nil;
			}
			
			
			// sample buffers are CF objects - release manually
			if(currentVideoSampleBuffer) {
				CFRelease(currentVideoSampleBuffer);
				currentVideoSampleBuffer = nil;
			}
			
			if(currentAudioSampleBuffer) {
				CFRelease(currentAudioSampleBuffer);
				currentAudioSampleBuffer = nil;
			}
			
			[self->asyncLock unlock];
			
			// wake up -unloadVideo if it is blocking on us
			if (self->deallocCond != nil) {
				[self->deallocCond lock];
				[self->deallocCond signal];
				[self->deallocCond unlock];
			}
		}
	});
	
}
541

542
// Synchronous unload: kicks off -unloadVideoAsync and blocks on a condition
// variable until its background cleanup block signals completion.
- (void)unloadVideo
{
	deallocCond = [[NSCondition alloc] init];
	[deallocCond lock];
	
	[self unloadVideoAsync];
	
	// sleep until the cleanup block signals deallocCond
	[deallocCond wait];
	[deallocCond unlock];
	
	deallocCond = nil;
}
557

558
// Disposes of the current video without blocking the caller;
// the lock serializes against any in-flight load/unload.
- (void)close
{
	[asyncLock lock];
	{
		[self unloadVideoAsync];
	}
	[asyncLock unlock];
}
564

565

566
#pragma mark -
567
// (Re)creates the AVAssetReader plus its video/audio track outputs for the
// given time range and starts reading.
// Returns NO only when the reader itself cannot be created; a failed
// -startReading is logged but still returns YES, because
// -updateFromAssetReader recreates the reader on demand.
- (BOOL)createAssetReaderWithTimeRange:(CMTimeRange)timeRange {
	
	videoSampleTime = videoSampleTimePrev = kCMTimeNegativeInfinity;
	audioSampleTime = kCMTimeNegativeInfinity;
	
	NSError *error = nil;
	
	// safety: tear down any previous reader first
	if (self.assetReader != nil) {
		[self.assetReader cancelReading];
		self.assetReader = nil;
	}
	
	// create new asset reader
	self.assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
	if(error) {
		NSLog(@"assetReader: error during initialisation: %@", [error localizedDescription]);
		return NO;
	}
	
	self.assetReader.timeRange = timeRange;
	
	//------------------------------------------------------------ add video output.
	if (bSampleVideo) {
		// platform-specific pixel format: BGRA on iOS, ARGB on macOS
		NSMutableDictionary * videoOutputSettings = [[NSMutableDictionary alloc] init];
#ifdef TARGET_IOS
		[videoOutputSettings setObject:@(kCVPixelFormatType_32BGRA)
								forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
#elif defined(TARGET_OSX)
		[videoOutputSettings setObject:@(kCVPixelFormatType_32ARGB)
								forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
#endif
		
		NSArray * videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
		if([videoTracks count] > 0) {
			AVAssetTrack * videoTrack = [videoTracks objectAtIndex:0];
			self.assetReaderVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
																						  outputSettings:videoOutputSettings];
			
			if(self.assetReaderVideoTrackOutput != nil) {
				
				// don't copy sample data (might improve performance)
				self.assetReaderVideoTrackOutput.alwaysCopiesSampleData = NO;
			
				if([self.assetReader canAddOutput:self.assetReaderVideoTrackOutput]) {
					[self.assetReader addOutput:self.assetReaderVideoTrackOutput];
				} else {
					NSLog(@"assetReaderVideoTrackOutput cannot be add to assetReader");
				}
				
			} else {
				NSLog(@"assetReaderVideoTrackOutput failed to load.");
			}
		}
	}
	
	//------------------------------------------------------------ add audio output.
	if (bSampleAudio) {
		double preferredHardwareSampleRate = 44100;
#ifdef TARGET_IOS
		// BUGFIX: the result of this call was previously discarded, so the
		// device's actual sample rate was never used.
		preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#endif
		
		AudioChannelLayout channelLayout;
		bzero(&channelLayout, sizeof(channelLayout));
		channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
		
		int numOfChannels = 1;
		if(channelLayout.mChannelLayoutTag == kAudioChannelLayoutTag_Stereo) {
			numOfChannels = 2;
		}
		
		NSMutableDictionary * audioOutputSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
							   @(kAudioFormatLinearPCM), AVFormatIDKey,
							   @(preferredHardwareSampleRate), AVSampleRateKey,
							   @(16), AVLinearPCMBitDepthKey,
							   @(NO), AVLinearPCMIsNonInterleaved,
							   @(NO), AVLinearPCMIsFloatKey,
							   @(NO), AVLinearPCMIsBigEndianKey,
							   nil];
		
		// including AVNumberOfChannelsKey & AVChannelLayoutKey on iOS 5 causes
		// a crash, so only include them on iOS 6+.
		// BUGFIX: this used `#ifdef IS_OS_6_OR_LATER`, which is ALWAYS true
		// because the macro is defined at the top of this file — the intended
		// check is a runtime one, performed here.
#ifdef TARGET_IOS
		BOOL bIncludeChannelKeys = IS_OS_6_OR_LATER;
#else
		BOOL bIncludeChannelKeys = YES;
#endif
		if (bIncludeChannelKeys) {
			audioOutputSettings[AVNumberOfChannelsKey] = @(numOfChannels);
			audioOutputSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)];
		}
		
		NSArray * audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
		if([audioTracks count] > 0) {
			AVAssetTrack * audioTrack = [audioTracks objectAtIndex:0];
			
			self.assetReaderAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
																						  outputSettings:audioOutputSettings];
			
			if(self.assetReaderAudioTrackOutput != nil) {
				
				// don't copy sample data
				self.assetReaderAudioTrackOutput.alwaysCopiesSampleData = NO;
				
				if([self.assetReader canAddOutput:self.assetReaderAudioTrackOutput]) {
					[self.assetReader addOutput:self.assetReaderAudioTrackOutput];
				} else {
					NSLog(@"assetReaderAudioTrackOutput cannot be add to assetReader");
				}
				
			} else {
				NSLog(@"assetReaderAudioTrackOutput failed to load.");
			}
		}
	}
	
	//------------------------------------------------------------ start reading.
	BOOL bOk = [self.assetReader startReading];
	if(!bOk) {
		// deliberately non-fatal: -updateFromAssetReader recreates the reader
		// when its status is not "reading", so we still report success here.
		NSLog(@"assetReader couldn't startReading: %@", [self.assetReader error]);
	}
	
	return YES;
}
692

693

694
//---------------------------------------------------------- player callbacks.
695
// KVO callback for the player item's "status" key and the player's "rate"
// key. Contexts registered in -loadWithURL:async: are compared by address;
// anything else is forwarded up the observer chain.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
	
	if(context == &ItemStatusContext) {
		
		if (object == self.playerItem) {
			
			if ([self.playerItem status] == AVPlayerItemStatusReadyToPlay) {
				
				// only run the ready-transition once
				if (bReady) {
					return;
				}

				bReady = true;
				
				// apply the volume/speed that were set before the item was ready
				[self setVolume:volume];
				[self setSpeed:speed];
				
				// apply a start-frame requested before readiness
				if (frameBeforeReady > 0) {
					[self setFrame:frameBeforeReady];
				}
				
				// apply a start-position requested before readiness
				if (positionBeforeReady > 0.F) {
					[self setPosition:positionBeforeReady];
				}
				
				// auto-play, or resume play if it was requested before ready
				if(bAutoPlayOnLoad || bPlayStateBeforeLoad) {
					[self play];
				} else {
					[self pause];
				}
				
				// update as soon as ready so pixels are loaded.
				[self update];

			} else if ([self.playerItem status] == AVPlayerItemStatusUnknown) {
				NSLog(@"AVPlayerItemStatusUnknown");
			} else if ([self.playerItem status] == AVPlayerItemStatusFailed) {
				NSLog(@"AVPlayerItemStatusFailed");
			} else {
				NSLog(@"AVPlayerItem: such status: %ld", (long)[self.playerItem status]);
			}
			
		} else {
			// ignore other objects
		}
		
		return;
	} else if (context == &PlayerRateContext) {
		
		if (object == self.player) {
		
			// mirror the player's actual rate into bPlaying
			// (rate 0 == paused/stopped)
			if (bReady &&
				[keyPath isEqualToString:kRateKey])
			{
				float rate = [[change objectForKey:@"new"] floatValue];
				bPlaying = (rate != 0);
			}
		} else {
			// ignore other objects
		}
		
		return;
	}
	
	// not one of ours - push it up the observer chain
	[super observeValueForKeyPath:keyPath
						 ofObject:object
						   change:change
						  context:context];
}
772

773
// Notification handler: playback hit either end of the media.
// Depending on the loop mode, rewind and continue, reverse direction
// (palindrome), or stay finished.
- (void)playerItemDidReachEnd {
	
	bFinished = YES;
	bPlaying = NO;
	
	BOOL bForward = (speed > 0.0);
	BOOL bBackward = (speed < 0.0);
	
	if (loop == LOOP_NORMAL) {
		if (bForward) {
			// wrap around to the beginning and keep playing
			[self seekToStart];
			[self play];
		} else if (bBackward) {
			// playing backwards: wrap around to the end
			[self seekToEnd];
			[self play];
		}
	} else if (loop == LOOP_PALINDROME) {
		if (bForward || bBackward) {
			// reverse direction and keep going
			[self setSpeed:-speed];
			[self play];
		}
	}
	
	// a looping movie never counts as finished
	if(loop > LOOP_NONE) {
		bFinished = NO;
	}
}
804

805

806
// Notification handler: media data stopped arriving in time for playback.
- (void)playerItemDidStall {
	NSLog(@"playerItem did stall - samples did not arrive in time");
}
809

810

811
//---------------------------------------------------------- update.
812
//---------------------------------------------------------- update.
// Per-frame tick: pulls the next video (and optionally audio) sample.
// Sets bNewFrame to report whether a fresh frame arrived.
- (void)update {
	
	/**
	 *  return if,
	 *  video is not yet loaded,
	 *  video is finished playing.
	 */
	if(!bReady || bFinished) {
		bNewFrame = NO;
		return;
	}
	
#if !defined(USE_VIDEO_OUTPUT)
	// no AVPlayerItemVideoOutput available - always use the asset reader
	[self updateFromAssetReader];
#else
	if (!bStream && self.player.rate > 0.0) {
		// playing forward: pull frames from the asset reader
		[self updateFromAssetReader];
	} else {
		// paused, playing backwards, or streaming:
		// get samples from the video output instead
		[self updateFromVideoOutput];
		if (bNewFrame) {
			videoSampleTimePrev = videoSampleTime;
		}
	}
#endif
}
844

845
#if defined(USE_VIDEO_OUTPUT)
// Pulls the pixel buffer for the player's current time from the
// AVPlayerItemVideoOutput and wraps it in a CMSampleBuffer
// (stored in videoSampleBuffer) so downstream code has a uniform interface.
- (void)updateFromVideoOutput {
	OSStatus err = noErr;
	
	// get time from player
	CMTime time = [_player currentTime];

	if ([self.videoOutput hasNewPixelBufferForItemTime:time]) {
		
		bNewFrame = YES;
		currentTime = time;
		
		// copy... returns a +1 retained buffer; released via CVBufferRelease below
		CVPixelBufferRef buffer = [self.videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL];
		
		// set video size in case it is not set yet (e.g. streams)
		if (videoWidth == 0 || videoHeight == 0) {
			CGSize presentationSize = _playerItem.presentationSize;
			videoWidth = presentationSize.width;
			videoHeight = presentationSize.height;
		}
		
		// create or update the cached video format description
		// whenever the buffer's format changes
		if (!_videoInfo || !CMVideoFormatDescriptionMatchesImageBuffer(_videoInfo, buffer)) {
			if (_videoInfo) {
				CFRelease(_videoInfo);
				_videoInfo = nil;
			}
			err = CMVideoFormatDescriptionCreateForImageBuffer(NULL, buffer, &_videoInfo);
		}
		if (err) {
			NSLog(@"Error at CMVideoFormatDescriptionCreateForImageBuffer %ld", (long)err);
			bNewFrame = NO;
			// release temp buffer
			CVBufferRelease(buffer);
			return;
		}
		
		// decodeTimeStamp is set to kCMTimeInvalid since we already receive decoded frames
		CMSampleTimingInfo sampleTimingInfo = {
			.duration = kCMTimeInvalid,
			.presentationTimeStamp = time,
			.decodeTimeStamp = kCMTimeInvalid
		};
		
		// release old sample buffer before overwriting it
		if(videoSampleBuffer) {
			CFRelease(videoSampleBuffer);
			videoSampleBuffer = nil;
		}
		
		// wrap the pixel buffer in a new sample buffer
		err = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
												 buffer,
												 true,
												 NULL,
												 NULL,
												 _videoInfo,
												 &sampleTimingInfo,
												 &videoSampleBuffer);
		if (err) {
			NSLog(@"Error at CMSampleBufferCreateForImageBuffer %ld", (long)err);
			bNewFrame = NO;
			// release temp buffer
			CVBufferRelease(buffer);
			return;
		}
		
		// release temp buffer (the sample buffer retains it)
		CVBufferRelease(buffer);
		
		videoSampleTime = time;
		
	} else {
		// no new frame for this time
		bNewFrame = NO;
	}
}
#endif
926

927
// Advances the asset reader to the player's current time, copying video
// (and optionally audio) sample buffers until their timestamps catch up.
// Sets bNewFrame when a later video sample than last time was obtained.
- (void)updateFromAssetReader {
	
	/**
	 *  in most cases we check at what time the video player is up to,
	 *  and use the time for sampling buffers in the code below.
	 *  but if a synchSampleTime is provided then we use it instead of video player time.
	 *  synchSampleTime is used when synchronous time is needed (video player plays asynchronously),
	 *  such as when needing to access the video frames, frame by frame.
	 *  synchSampleTime is cleared (invalidated) on every frame so it must be set before calling update.
	 */
	CMTime time = kCMTimeZero;
	if(CMTIME_IS_VALID(synchSampleTime)) {
		time = synchSampleTime;
		// clamp to [0, duration]
		time = CMTimeMaximum(time, kCMTimeZero);
		time = CMTimeMinimum(time, duration);
		synchSampleTime = kCMTimeInvalid;
	} else {
		time = [_player currentTime];
	}
	
	BOOL bTimeChanged = CMTimeCompare(time, currentTime) != 0;
	currentTime = time;
	
	if(bUpdateFirstFrame) {
		
		// this forces the first frame to be updated.
		// here the values for time and currentTime are both zero.
		// so this is to get around the progress check below.
		
		bUpdateFirstFrame = NO;
		
	} else if(bTimeChanged == NO) {
		
		// current time has not changed,
		// so the video has not progressed.
		
		if(self.assetReader != nil) {
			
			// check that assetReader has been created.
			// if assetReader is nil, the video must still be seeking to a new position.
			// so even though the time has not changed, the assetReader needs to be created.
			
			bNewFrame = NO;
			return;
		}
	}
	
	if(self.assetReader == nil) {
		if(bSeeking == true) {
			
			// video player is seeking to new position.
			// asset reader can only be created when seeking has finished.
			
			bNewFrame = NO;
			return;
		}
		
		// (re)create the reader from the current time to the end
		[self createAssetReaderWithTimeRange:CMTimeRangeMake(currentTime, duration)];
	}
	
	if (self.assetReader.status == AVAssetReaderStatusFailed) {
		NSLog(@"assetReader error: %@", self.assetReader.error);
	}
	
	if(self.assetReader.status != AVAssetReaderStatusReading)
	{
		bNewFrame = NO;
		return;
	}
	
	//---------------------------------------------------------- video buffer.
	BOOL bCopiedNewSamples = NO;
	while(bSampleVideo == true &&                                       // video sampling is on.
		  self.assetReaderVideoTrackOutput != nil &&                    // asset has a video track.
		  self.assetReader.status == AVAssetReaderStatusReading &&      // asset read is in reading state.
		  ((CMTimeCompare(videoSampleTime, currentTime) == -1) ))       // timestamp is less than currentTime.
	{
		CMSampleBufferRef videoBufferTemp = nil;

		@try {
			videoBufferTemp = [self.assetReaderVideoTrackOutput copyNextSampleBuffer];
		} @catch (NSException * e) {
			NSLog(@"error: %@", e);
			break;
		}
		
		if(videoBufferTemp) {
			if(videoSampleBuffer) { // release old buffer.
				CFRelease(videoSampleBuffer);
				videoSampleBuffer = nil;
			}
			videoSampleBuffer = videoBufferTemp; // save reference to new buffer.
			
			videoSampleTime = CMSampleBufferGetPresentationTimeStamp(videoSampleBuffer);
			
			bCopiedNewSamples = YES;
		} else {
			// reader ran out of samples - reset so the reader
			// gets recreated on a future update
			bNewFrame = NO;
			videoSampleTime = videoSampleTimePrev = kCMTimeNegativeInfinity;
			bUpdateFirstFrame = YES;
			break;
		}
	}
	
	//---------------------------------------------------------- audio buffer.
	while(bSampleAudio == true &&                                   // audio sampling is on.
		  self.assetReaderAudioTrackOutput != nil &&                // asset has an audio track.
		  self.assetReader.status == AVAssetReaderStatusReading &&  // asset read is in reading state.
		  ((CMTimeCompare(audioSampleTime, currentTime) == -1) ||    // timestamp is less than currentTime.
		   (CMTimeCompare(audioSampleTime, currentTime) == 0)))      // timestamp is equal currentTime.
	{
		CMSampleBufferRef audioBufferTemp;
		@try {
			audioBufferTemp = [self.assetReaderAudioTrackOutput copyNextSampleBuffer];
		} @catch (NSException * e) {
			break;
		}
		
		if(audioBufferTemp) {
			if(audioSampleBuffer) { // release old buffer.
				CFRelease(audioSampleBuffer);
				audioSampleBuffer = nil;
			}
			audioSampleBuffer = audioBufferTemp; // save reference to new buffer.
			
			audioSampleTime = CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer);
		} else {
			audioSampleTime = kCMTimeNegativeInfinity;
			break;
		}
	}
	
	// a new frame only counts if its timestamp moved forward
	if(bCopiedNewSamples == true) {
		bNewFrame = CMTimeCompare(videoSampleTime, videoSampleTimePrev) == 1;
		
		if(bNewFrame) {
			videoSampleTimePrev = videoSampleTime;
		}
	}
}
1068

1069

1070
// Installs a periodic time observer that drives -update once per frame
// interval on the main queue. No-op if updates are driven externally or an
// observer already exists.
- (void)addTimeObserverToPlayer {
	if(bWillBeUpdatedExternally) {
		return;
	}
	
	if(timeObserver != nil){
		return;
	}
	
	// NOTE(review): frameRate may still be 0 here (streams) which makes the
	// interval infinite — preserved from the original; confirm intended.
	double interval = 1.0 / (double)frameRate;
	
	// BUGFIX: `__block` is a strong capture under ARC, and the player retains
	// its periodic time observer — so capturing self strongly here created a
	// retain cycle (player -> observer block -> self -> player) that kept the
	// player alive until -removeTimeObserverFromPlayer. Capture weakly;
	// messaging nil is a harmless no-op after self is gone.
	__weak ofAVFoundationVideoPlayer* weakSelf = self;
	timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(interval, NSEC_PER_SEC)
														 queue:dispatch_get_main_queue()
													usingBlock:^(CMTime time) {
														[weakSelf update];
													}];
}
1088

1089
// Tears down the periodic observer installed by -addTimeObserverToPlayer.
- (void)removeTimeObserverFromPlayer {
	if (timeObserver == nil) {
		return;
	}
	[_player removeTimeObserver:timeObserver];
	timeObserver = nil;
}
1095

1096
//---------------------------------------------------------- play / pause.
1097
- (void)play {
	// Start playback now if the asset is ready; otherwise remember the
	// request so playback begins once loading completes.
	if(![self isReady]) {
		bPlayStateBeforeLoad = YES;
		return;
	}
	if(![self isPlaying]) {
		[self togglePlayPause];
	}
}
1106

1107
- (void)pause {
	// Pause playback now if the asset is ready; otherwise remember that
	// playback should stay paused once loading completes.
	if(![self isReady]) {
		bPlayStateBeforeLoad = NO;
		return;
	}
	if([self isPlaying]) {
		[self togglePlayPause];
	}
}
1116

1117
- (void)togglePlayPause {
	// Flip the play state. When resuming after the movie finished, rewind
	// to the start first; the playback rate is restored to the configured
	// speed on resume.
	bIsStopped = NO;
	bPlaying = !bPlaying;
	
	if(![self isPlaying]) {
		[_player pause];
		return;
	}
	
	if([self isFinished]) {
		[self seekToStart];
		bFinished = NO;
	}
	[_player setRate:speed];
}
1131

1132
- (void)stop {
	// Stops playback: rewind to the beginning, pause, and raise the stopped
	// flag LAST — pause goes through togglePlayPause, which clears bIsStopped.
	[self setPosition:0];
	[self pause];
	bIsStopped = YES;
}
1137

1138
- (void)stepByCount:(long)frames
{
	// Steps the playhead by the given number of frames (negative = backwards).
	// Requires the asset to be loaded; forward stepping past the end is ignored.
	if(![self isReady]) {
		return;
	}
	
#if defined(USE_VIDEO_OUTPUT)
	[_player.currentItem stepByCount:frames];
#else
	// Scale the offset by the requested frame count — the previous code
	// always stepped exactly one frame regardless of |frames|.
	double offsetSec = (double)frames / (double)frameRate;
	
	if (frames < 0) {

		// Jumping back requires a frame-accurate seek (asset reader rebuild).
		double timeSec = CMTimeGetSeconds(currentTime) + offsetSec;
		[self seekToTime:CMTimeMakeWithSeconds(timeSec, NSEC_PER_SEC) withTolerance:kCMTimeZero];
		
	} else if (![self isFinished] && frames > 0) {

		double timeSec = CMTimeGetSeconds(currentTime) + offsetSec;
		CMTime time = CMTimeMakeWithSeconds(timeSec, NSEC_PER_SEC);
		
		bSeeking = YES;
		time = CMTimeMaximum(time, kCMTimeZero);
		time = CMTimeMinimum(time, duration);
		
		// frames are preloaded, only seek player
		[_player seekToTime:time
			toleranceBefore:kCMTimePositiveInfinity
			 toleranceAfter:kCMTimePositiveInfinity
		  completionHandler:^(BOOL finished) {
			  // explicit self-> matches seekToTime:withTolerance: and avoids
			  // the implicit-self-capture warning on ivar access in a block.
			  self->bSeeking = NO;
		  }];
	}
#endif
}
1171

1172
//---------------------------------------------------------- seek.
1173
- (void)seekToStart {
	// Jump to the very beginning with frame-accurate (zero) tolerance.
	[self seekToTime:kCMTimeZero withTolerance:kCMTimeZero];
}
1176

1177
- (void)seekToEnd {
	// Jump to the end of the movie with frame-accurate (zero) tolerance.
	[self seekToTime:duration withTolerance:kCMTimeZero];
}
1180

1181
- (void)seekToTime:(CMTime)time {
	// Convenience seek with unrestricted tolerance (fast, not frame-accurate).
	[self seekToTime:time withTolerance:kCMTimePositiveInfinity];
}
1184

1185
/// Seeks the player to `time`, clamped to [0, duration], with the given
/// tolerance (kCMTimeZero = frame accurate). Tears down the current asset
/// reader so sampling restarts from the new position; clears the finished
/// flag and keeps bSeeking raised until the async seek completes.
- (void)seekToTime:(CMTime)time
	 withTolerance:(CMTime)tolerance {
	
	if(![self isReady]) {
		return;
	}
	
	if([self isFinished]) {
		bFinished = NO;
	}
	
	// TODO?
	// expensive call?
	// destroy it on a thread?
	// Cancel and drop the asset reader — it cannot be rewound in place.
	[self.assetReader cancelReading];
	self.assetReader = nil;
	self.assetReaderVideoTrackOutput = nil;
	self.assetReaderAudioTrackOutput = nil;
	
	bSeeking = YES;
	
	// restrict time
	time = CMTimeMaximum(time, kCMTimeZero);
	time = CMTimeMinimum(time, duration);
	
// 	if (!bStream && (CMTimeCompare(time, videoSampleTime) < 0)) {
		// if jumping back in time
		//[self createAssetReaderWithTimeRange:CMTimeRangeMake(time, duration)];
// 	}
	
	// set reader to real requested time
	[_player seekToTime:time
		toleranceBefore:tolerance
		 toleranceAfter:tolerance
	  completionHandler:^(BOOL finished) {
		self->bSeeking = NO;
	  }];
}
1223

1224
//---------------------------------------------------------- states.
1225
- (BOOL)isReady {
	// YES once the player item reached ready-to-play state.
	return bReady;
}
1228

1229
- (BOOL)isLoaded {
	// YES once the asset finished loading.
	return bLoaded;
}
1232

1233
- (BOOL)isPlaying {
	// YES while playback is active (not paused/stopped).
	return bPlaying;
}
1236

1237
- (BOOL)isPaused {
	// Paused = not playing and not explicitly stopped. Use logical && —
	// the previous bitwise & only worked because ! yields exactly 0 or 1.
	return !bPlaying && !bIsStopped;
}
1240

1241
- (BOOL)isNewFrame {
	// YES when the last update produced a video frame with a newer timestamp.
	return bNewFrame;
}
1244

1245
- (BOOL)isFinished {
	// YES once playback reached the end (and looping did not restart it).
	return bFinished;
}
1248

1249
//---------------------------------------------------------- sampling getters / setters.
1250
- (void)setEnableVideoSampling:(BOOL)value {
	// Enable/disable copying of video sample buffers (on by default).
	bSampleVideo = value;
}
1253
- (void)setEnableAudioSampling:(BOOL)value {
	// Enable/disable copying of audio sample buffers.
	bSampleAudio = value;
}
1256

1257
- (void)setSynchSampleTime:(CMTime)time {
	// Override the time used to synchronize sample copying on the next update.
	synchSampleTime = time;
}
1260

1261
- (void)setSynchSampleTimeInSec:(double)time {
	// Seconds-based convenience for setSynchSampleTime:.
	[self setSynchSampleTime:CMTimeMakeWithSeconds(time, NSEC_PER_SEC)];
}
1264

1265
- (CMTime)getVideoSampleTime {
	// Presentation timestamp of the most recently copied video sample.
	return videoSampleTime;
}
1268

1269
- (double)getVideoSampleTimeInSec {
	// getVideoSampleTime in seconds.
	return CMTimeGetSeconds(videoSampleTime);
}
1272

1273
- (CMTime)getAudioSampleTime {
	// Presentation timestamp of the most recently copied audio sample.
	return audioSampleTime;
}
1276

1277
- (double)getAudioSampleTimeInSec {
	// getAudioSampleTime in seconds.
	return CMTimeGetSeconds(audioSampleTime);
}
1280

1281
- (CMSampleBufferRef)getVideoSampleBuffer {
	// Borrowed reference to the latest video sample buffer (may be NULL).
	// Ownership stays with the player; do not CFRelease.
	return videoSampleBuffer;
}
1284

1285
- (CMSampleBufferRef)getAudioSampleBuffer {
	// Borrowed reference to the latest audio sample buffer (may be NULL).
	// Ownership stays with the player; do not CFRelease.
	return audioSampleBuffer;
}
1288

1289
- (CVImageBufferRef)getCurrentFrame {
	// Image buffer of the latest video sample; NULL when there is no sample
	// (CMSampleBufferGetImageBuffer tolerates a NULL buffer argument's result).
	return CMSampleBufferGetImageBuffer(videoSampleBuffer);
}
1292

1293
//---------------------------------------------------------- getters / setters.
1294
- (NSInteger)getWidth {
	// Video width in pixels (0 until the asset is loaded).
	return videoWidth;
}
1297

1298
- (NSInteger)getHeight {
	// Video height in pixels (0 until the asset is loaded).
	return videoHeight;
}
1301

1302
- (CMTime)getCurrentTime {
	// Current playhead time as tracked by the update loop.
	return currentTime;
}
1305

1306
- (double)getCurrentTimeInSec {
	// NOTE(review): returns the last *sampled video* timestamp, not
	// currentTime as getCurrentTime does — presumably so the reported time
	// tracks the decoded frame; confirm this asymmetry is intentional.
	return CMTimeGetSeconds(videoSampleTime);
}
1309

1310
- (CMTime)getDuration {
	// Total movie duration (kCMTimeZero until the asset is loaded).
	return duration;
}
1313

1314
- (double)getDurationInSec {
	// Total movie duration in seconds.
	return CMTimeGetSeconds(duration);
}
1317

1318
- (float)getFrameRate {
	// Nominal frame rate of the video track (0 until the asset is loaded).
	return frameRate;
}
1321

1322
- (int)getDurationInFrames {
	// Total frame count, truncated to int (duration * fps).
	return [self getDurationInSec] * [self getFrameRate];
}
1325

1326
- (int)getCurrentFrameNum {
	// Current frame index, truncated to int (current time * fps).
	return [self getCurrentTimeInSec] * [self getFrameRate];
}
1329

1330
- (void)setPosition:(float)position {
	// Seek to a normalized position in [0, 1]. When the asset is not ready
	// yet, remember the request and let the load path apply it later.
	if(![self isReady]) {
		positionBeforeReady = position;
		frameBeforeReady = 0;
		return;
	}
	double timeSec = [self getDurationInSec] * position;
	[self seekToTime:CMTimeMakeWithSeconds(timeSec, NSEC_PER_SEC)];
}
1339

1340
- (void)setFrame:(int)frame {
	// Seek to an absolute frame number. When the asset is not ready yet,
	// remember the request and let the load path apply it later.
	if(![self isReady]) {
		frameBeforeReady = frame;
		positionBeforeReady = 0.F;
		return;
	}
	int totalFrames = [self getDurationInFrames];
	if(totalFrames <= 0) {
		// Unknown duration/frame rate — the division below would yield
		// inf/NaN and produce an invalid seek time.
		return;
	}
	float position = frame / (float)totalFrames;
	[self setPosition:position];
}
1349

1350
- (float)getPosition {
	// Normalized playhead position in [0, 1]. Guard against a zero/unknown
	// duration, which previously produced NaN (0/0) or inf.
	double durationSec = [self getDurationInSec];
	if(durationSec <= 0.0) {
		return 0.F;
	}
	return (float)([self getCurrentTimeInSec] / durationSec);
}
1353

1354
- (void)setVolume:(float)value {
	// Cache the requested volume, then forward it to the player when a
	// playable item is available (it is re-applied on load otherwise).
	volume = value;
	
	if(![self isReady]) {
		return;
	}
	if(self.playerItem == nil) {
		return;
	}
	
	_player.volume = volume;
}
1368

1369
- (float)getVolume {
	// Last volume requested via setVolume: (default 1).
	return volume;
}
1372

1373
- (void)setLoop:(playerLoopType)value {
	// Set the looping mode (e.g. LOOP_NONE / normal loop).
	loop = value;
}
1376

1377
- (playerLoopType)getLoop {
	// Current looping mode.
	return loop;
}
1380

1381
/// Sets the playback rate. Positive = forward, negative = backward (only
/// with USE_VIDEO_OUTPUT, a reversible codec, and not a stream — otherwise
/// the rate is forced to 0 with an error log). Switching from backward to
/// forward rebuilds the asset reader from the current time.
- (void)setSpeed:(float)value {
	
	speed = value;

	if(![self isReady]) {
		return;
	}
	
	if (_player == nil) {
		return;
	}
	
	if (!bStream && !bSeeking && bWasPlayingBackwards && value > 0.0) {
		// create assetReaders if we played backwards earlier
		[self createAssetReaderWithTimeRange:CMTimeRangeMake(currentTime, duration)];
		bWasPlayingBackwards = NO;
	}
	
	if (!bWasPlayingBackwards && value < 0.0) {
		
#if !defined(USE_VIDEO_OUTPUT)
		// not supported
		NSLog(@"ERROR: Backwards playback is not supported. Minimum requirement is OSX 10.8 or iOS 6.0");
		value = 0.0;
#else
		// Reverse playback needs codec support and a non-stream source.
		if (!self.playerItem.canPlayReverse) {
			if (!bStream) {
				NSLog(@"ERROR: can not play backwards: not supported (check your codec)");
			} else {
				NSLog(@"ERROR: can not play backwards a stream");
			}
			
			value = 0.0;
		}
		if (self.videoOutput == nil) {
			NSLog(@"ERROR: can not play backwards: no video output");
			value = 0.0;
		}
#endif
	}
	
	if (value < 0.0) {
		bWasPlayingBackwards = YES;
	}
		
	[_player setRate:value];
}
1428

1429
- (float)getSpeed {
	// Last playback rate requested via setSpeed: (default 1).
	return speed;
}
1432

1433
- (void)setAutoplay:(BOOL)value {
	// When YES, playback starts automatically once loading finishes.
	bAutoPlayOnLoad = value;
}
1436

1437
- (BOOL)getAutoplay {
	// Whether playback starts automatically after load.
	return bAutoPlayOnLoad;
}
1440

1441
- (void)setWillBeUpdatedExternally:(BOOL)value {
	// When YES, the owner drives update itself and no periodic time
	// observer is installed (see addTimeObserverToPlayer).
	bWillBeUpdatedExternally = value;
}
1444

1445
- (void)setStreaming:(BOOL)value {
	// Mark the source as a network stream (disables reverse playback and
	// asset-reader rebuilds that only apply to local files).
	bStream = value;
}
1448

1449
@end
1450

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.