framework2

Форк
0
540 строк · 14.5 Кб
1
/*
2
 *  ofAVFoundationGrabber.mm
3
 */
4

5
#include "ofAVFoundationGrabber.h"
6
#include "ofVectorMath.h"
7
#include "ofRectangle.h"
8
#include "ofGLUtils.h"
9

10
#ifdef OF_VIDEO_CAPTURE_AVF
11

12
#import <Accelerate/Accelerate.h>
13

14
// Class extension: keeps the capture-session property out of the public header.
@interface OSXVideoGrabber ()
@property (nonatomic,retain) AVCaptureSession *captureSession;
@end
17

18
@implementation OSXVideoGrabber
19
@synthesize captureSession;
20

21
#pragma mark -
22
#pragma mark Initialization
23
/// Designated initializer: resets every ivar to its idle default.
/// No AVFoundation objects are created until -initCapture:capWidth:capHeight:.
- (instancetype)init {
	if ((self = [super init]) == nil) {
		return nil;
	}
	captureInput  = nil;
	captureOutput = nil;
	device        = nil;

	bInitCalled   = NO;
	grabberPtr    = NULL;
	deviceID      = 0;
	width         = 0;
	height        = 0;
	currentFrame  = 0;
	return self;
}
39

40
/// Finds a video capture device, picks the supported resolution closest to
/// w x h, optionally applies the requested framerate, then builds and starts
/// the AVCaptureSession.
/// @param framerate desired fps; values <= 0 keep the device default.
/// @param w,h requested capture size; the ivars width/height are updated to
///        the dimensions actually selected.
/// @return YES when at least one device exists and the session was started.
- (BOOL)initCapture:(int)framerate capWidth:(int)w capHeight:(int)h{
	NSArray * devices;
	if (@available(macOS 10.15, *)) {
		// BUGFIX: mediaType was nil, which matches devices of *every* media
		// type (microphones included); restrict discovery to video devices.
		AVCaptureeDeviceDiscoverySession *session = nil;
		session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[
			AVCaptureDeviceTypeBuiltInWideAngleCamera,
			AVCaptureDeviceTypeExternalUnknown,
		] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
		devices = [session devices];
	} else {
		devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	}

	if([devices count] > 0) {
		// Clamp the requested device index into range. The negative check also
		// avoids the int -> NSUInteger promotion in the old comparison, which
		// let a negative deviceID slip through to objectAtIndex:.
		if(deviceID < 0)
			deviceID = 0;
		if(deviceID > (int)[devices count]-1)
			deviceID = (int)[devices count]-1;

		// We set the device
		device = [devices objectAtIndex:deviceID];

		NSError *error = nil;
		[device lockForConfiguration:&error];

		if(!error) {

			float smallestDist = 99999999.0;
			// BUGFIX: the original "int bestW, bestH = 0;" left bestW
			// uninitialized even though it is read in the bestW != 0 test below.
			int bestW = 0;
			int bestH = 0;

			// Set width and height to be passed in dimensions
			// We will then check to see if the dimensions are supported and if not find the closest matching size.
			width = w;
			height = h;

			glm::vec2 requestedDimension(width, height);

			AVCaptureDeviceFormat * bestFormat  = nullptr;
			for ( AVCaptureDeviceFormat * format in [device formats] ) {
				CMFormatDescriptionRef desc = format.formatDescription;
				CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(desc);

				int tw = dimensions.width;
				int th = dimensions.height;
				glm::vec2 formatDimension(tw, th);

				// An exact match wins immediately.
				if( tw == width && th == height ){
					bestW = tw;
					bestH = th;
					bestFormat = format;
					break;
				}

				// Otherwise remember the format closest (euclidean distance)
				// to the requested size.
				float dist = glm::length(formatDimension - requestedDimension);
				if( dist < smallestDist ){
					smallestDist = dist;
					bestW = tw;
					bestH = th;
					bestFormat = format;
				}

				ofLogVerbose("ofAvFoundationGrabber") << " supported dimensions are: " << dimensions.width << " " << dimensions.height;
			}

			// Set the new dimensions and format
			if( bestFormat != nullptr && bestW != 0 && bestH != 0 ){
				if( bestW != width || bestH != height ){
					ofLogWarning("ofAvFoundationGrabber") << " requested width and height aren't supported. Setting capture size to closest match: " << bestW << " by " << bestH<< std::endl;
				}

				[device setActiveFormat:bestFormat];
				width = bestW;
				height = bestH;
			}

			//only set the framerate if it has been set by the user
			if( framerate > 0 ){

				AVFrameRateRange * desiredRange = nil;
				NSArray * supportedFrameRates = device.activeFormat.videoSupportedFrameRateRanges;

				int numMatch = 0;
				for(AVFrameRateRange * range in supportedFrameRates){

					if( (floor(range.minFrameRate) <= framerate && ceil(range.maxFrameRate) >= framerate) ){
						ofLogVerbose("ofAvFoundationGrabber") << "found good framerate range, min: " << range.minFrameRate << " max: " << range.maxFrameRate << " for requested fps: " << framerate;
						desiredRange = range;
						numMatch++;
					}
				}

				if( numMatch > 0 ){
					//TODO: this crashes on some devices ( Orbecc Astra Pro )
					device.activeVideoMinFrameDuration = desiredRange.minFrameDuration;
					device.activeVideoMaxFrameDuration = desiredRange.maxFrameDuration;
				}else{
					ofLogError("ofAvFoundationGrabber") << " could not set framerate to: " << framerate << ". Device supports: ";
					for(AVFrameRateRange * range in supportedFrameRates){
						ofLogError() << "  framerate range of: " << range.minFrameRate <<
					 " to " << range.maxFrameRate;
					 }
				}

			}

			[device unlockForConfiguration];
		} else {
			NSLog(@"OSXVideoGrabber Init Error: %@", error);
		}

		// We setup the input
		captureInput						= [AVCaptureDeviceInput
											   deviceInputWithDevice:device
											   error:nil];

		// We setup the output
		captureOutput = [[AVCaptureVideoDataOutput alloc] init];
		// While a frame is processed in -captureOutput:didOutputSampleBuffer:fromConnection: no other frames are added to the queue.
		// If you don't want this behaviour set the property to NO
		captureOutput.alwaysDiscardsLateVideoFrames = YES;

		// We create a serial queue to handle the processing of our frames
		dispatch_queue_t queue;
		queue = dispatch_queue_create("cameraQueue", NULL);
		[captureOutput setSampleBufferDelegate:self queue:queue];

		// Request BGRA buffers at the selected size; the delegate converts to
		// RGB later when the grabber's pixel format asks for it.
		NSDictionary* videoSettings =[NSDictionary dictionaryWithObjectsAndKeys:
							  [NSNumber numberWithDouble:width], (id)kCVPixelBufferWidthKey,
							  [NSNumber numberWithDouble:height], (id)kCVPixelBufferHeightKey,
							  [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
							  nil];
		[captureOutput setVideoSettings:videoSettings];

		// And we create a capture session
		if(self.captureSession) {
			self.captureSession = nil;
		}
		self.captureSession = [[AVCaptureSession alloc] init];

		[self.captureSession beginConfiguration];

		// We add input and output
		[self.captureSession addInput:captureInput];
		[self.captureSession addOutput:captureOutput];

		// Setting min/max frame duration on the connection caps the delegate
		// callback rate. Must be called after the output is added to the session.
		AVCaptureConnection *conn = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
		// BUGFIX: only apply a frame duration when a framerate was requested;
		// fps defaults to -1 and CMTimeMake(1, -1) is not a valid duration.
		if (framerate > 0 &&
			[conn isVideoMinFrameDurationSupported] == YES &&
			[conn isVideoMaxFrameDurationSupported] == YES) {
				[conn setVideoMinFrameDuration:CMTimeMake(1, framerate)];
				[conn setVideoMaxFrameDuration:CMTimeMake(1, framerate)];
		}

		// We start the capture Session
		[self.captureSession commitConfiguration];
		[self.captureSession startRunning];

		bInitCalled = YES;
		return YES;
	}
	return NO;
}
207

208
/// (Re)starts the capture session and switches the device to auto focus.
-(void) startCapture{

	[self.captureSession startRunning];

	// BUGFIX: check that the configuration lock was actually obtained, and
	// release it afterwards — it was previously acquired and never unlocked,
	// keeping the device locked against further configuration.
	if( [captureInput.device lockForConfiguration:nil] ){

		//if( [captureInput.device isExposureModeSupported:AVCaptureExposureModeAutoExpose] ) [captureInput.device setExposureMode:AVCaptureExposureModeAutoExpose ];
		if( [captureInput.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] )	[captureInput.device setFocusMode:AVCaptureFocusModeAutoFocus ];

		[captureInput.device unlockForConfiguration];
	}
}
218

219
/// Locks the device focus (and, if the commented line is re-enabled,
/// exposure) at its current setting.
-(void) lockExposureAndFocus{

	// BUGFIX: check the lock result and release the configuration lock when
	// done — it was previously acquired and never unlocked.
	if( [captureInput.device lockForConfiguration:nil] ){

		//if( [captureInput.device isExposureModeSupported:AVCaptureExposureModeLocked] ) [captureInput.device setExposureMode:AVCaptureExposureModeLocked ];
		if( [captureInput.device isFocusModeSupported:AVCaptureFocusModeLocked] )	[captureInput.device setFocusMode:AVCaptureFocusModeLocked ];

		[captureInput.device unlockForConfiguration];
	}
}
228

229
/// Tears the session down: detaches the sample-buffer delegate, removes
/// every input and output, and stops the session. Safe to call when no
/// session exists.
-(void)stopCapture{
	if(!self.captureSession) {
		return;
	}

	if(captureOutput != nil && captureOutput.sampleBufferDelegate != nil) {
		[captureOutput setSampleBufferDelegate:nil queue:NULL];
	}

	// Detach every input and output that was attached to the session.
	for(AVCaptureInput * anInput in self.captureSession.inputs) {
		[self.captureSession removeInput:anInput];
	}
	for(AVCaptureOutput * anOutput in self.captureSession.outputs) {
		[self.captureSession removeOutput:anOutput];
	}

	[self.captureSession stopRunning];
}
248

249
/// Returns the stored CGImage. Nothing in this file ever assigns
/// currentFrame after -init zeroes it (dealloc only releases it), so callers
/// should expect NULL; kept for API compatibility.
-(CGImageRef)getCurrentFrame{
	return currentFrame;
}
252

253
/// Enumerates the attached video capture devices and returns their localized
/// names, logging each with its index (the index matches setDevice:).
-(std::vector <std::string>)listDevices{
    std::vector <std::string> deviceNames;

	NSArray * devices;
	if (@available(macOS 10.15, *)) {
		// BUGFIX: mediaType was nil, which matches devices of *every* media
		// type (microphones included); restrict discovery to video devices.
		AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[
			AVCaptureDeviceTypeBuiltInWideAngleCamera,
			AVCaptureDeviceTypeExternalUnknown,
		] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
		devices = [session devices];
	} else {
		devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	}

	int i=0;
	for (AVCaptureDevice * captureDevice in devices){
        deviceNames.push_back([captureDevice.localizedName UTF8String]);
		 ofLogNotice() << "Device: " << i << ": " << deviceNames.back();
		i++;
    }
    return deviceNames;
}
275

276
/// Stores the capture-device index to use on the next initCapture call.
-(void)setDevice:(int)_device{
	deviceID = _device;
}
279

280
#pragma mark -
281
#pragma mark AVCaptureSession delegate
282
/// Capture-thread delegate callback: copies (or BGRA->RGB converts) the
/// incoming frame into grabberPtr->pixelsTmp, unless the grabber is locked
/// for teardown (bLock) or mid-update (capMutex held).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
	   fromConnection:(AVCaptureConnection *)connection
{
	if(grabberPtr != NULL) {
		@autoreleasepool {
			CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
			// Lock the image buffer.
			// BUGFIX: CVPixelBuffer lock/unlock flags must match; we only read
			// the pixels, so lock read-only to pair with the read-only unlocks.
			CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

			if( grabberPtr != NULL && !grabberPtr->bLock ){

				unsigned char *isrc4 = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
				size_t widthIn  = CVPixelBufferGetWidth(imageBuffer);
				size_t heightIn	= CVPixelBufferGetHeight(imageBuffer);

				if( widthIn != grabberPtr->getWidth() || heightIn != grabberPtr->getHeight() ){
					ofLogError("ofAVFoundationGrabber") << " incoming image dimensions " << widthIn << " by " << heightIn << " don't match. This shouldn't happen! Returning.";
					// BUGFIX: the early return used to leave the buffer's base
					// address locked.
					CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
					return;
				}

				if( grabberPtr->pixelFormat == OF_PIXELS_BGRA ){

					// try_lock: if the app thread is mid-update() we simply
					// drop this frame instead of stalling the capture queue.
					if( grabberPtr->capMutex.try_lock() ){
						grabberPtr->pixelsTmp.setFromPixels(isrc4, widthIn, heightIn, 4);
						grabberPtr->updatePixelsCB();
						grabberPtr->capMutex.unlock();
					}

				}else{

					// Use Accelerate to drop the alpha channel and reorder
					// BGRA -> RGB into a temporary 3-channel buffer.
					ofPixels rgbConvertPixels;
					rgbConvertPixels.allocate(widthIn, heightIn, 3);

					vImage_Buffer srcImg;
					srcImg.width = widthIn;
					srcImg.height = heightIn;
					srcImg.data = isrc4;
					srcImg.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer);

					vImage_Buffer dstImg;
					dstImg.width = srcImg.width;
					dstImg.height = srcImg.height;
					// widthIn == width is guaranteed by the dimension check
					// above; use the buffer's own width for clarity.
					dstImg.rowBytes = widthIn*3;
					dstImg.data = rgbConvertPixels.getData();

					vImage_Error err;
					err = vImageConvert_BGRA8888toRGB888(&srcImg, &dstImg, kvImageNoFlags);
					if(err != kvImageNoError){
						ofLogError("ofAVFoundationGrabber") << "Error using accelerate to convert bgra to rgb with vImageConvert_BGRA8888toRGB888 error: " << err;
					}else{

						if( grabberPtr->capMutex.try_lock() ){
							grabberPtr->pixelsTmp = rgbConvertPixels;
							grabberPtr->updatePixelsCB();
							grabberPtr->capMutex.unlock();
						}

					}
				}
			}

			// Unlock the image buffer.
			// BUGFIX: this used to sit inside the !bLock branch above, so the
			// buffer stayed locked whenever bLock was set during teardown.
			CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
		}
	}
}
350

351
#pragma mark -
352
#pragma mark Memory management
353

354
/// Final teardown: stop the session, detach the delegate, break the link to
/// the C++ wrapper and release the retained CGImage. The order matters —
/// the session must stop before the delegate and grabberPtr go away.
- (void)dealloc {
	// Stop the CaptureSession
	if(self.captureSession) {
		[self stopCapture];
		self.captureSession = nil;
	}
	// stopCapture already detached the delegate when a session existed; this
	// also covers an output that exists without a session.
	if(captureOutput){
		if(captureOutput.sampleBufferDelegate != nil) {
			[captureOutput setSampleBufferDelegate:nil queue:NULL];
		}
		captureOutput = nil;
	}

	captureInput = nil;
	device = nil;

	// Detach from the C++ wrapper so the capture callback can no longer
	// dereference a dying object.
	if(grabberPtr) {
		[self eraseGrabberPtr];
	}
	grabberPtr = nil;
	if(currentFrame) {
		// release the currentFrame image
		CGImageRelease(currentFrame);
		currentFrame = nil;
	}
}
380

381
/// Clears the back-pointer to the C++ grabber; after this the capture
/// delegate callback becomes a no-op.
- (void)eraseGrabberPtr {
	grabberPtr = NULL;
}
384

385
@end
386

387

388
//--------------------------------------------------------------
/// Constructor: allocates the Objective-C capture wrapper and puts every
/// member into its idle default state.
ofAVFoundationGrabber::ofAVFoundationGrabber(){
	grabber            = [[OSXVideoGrabber alloc] init];
	fps                = -1;
	width              = 0;
	height             = 0;
	bIsInit            = false;
	pixelFormat        = OF_PIXELS_RGB;
	newFrame           = false;
	bHavePixelsChanged = false;
	bLock              = false;
}
399

400
//--------------------------------------------------------------
/// Destructor: logs and performs the full teardown via close().
ofAVFoundationGrabber::~ofAVFoundationGrabber(){
	// Consistency: use the stream-style module logger like the rest of the file
	// instead of the legacy ofLog(OF_LOG_VERBOSE, ...) form.
	ofLogVerbose("ofAVFoundationGrabber") << "destructor";
	close();
}
404

405
//--------------------------------------------------------------
/// Releases the pixel buffers, but only when something was allocated.
void ofAVFoundationGrabber::clear(){
	if( pixels.size() == 0 ){
		return;
	}
	pixels.clear();
	pixelsTmp.clear();
}
411

412
//--------------------------------------------------------------
/// Stops capture and returns the grabber to its just-constructed state.
void ofAVFoundationGrabber::close(){
	// Block the capture callback from touching our pixels during teardown;
	// released again at the end once the state is reset.
	bLock = true;
	if(grabber) {
		// Stop and release the OSXVideoGrabber
		[grabber stopCapture];
		[grabber eraseGrabberPtr];
		grabber = nil;
	}
	clear();
	// Reset everything to the same defaults the constructor establishes.
	bIsInit = false;
	width = 0;
    height = 0;
	fps		= -1;
	pixelFormat = OF_PIXELS_RGB;
	newFrame = false;
	bHavePixelsChanged = false;
	bLock = false;
}
430

431
//--------------------------------------------------------------
/// Stores the fps to request during the next setup() call; values <= 0
/// (the default is -1) leave the device framerate untouched.
void ofAVFoundationGrabber::setDesiredFrameRate(int capRate){
	fps = capRate;
}
434

435
//--------------------------------------------------------------
/// Initializes the capture pipeline for a w x h stream.
/// Returns false when no device could be opened; on success the pixel
/// buffers are (re)allocated at the size the camera actually supports.
bool ofAVFoundationGrabber::setup(int w, int h){

	if( grabber == nil ){
		grabber = [[OSXVideoGrabber alloc] init];
	}

	grabber->grabberPtr = this;

	if( ![grabber initCapture:fps capWidth:w capHeight:h] ) {
		return false;
	}

	// The camera may have selected the closest supported size instead of
	// the requested one — mirror whatever it chose.
	width  = grabber->width;
	height = grabber->height;

	clear();
	pixels.allocate(width, height, pixelFormat);
	pixelsTmp.allocate(width, height, pixelFormat);

	[grabber startCapture];

	newFrame = false;
	bIsInit  = true;
	return true;
}
464

465

466
//--------------------------------------------------------------
/// True once setup() has succeeded and until close() is called.
bool ofAVFoundationGrabber::isInitialized() const{
    return bIsInit;
}
469

470
//--------------------------------------------------------------
/// App-thread tick: if the capture thread produced a new frame, copies it
/// from pixelsTmp into pixels and raises the newFrame flag for this frame.
void ofAVFoundationGrabber::update(){
	newFrame = false;

	// BUGFIX: bHavePixelsChanged was read outside capMutex while the capture
	// thread sets it inside its try_lock region (via updatePixelsCB) — check
	// and consume the flag under the lock. The capture thread only ever
	// try_locks, so holding the mutex here cannot deadlock; at worst a frame
	// is dropped, as before.
	capMutex.lock();
	if (bHavePixelsChanged){
		pixels = pixelsTmp;
		bHavePixelsChanged = false;
		newFrame = true;
	}
	capMutex.unlock();
}
481

482
//--------------------------------------------------------------
/// Mutable access to the most recent frame copied in by update().
ofPixels & ofAVFoundationGrabber::getPixels(){
	return pixels;
}
485

486
//--------------------------------------------------------------
/// Read-only access to the most recent frame copied in by update().
const ofPixels & ofAVFoundationGrabber::getPixels() const{
	return pixels;
}
489

490
//--------------------------------------------------------------
/// True only during the app-thread frame in which update() copied new pixels.
bool ofAVFoundationGrabber::isFrameNew() const{
	return newFrame;
}
493

494
//--------------------------------------------------------------
/// Capture-thread callback marking that pixelsTmp holds a fresh frame.
/// Re the TODO below: both call sites in the -captureOutput:... delegate
/// invoke this while holding capMutex (via try_lock), so this write already
/// happens under that lock.
void ofAVFoundationGrabber::updatePixelsCB(){
	//TODO: does this need a mutex? or some thread protection?
	bHavePixelsChanged = true;
}
498

499
//--------------------------------------------------------------
/// Wraps the Objective-C device enumeration into ofVideoDevice records;
/// the record id matches the index accepted by setDeviceID().
std::vector <ofVideoDevice> ofAVFoundationGrabber::listDevices() const{
	std::vector <std::string> names = [grabber listDevices];

	std::vector <ofVideoDevice> devices;
	devices.reserve(names.size());

	for(std::size_t idx = 0; idx < names.size(); idx++){
		ofVideoDevice vd;
		vd.id         = (int)idx;
		vd.deviceName = names[idx];
		vd.bAvailable = true;
		devices.push_back(vd);
	}

	return devices;
}
513

514
//--------------------------------------------------------------
/// Selects the capture device index used by the next setup() call,
/// creating the Objective-C wrapper on demand.
void ofAVFoundationGrabber::setDeviceID(int deviceID) {
	if( grabber == nil ){
		grabber = [[OSXVideoGrabber alloc] init];
	}
	[grabber setDevice:deviceID];
	device = deviceID;
}
521

522
//--------------------------------------------------------------
/// Accepts RGB, RGBA or BGRA; any other format is rejected and the current
/// setting kept. Must be called before setup() to take effect.
/// NOTE(review): the capture callback only produces BGRA or 3-channel RGB
/// data, so OF_PIXELS_RGBA appears to fall through to the RGB conversion
/// path — verify whether RGBA delivery actually works.
bool ofAVFoundationGrabber::setPixelFormat(ofPixelFormat PixelFormat) {
	switch(PixelFormat){
		case OF_PIXELS_RGB:
		case OF_PIXELS_RGBA:
		case OF_PIXELS_BGRA:
			pixelFormat = PixelFormat;
			return true;
		default:
			return false;
	}
}
535

536
//--------------------------------------------------------------
/// The pixel format frames are delivered in (see setPixelFormat()).
ofPixelFormat ofAVFoundationGrabber::getPixelFormat() const{
	return pixelFormat;
}
539

540
#endif
541

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.