/*
 * ofAVFoundationGrabber.mm
 */

#include "ofAVFoundationGrabber.h"
#include "ofVectorMath.h"
#include "ofRectangle.h"
#include "ofGLUtils.h"

#ifdef OF_VIDEO_CAPTURE_AVF

#import <Accelerate/Accelerate.h>

@interface OSXVideoGrabber ()
@property (nonatomic, retain) AVCaptureSession *captureSession;
@end

@implementation OSXVideoGrabber
@synthesize captureSession;

#pragma mark -
#pragma mark Initialization
- (instancetype)init {
	self = [super init];
	if (self) {
		captureInput = nil;
		captureOutput = nil;
		device = nil;

		bInitCalled = NO;
		grabberPtr = NULL;
		deviceID = 0;
		width = 0;
		height = 0;
		currentFrame = 0;
	}
	return self;
}

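// Configures the capture device, input, output and session for the requested
// size and framerate, falling back to the closest supported dimensions when the
// exact size is not available. Returns YES on success.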
- (BOOL)initCapture:(int)framerate capWidth:(int)w capHeight:(int)h{
	NSArray * devices;
	if (@available(macOS 10.15, *)) {
		AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[
			AVCaptureDeviceTypeBuiltInWideAngleCamera,
			AVCaptureDeviceTypeExternalUnknown,
		] mediaType:nil position:AVCaptureDevicePositionUnspecified];
		devices = [session devices];
	} else {
		devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	}

	if([devices count] > 0) {
		if(deviceID > [devices count]-1)
			deviceID = [devices count]-1;

		// We set the device
		device = [devices objectAtIndex:deviceID];

		NSError *error = nil;
		[device lockForConfiguration:&error];

		if(!error) {

			float smallestDist = 99999999.0;
			int bestW = 0, bestH = 0;

			// Set width and height to the requested dimensions.
			// We then check whether those dimensions are supported and, if not, find the closest matching size.
			width = w;
			height = h;

			glm::vec2 requestedDimension(width, height);

			AVCaptureDeviceFormat * bestFormat = nullptr;
			for ( AVCaptureDeviceFormat * format in [device formats] ) {
				CMFormatDescriptionRef desc = format.formatDescription;
				CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(desc);

				int tw = dimensions.width;
				int th = dimensions.height;
				glm::vec2 formatDimension(tw, th);

				if( tw == width && th == height ){
					bestW = tw;
					bestH = th;
					bestFormat = format;
					break;
				}

				float dist = glm::length(formatDimension - requestedDimension);
				if( dist < smallestDist ){
					smallestDist = dist;
					bestW = tw;
					bestH = th;
					bestFormat = format;
				}

				ofLogVerbose("ofAvFoundationGrabber") << " supported dimensions are: " << dimensions.width << " " << dimensions.height;
			}

			// Set the new dimensions and format
			if( bestFormat != nullptr && bestW != 0 && bestH != 0 ){
				if( bestW != width || bestH != height ){
					ofLogWarning("ofAvFoundationGrabber") << " requested width and height aren't supported. Setting capture size to closest match: " << bestW << " by " << bestH;
				}

				[device setActiveFormat:bestFormat];
				width = bestW;
				height = bestH;
			}

			// Only set the framerate if it has been set by the user
			if( framerate > 0 ){

				AVFrameRateRange * desiredRange = nil;
				NSArray * supportedFrameRates = device.activeFormat.videoSupportedFrameRateRanges;

				int numMatch = 0;
				for(AVFrameRateRange * range in supportedFrameRates){

					if( floor(range.minFrameRate) <= framerate && ceil(range.maxFrameRate) >= framerate ){
						ofLogVerbose("ofAvFoundationGrabber") << "found good framerate range, min: " << range.minFrameRate << " max: " << range.maxFrameRate << " for requested fps: " << framerate;
						desiredRange = range;
						numMatch++;
					}
				}

				if( numMatch > 0 ){
					//TODO: this crashes on some devices ( Orbecc Astra Pro )
					device.activeVideoMinFrameDuration = desiredRange.minFrameDuration;
					device.activeVideoMaxFrameDuration = desiredRange.maxFrameDuration;
				}else{
					ofLogError("ofAvFoundationGrabber") << " could not set framerate to: " << framerate << ". Device supports: ";
					for(AVFrameRateRange * range in supportedFrameRates){
						ofLogError() << " framerate range of: " << range.minFrameRate << " to " << range.maxFrameRate;
					}
				}

			}

			[device unlockForConfiguration];
		} else {
			NSLog(@"OSXVideoGrabber Init Error: %@", error);
		}

		// We set up the input
		captureInput = [AVCaptureDeviceInput
							deviceInputWithDevice:device
							error:nil];

		// We set up the output
		captureOutput = [[AVCaptureVideoDataOutput alloc] init];
		// While a frame is being processed in the -captureOutput:didOutputSampleBuffer:fromConnection: delegate method, no other frames are added to the queue.
		// If you don't want this behaviour, set the property to NO.
		captureOutput.alwaysDiscardsLateVideoFrames = YES;

		// We create a serial queue to handle the processing of our frames
		dispatch_queue_t queue;
		queue = dispatch_queue_create("cameraQueue", NULL);
		[captureOutput setSampleBufferDelegate:self queue:queue];

		NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
			[NSNumber numberWithDouble:width], (id)kCVPixelBufferWidthKey,
			[NSNumber numberWithDouble:height], (id)kCVPixelBufferHeightKey,
			[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
			nil];
		[captureOutput setVideoSettings:videoSettings];

		// And we create a capture session
		if(self.captureSession) {
			self.captureSession = nil;
		}
		self.captureSession = [[AVCaptureSession alloc] init];

		[self.captureSession beginConfiguration];

		// We add input and output
		[self.captureSession addInput:captureInput];
		[self.captureSession addOutput:captureOutput];

		// We specify a minimum duration for each frame (play with this setting to avoid having too many
		// frames waiting in the queue, which can cause memory issues). It is the inverse of the maximum
		// framerate: for example, a min frame duration of 1/10 s means we won't process more than
		// 10 frames per second. This must be set after the output has been added to the capture session.
		// Only applied when the user requested a framerate.
		if( framerate > 0 ){
			AVCaptureConnection *conn = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
			if ([conn isVideoMinFrameDurationSupported] == YES &&
				[conn isVideoMaxFrameDurationSupported] == YES) {
				[conn setVideoMinFrameDuration:CMTimeMake(1, framerate)];
				[conn setVideoMaxFrameDuration:CMTimeMake(1, framerate)];
			}
		}

		// We start the capture session
		[self.captureSession commitConfiguration];
		[self.captureSession startRunning];

		bInitCalled = YES;
		return YES;
	}
	return NO;
}

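// Starts (or resumes) the capture session and enables autofocus when the device supports it.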
-(void) startCapture{

	[self.captureSession startRunning];

	[captureInput.device lockForConfiguration:nil];

	//if( [captureInput.device isExposureModeSupported:AVCaptureExposureModeAutoExpose] ) [captureInput.device setExposureMode:AVCaptureExposureModeAutoExpose ];
	if( [captureInput.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] ) [captureInput.device setFocusMode:AVCaptureFocusModeAutoFocus ];

}

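// Locks focus (and, if the commented-out line is re-enabled, exposure) so the camera stops adjusting automatically.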
-(void) lockExposureAndFocus{

	[captureInput.device lockForConfiguration:nil];

	//if( [captureInput.device isExposureModeSupported:AVCaptureExposureModeLocked] ) [captureInput.device setExposureMode:AVCaptureExposureModeLocked ];
	if( [captureInput.device isFocusModeSupported:AVCaptureFocusModeLocked] ) [captureInput.device setFocusMode:AVCaptureFocusModeLocked ];

}

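// Detaches the sample buffer delegate, removes all inputs and outputs, and stops the session.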
-(void)stopCapture{
	if(self.captureSession) {
		if(captureOutput){
			if(captureOutput.sampleBufferDelegate != nil) {
				[captureOutput setSampleBufferDelegate:nil queue:NULL];
			}
		}

		// remove the inputs and outputs from the session
		for(AVCaptureInput *input1 in self.captureSession.inputs) {
			[self.captureSession removeInput:input1];
		}
		for(AVCaptureOutput *output1 in self.captureSession.outputs) {
			[self.captureSession removeOutput:output1];
		}

		[self.captureSession stopRunning];
	}
}

-(CGImageRef)getCurrentFrame{
	return currentFrame;
}

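// Returns the localized names of all attached video capture devices, in the same order used by setDevice:.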
-(std::vector <std::string>)listDevices{
	std::vector <std::string> deviceNames;

	NSArray * devices;
	if (@available(macOS 10.15, *)) {
		AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[
			AVCaptureDeviceTypeBuiltInWideAngleCamera,
			AVCaptureDeviceTypeExternalUnknown,
		] mediaType:nil position:AVCaptureDevicePositionUnspecified];
		devices = [session devices];
	} else {
		devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	}

	int i = 0;
	for (AVCaptureDevice * captureDevice in devices){
		deviceNames.push_back([captureDevice.localizedName UTF8String]);
		ofLogNotice() << "Device: " << i << ": " << deviceNames.back();
		i++;
	}
	return deviceNames;
}

-(void)setDevice:(int)_device{
	deviceID = _device;
}

#pragma mark -
#pragma mark AVCaptureSession delegate
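// Called on the serial cameraQueue for every captured frame. Copies (and, if needed,
// converts) the pixels into grabberPtr->pixelsTmp under capMutex so the main thread
// can pick them up in ofAVFoundationGrabber::update().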
- (void)captureOutput:(AVCaptureOutput *)captureOutput
		didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
		fromConnection:(AVCaptureConnection *)connection
{
	if(grabberPtr != NULL) {
		@autoreleasepool {
			CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
			// Lock the image buffer
			CVPixelBufferLockBaseAddress(imageBuffer, 0);

			if( grabberPtr != NULL && !grabberPtr->bLock ){

				unsigned char *isrc4 = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
				size_t widthIn = CVPixelBufferGetWidth(imageBuffer);
				size_t heightIn = CVPixelBufferGetHeight(imageBuffer);

				if( widthIn != grabberPtr->getWidth() || heightIn != grabberPtr->getHeight() ){
					ofLogError("ofAVFoundationGrabber") << " incoming image dimensions " << widthIn << " by " << heightIn << " don't match. This shouldn't happen! Returning.";
					// Unlock before bailing out so the buffer isn't left locked
					CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
					return;
				}

				if( grabberPtr->pixelFormat == OF_PIXELS_BGRA ){

					if( grabberPtr->capMutex.try_lock() ){
						grabberPtr->pixelsTmp.setFromPixels(isrc4, widthIn, heightIn, 4);
						grabberPtr->updatePixelsCB();
						grabberPtr->capMutex.unlock();
					}

				}else{

					ofPixels rgbConvertPixels;
					rgbConvertPixels.allocate(widthIn, heightIn, 3);

					vImage_Buffer srcImg;
					srcImg.width = widthIn;
					srcImg.height = heightIn;
					srcImg.data = isrc4;
					srcImg.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer);

					vImage_Buffer dstImg;
					dstImg.width = srcImg.width;
					dstImg.height = srcImg.height;
					dstImg.rowBytes = widthIn*3;
					dstImg.data = rgbConvertPixels.getData();

					vImage_Error err;
					err = vImageConvert_BGRA8888toRGB888(&srcImg, &dstImg, kvImageNoFlags);
					if(err != kvImageNoError){
						ofLogError("ofAVFoundationGrabber") << "Error using Accelerate to convert BGRA to RGB with vImageConvert_BGRA8888toRGB888, error: " << err;
					}else{

						if( grabberPtr->capMutex.try_lock() ){
							grabberPtr->pixelsTmp = rgbConvertPixels;
							grabberPtr->updatePixelsCB();
							grabberPtr->capMutex.unlock();
						}

					}
				}
			}

			// Unlock the image buffer (with the same flags used to lock it)
			CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
		}
	}
}

#pragma mark -
#pragma mark Memory management

- (void)dealloc {
	// Stop the capture session
	if(self.captureSession) {
		[self stopCapture];
		self.captureSession = nil;
	}
	if(captureOutput){
		if(captureOutput.sampleBufferDelegate != nil) {
			[captureOutput setSampleBufferDelegate:nil queue:NULL];
		}
		captureOutput = nil;
	}

	captureInput = nil;
	device = nil;

	if(grabberPtr) {
		[self eraseGrabberPtr];
	}
	grabberPtr = nil;
	if(currentFrame) {
		// release the currentFrame image
		CGImageRelease(currentFrame);
		currentFrame = nil;
	}
}

- (void)eraseGrabberPtr {
	grabberPtr = NULL;
}

@end

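// Illustrative usage sketch (not part of the original file): on macOS this grabber is
// normally driven through the ofVideoGrabber facade rather than instantiated directly.
// A minimal app loop might look roughly like the following, using the standard
// openFrameworks video grabber API:
//
//     ofVideoGrabber cam;
//     cam.setDesiredFrameRate(30);      // forwarded to setDesiredFrameRate()
//     cam.setup(1280, 720);             // ends up in ofAVFoundationGrabber::setup()
//
//     // each frame:
//     cam.update();                     // copies the latest pixels off the capture queue
//     if(cam.isFrameNew()){
//         ofPixels & px = cam.getPixels();
//         // use px ...
//     }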
ofAVFoundationGrabber::ofAVFoundationGrabber(){
	fps = -1;
	grabber = [[OSXVideoGrabber alloc] init];
	width = 0;
	height = 0;
	bIsInit = false;
	pixelFormat = OF_PIXELS_RGB;
	newFrame = false;
	bHavePixelsChanged = false;
	bLock = false;
}

ofAVFoundationGrabber::~ofAVFoundationGrabber(){
	ofLog(OF_LOG_VERBOSE, "ofAVFoundationGrabber destructor");
	close();
}

void ofAVFoundationGrabber::clear(){
	if( pixels.size() ){
		pixels.clear();
		pixelsTmp.clear();
	}
}

void ofAVFoundationGrabber::close(){
	bLock = true;
	if(grabber) {
		// Stop and release the OSXVideoGrabber
		[grabber stopCapture];
		[grabber eraseGrabberPtr];
		grabber = nil;
	}
	clear();
	bIsInit = false;
	width = 0;
	height = 0;
	fps = -1;
	pixelFormat = OF_PIXELS_RGB;
	newFrame = false;
	bHavePixelsChanged = false;
	bLock = false;
}

void ofAVFoundationGrabber::setDesiredFrameRate(int capRate){
	fps = capRate;
}

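// Creates the Objective-C grabber if needed, starts the capture at the closest supported
// size and allocates the pixel buffers to match the size the camera actually delivers.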
bool ofAVFoundationGrabber::setup(int w, int h){

	if( grabber == nil ){
		grabber = [[OSXVideoGrabber alloc] init];
	}

	grabber->grabberPtr = this;

	if( [grabber initCapture:fps capWidth:w capHeight:h] ) {

		// Update the pixel dimensions based on what the camera supports
		width = grabber->width;
		height = grabber->height;

		clear();

		pixels.allocate(width, height, pixelFormat);
		pixelsTmp.allocate(width, height, pixelFormat);

		[grabber startCapture];

		newFrame = false;
		bIsInit = true;

		return true;
	} else {
		return false;
	}
}


bool ofAVFoundationGrabber::isInitialized() const{
	return bIsInit;
}

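// Called from the main thread once per frame: copies the most recent camera pixels out of
// pixelsTmp under capMutex and raises the new-frame flag when fresh data arrived.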
void ofAVFoundationGrabber::update(){
	newFrame = false;

	if (bHavePixelsChanged == true){
		capMutex.lock();
		pixels = pixelsTmp;
		bHavePixelsChanged = false;
		capMutex.unlock();
		newFrame = true;
	}
}

ofPixels & ofAVFoundationGrabber::getPixels(){
	return pixels;
}

const ofPixels & ofAVFoundationGrabber::getPixels() const{
	return pixels;
}

bool ofAVFoundationGrabber::isFrameNew() const{
	return newFrame;
}

void ofAVFoundationGrabber::updatePixelsCB(){
	//TODO: does this need a mutex? or some thread protection?
	bHavePixelsChanged = true;
}

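// Wraps the Objective-C listDevices and converts the names into ofVideoDevice entries,
// using the index in the list as the device id.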
std::vector <ofVideoDevice> ofAVFoundationGrabber::listDevices() const{
	std::vector <std::string> devList = [grabber listDevices];

	std::vector <ofVideoDevice> devices;
	for(int i = 0; i < devList.size(); i++){
		ofVideoDevice vd;
		vd.deviceName = devList[i];
		vd.id = i;
		vd.bAvailable = true;
		devices.push_back(vd);
	}

	return devices;
}

void ofAVFoundationGrabber::setDeviceID(int deviceID) {
	if( grabber == nil ){
		grabber = [[OSXVideoGrabber alloc] init];
	}
	[grabber setDevice:deviceID];
	device = deviceID;
}

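// Only OF_PIXELS_RGB, OF_PIXELS_RGBA and OF_PIXELS_BGRA are accepted; any other format is
// rejected and the current pixel format is kept.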
bool ofAVFoundationGrabber::setPixelFormat(ofPixelFormat PixelFormat) {
	if(PixelFormat == OF_PIXELS_RGB){
		pixelFormat = PixelFormat;
		return true;
	} else if(PixelFormat == OF_PIXELS_RGBA){
		pixelFormat = PixelFormat;
		return true;
	} else if(PixelFormat == OF_PIXELS_BGRA){
		pixelFormat = PixelFormat;
		return true;
	}
	return false;
}

ofPixelFormat ofAVFoundationGrabber::getPixelFormat() const{
	return pixelFormat;
}

#endif