No description
You can't select more than 25 topics. Topics must start with a letter or number, can include hyphens (-) and can be up to 35 characters long.

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767
  1. #if !PLATFORM_TVOS && UNITY_USES_WEBCAM
  2. #include "CameraCapture.h"
  3. #include "AVCapture.h"
  4. #include "CMVideoSampling.h"
  5. #include "CVTextureCache.h"
  6. #import <CoreVideo/CoreVideo.h>
  7. #include <cmath>
// Registry of controllers currently capturing synchronized color/depth streams;
// created lazily by +getActiveColorAndDepthCameraControllers.
  8. static NSMutableArray<CameraCaptureController*> *activeColorAndDepthCameraControllers = nil;
  9. @implementation CameraCaptureController
  10. {
  11. AVCaptureDevice* _captureDevice;
  12. AVCaptureSession* _captureSession;
  13. AVCaptureDeviceInput* _captureInput;
  14. AVCaptureVideoDataOutput* _captureOutput;
  15. AVCaptureDepthDataOutput* _captureDepthOutput;
  16. AVCaptureDataOutputSynchronizer* _captureSynchronizer;
  17. @public bool _isDepth;
  18. uint8_t* _pixelBufferCopy;
  19. CMVideoSampling _cmVideoSampling;
  20. NSString* _preset;
  21. CGPoint _focusPoint;
  22. AVCaptureFocusMode _focusMode;
  23. @public void* _userData;
  24. @public size_t _width, _height;
  25. }
  26. - (bool)initCapture:(AVCaptureDevice*)device
  27. {
  28. if (UnityGetAVCapturePermission(avVideoCapture) == avCapturePermissionDenied)
  29. return false;
  30. self.captureDevice = device;
  31. self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice: device error: nil];
  32. self.captureOutput = [[AVCaptureVideoDataOutput alloc] init];
  33. if (self.captureOutput == nil || self.captureInput == nil)
  34. return false;
  35. self.captureOutput.alwaysDiscardsLateVideoFrames = YES;
  36. NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
  37. [self.captureOutput setVideoSettings: options];
  38. CMVideoSampling_Initialize(&self->_cmVideoSampling);
  39. _width = _height = 0;
  40. _focusPoint = CGPointMake(0.5, 0.5); // default focus point is center
  41. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  42. _pixelBufferCopy = nullptr;
  43. return true;
  44. }
  45. - (void)setCaptureFPS:(float)fps
  46. {
  47. if ([self.captureDevice lockForConfiguration: nil])
  48. {
  49. if (self.captureDevice.activeFormat)
  50. {
  51. fps = [self pickAvailableFrameRate: fps];
  52. self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  53. self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  54. }
  55. else
  56. {
  57. // In some corner cases (seeing this on iPod iOS 6.1.5) activeFormat is null.
  58. #pragma clang diagnostic push
  59. #pragma clang diagnostic ignored "-Wdeprecated-declarations"
  60. self.captureOutput.minFrameDuration = CMTimeMake(1, fps);
  61. #pragma clang diagnostic pop
  62. }
  63. [self.captureDevice unlockForConfiguration];
  64. }
  65. }
  66. - (bool)initCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps
  67. {
  68. if (![self initCapture: device])
  69. return false;
  70. self.captureSession = [[AVCaptureSession alloc] init];
  71. [self.captureSession addInput: self.captureInput];
  72. [self.captureSession addOutput: self.captureOutput];
  73. // queue on main thread to simplify gles life
  74. [self.captureOutput setSampleBufferDelegate: self queue: dispatch_get_main_queue()];
  75. self->_preset = preset;
  76. [self.captureSession setSessionPreset: preset];
  77. [self setCaptureFPS: fps];
  78. return true;
  79. }
  80. - (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
  81. {
  82. intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&self->_cmVideoSampling, sampleBuffer, &_width, &_height);
  83. UnityDidCaptureVideoFrame(tex, self->_userData);
  84. }
  85. - (void)capturePixelBufferToMemBuffer:(uint8_t*)dst
  86. {
  87. CVPixelBufferRef pbuf = (CVPixelBufferRef)self->_cmVideoSampling.cvImageBuffer;
  88. const size_t srcRowSize = CVPixelBufferGetBytesPerRow(pbuf);
  89. const size_t bufSize = srcRowSize * self->_height;
  90. if (self->_pixelBufferCopy == nullptr)
  91. {
  92. self->_pixelBufferCopy = (uint8_t*)::malloc(bufSize);
  93. }
  94. // while not the best way memory-wise, we want to minimize stalling
  95. CVPixelBufferLockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  96. {
  97. ::memcpy(self->_pixelBufferCopy, CVPixelBufferGetBaseAddress(pbuf), bufSize);
  98. }
  99. CVPixelBufferUnlockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  100. OSType pixelFormat = CVPixelBufferGetPixelFormatType(pbuf);
  101. size_t bpp = 0;
  102. switch (pixelFormat)
  103. {
  104. case kCVPixelFormatType_32BGRA:
  105. bpp = 4;
  106. break;
  107. case kCVPixelFormatType_DepthFloat16:
  108. bpp = 2;
  109. break;
  110. default:
  111. assert(false);
  112. break;
  113. }
  114. const size_t dstRowSize = self->_width * bpp;
  115. uint8_t* src = self->_pixelBufferCopy + (self->_height - 1) * srcRowSize;
  116. for (size_t i = 0; i < self->_height; ++i)
  117. {
  118. ::memcpy(dst, src, dstRowSize);
  119. dst += dstRowSize;
  120. src -= srcRowSize;
  121. }
  122. }
  123. - (int)isCVTextureFlipped
  124. {
  125. return IsCVTextureFlipped(self->_cmVideoSampling.cvTextureCacheTexture);
  126. }
  127. + (BOOL)focusPointSupported:(AVCaptureDevice*)captureDevice withFocusMode:(AVCaptureFocusMode)focusMode
  128. {
  129. return captureDevice.focusPointOfInterestSupported && [captureDevice isFocusModeSupported: focusMode];
  130. }
  131. - (int)setFocusPointWithX:(float)x Y:(float)y
  132. {
  133. if (x < 0 || x > 1 || y < 0 || y > 1)
  134. {
  135. _focusPoint = CGPointMake(0.5, 0.5); // default value for iOS
  136. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  137. }
  138. else
  139. {
  140. _focusPoint = CGPointMake(x, 1.0 - y);
  141. _focusMode = AVCaptureFocusModeAutoFocus;
  142. }
  143. return [self setFocusPoint];
  144. }
  145. - (int)setFocusPoint
  146. {
  147. if (self.captureDevice != nil && [CameraCaptureController focusPointSupported: self.captureDevice withFocusMode: _focusMode])
  148. {
  149. if ([self.captureDevice lockForConfiguration: nil])
  150. {
  151. self.captureDevice.focusPointOfInterest = _focusPoint;
  152. self.captureDevice.focusMode = _focusMode;
  153. [self.captureDevice unlockForConfiguration];
  154. return 1;
  155. }
  156. }
  157. return 0;
  158. }
  159. + (NSMutableArray<CameraCaptureController*>*)getActiveColorAndDepthCameraControllers
  160. {
  161. if (activeColorAndDepthCameraControllers == nil)
  162. {
  163. activeColorAndDepthCameraControllers = [[NSMutableArray alloc] init];
  164. }
  165. return activeColorAndDepthCameraControllers;
  166. }
  167. + (void)addColorAndDepthCameraController:(CameraCaptureController*)controller
  168. {
  169. CameraCaptureController* prevController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: controller->_isDepth];
  170. if (prevController != nil)
  171. [prevController pause];
  172. CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
  173. if (otherController != nil)
  174. {
  175. [otherController.captureSession stopRunning];
  176. [otherController clearColorAndDepthCameraCaptureSession];
  177. }
  178. [[self getActiveColorAndDepthCameraControllers] addObject: controller];
  179. }
  180. + (void)removeColorAndDepthCameraController:(CameraCaptureController*)controller
  181. {
  182. [[self getActiveColorAndDepthCameraControllers] removeObject: controller];
  183. CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
  184. if (otherController != nil)
  185. {
  186. [otherController initColorAndDepthCameraCaptureSession];
  187. [otherController.captureSession startRunning];
  188. }
  189. }
  190. + (void)clearColorAndDepthCameraControllers
  191. {
  192. NSMutableArray<CameraCaptureController*>* activeColorAndDepthCameraControllers = [self getActiveColorAndDepthCameraControllers];
  193. for (CameraCaptureController *controller in activeColorAndDepthCameraControllers)
  194. {
  195. if (controller.captureSession != nil)
  196. {
  197. [controller.captureSession stopRunning];
  198. [controller clearColorAndDepthCameraCaptureSession];
  199. }
  200. }
  201. [activeColorAndDepthCameraControllers removeAllObjects];
  202. }
  203. + (CameraCaptureController*)findColorAndDepthCameraController:(AVCaptureDevice*)device isDepth:(bool)isDepth
  204. {
  205. for (CameraCaptureController *controller in [self getActiveColorAndDepthCameraControllers])
  206. {
  207. if (controller.captureDevice == device && controller->_isDepth == isDepth)
  208. return controller;
  209. }
  210. return nil;
  211. }
  212. - (bool)initColorAndDepthCameraCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps isDepth:(bool)isDepth
  213. {
  214. if (![self initCapture: device])
  215. return false;
  216. self.captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
  217. if (self.captureDepthOutput == nil)
  218. return false;
  219. self.captureDepthOutput.filteringEnabled = YES; // getting filtered depth data to avoid invalid values
  220. self.captureDepthOutput.alwaysDiscardsLateDepthData = YES;
  221. self->_preset = preset;
  222. [self initColorAndDepthCameraCaptureSession];
  223. [self setCaptureFPS: fps];
  224. NSArray<AVCaptureOutput*> *outputs = [NSArray arrayWithObjects: self.captureOutput, self.captureDepthOutput, nil];
  225. self.captureSynchronizer = [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs: outputs];
  226. // queue on main thread to simplify gles life
  227. [self.captureSynchronizer setDelegate: self queue: dispatch_get_main_queue()];
  228. _isDepth = isDepth;
  229. return true;
  230. }
  231. - (void)initColorAndDepthCameraCaptureSession
  232. {
  233. self.captureSession = [[AVCaptureSession alloc] init];
  234. [self.captureSession setSessionPreset: self->_preset];
  235. [self.captureSession addInput: self.captureInput];
  236. [self.captureSession addOutput: self.captureOutput];
  237. [self.captureSession addOutput: self.captureDepthOutput];
  238. }
  239. - (void)clearColorAndDepthCameraCaptureSession
  240. {
  241. [self.captureSession removeInput: self.captureInput];
  242. [self.captureSession removeOutput: self.captureOutput];
  243. [self.captureSession removeOutput: self.captureDepthOutput];
  244. self.captureSession = nil;
  245. }
  246. - (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
  247. {
  248. AVCaptureSynchronizedSampleBufferData *sampleData = (AVCaptureSynchronizedSampleBufferData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureOutput];
  249. if (CMSampleBufferGetImageBuffer(sampleData.sampleBuffer) != nil)
  250. {
  251. CameraCaptureController* colorController = !self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: false];
  252. if (colorController != nil)
  253. {
  254. intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&colorController->_cmVideoSampling, sampleData.sampleBuffer, &(colorController->_width), &(colorController->_height));
  255. UnityDidCaptureVideoFrame(tex, colorController->_userData);
  256. }
  257. }
  258. AVCaptureSynchronizedDepthData *depthData = (AVCaptureSynchronizedDepthData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureDepthOutput];
  259. if (depthData.depthData.depthDataMap != nil)
  260. {
  261. CameraCaptureController* depthController = self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: true];
  262. if (depthController != nil)
  263. {
  264. intptr_t tex = (intptr_t)CMVideoSampling_ImageBuffer(&depthController->_cmVideoSampling, [depthData.depthData depthDataByConvertingToDepthDataType: kCVPixelFormatType_DepthFloat16].depthDataMap, &(depthController->_width), &(depthController->_height));
  265. UnityDidCaptureVideoFrame(tex, depthController->_userData);
  266. }
  267. }
  268. }
  269. - (void)start
  270. {
  271. if (self.captureDepthOutput != nil)
  272. {
  273. [CameraCaptureController addColorAndDepthCameraController: self];
  274. }
  275. else
  276. {
  277. [CameraCaptureController clearColorAndDepthCameraControllers];
  278. }
  279. [self.captureSession startRunning];
  280. }
  281. - (void)pause
  282. {
  283. [self.captureSession stopRunning];
  284. if (self.captureDepthOutput != nil)
  285. {
  286. [CameraCaptureController removeColorAndDepthCameraController: self];
  287. }
  288. }
  289. - (void)stop
  290. {
  291. [self.captureSession stopRunning];
  292. [self.captureSession removeInput: self.captureInput];
  293. [self.captureSession removeOutput: self.captureOutput];
  294. self.captureInput = nil;
  295. self.captureOutput = nil;
  296. if (self.captureDepthOutput != nil)
  297. {
  298. self.captureSynchronizer = nil;
  299. [self.captureSession removeOutput: self.captureDepthOutput];
  300. self.captureDepthOutput = nil;
  301. [CameraCaptureController removeColorAndDepthCameraController: self];
  302. }
  303. self.captureDevice = nil;
  304. self.captureSession = nil;
  305. if (self->_pixelBufferCopy != nullptr)
  306. {
  307. ::free(self->_pixelBufferCopy);
  308. self->_pixelBufferCopy = nullptr;
  309. }
  310. CMVideoSampling_Uninitialize(&self->_cmVideoSampling);
  311. }
  312. - (float)pickAvailableFrameRate:(float)fps
  313. {
  314. AVFrameRateRange* bestRange = nil;
  315. float minDiff = INFINITY;
  316. float epsilon = 0.1;
  317. fps = fps > epsilon ? fps : 24;
  318. for (AVFrameRateRange* rate in self.captureDevice.activeFormat.videoSupportedFrameRateRanges)
  319. {
  320. if (fps + epsilon > rate.minFrameRate && fps - epsilon < rate.maxFrameRate)
  321. return fps;
  322. else
  323. {
  324. float diff = ::fmin(::fabs(fps - rate.minFrameRate), ::fabs(fps - rate.maxFrameRate));
  325. if (diff < minDiff)
  326. {
  327. minDiff = diff;
  328. bestRange = rate;
  329. }
  330. }
  331. }
  332. return fps > bestRange.maxFrameRate ? bestRange.maxFrameRate : bestRange.minFrameRate;
  333. }
  334. @synthesize captureDevice = _captureDevice;
  335. @synthesize captureSession = _captureSession;
  336. @synthesize captureOutput = _captureOutput;
  337. @synthesize captureInput = _captureInput;
  338. @synthesize captureDepthOutput = _captureDepthOutput;
  339. @synthesize captureSynchronizer = _captureSynchronizer;
  340. @end
  341. // Preset for getting depth data with max resolution available
  342. static NSString* const depthCaptureSessionPreset = AVCaptureSessionPresetPhoto;
// Cached wrappers for every discovered capture device; created in
// UnityEnumVideoCaptureDevices and indexed by UnityInitCameraCapture.
  343. static NSMutableArray<CameraCaptureDevice*> *videoCaptureDevices = nil;
  344. @implementation CameraCaptureDevice
  345. {
  346. @public AVCaptureDevice* _device;
  347. @public int _frontFacing;
  348. @public int _autoFocusPointSupported;
  349. @public WebCamKind _kind;
  350. @public NSMutableArray<NSValue*>* _resolutions;
  351. NSMutableArray<NSString*>* _resPresets;
  352. }
  353. - (bool)isColorAndDepthCaptureDevice
  354. {
  355. for (AVCaptureDeviceFormat *format in [self->_device formats])
  356. {
  357. if ([format supportedDepthDataFormats].count > 0)
  358. return true;
  359. }
  360. return false;
  361. }
  362. - (WebCamKind)getKind
  363. {
  364. if ([self isColorAndDepthCaptureDevice])
  365. return kWebCamColorAndDepth;
  366. AVCaptureDeviceType type = _device.deviceType;
  367. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInWideAngleCamera])
  368. return kWebCamWideAngle;
  369. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInUltraWideCamera])
  370. return kWebCamUltraWideAngle;
  371. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInTelephotoCamera])
  372. return kWebCamTelephoto;
  373. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInDualCamera])
  374. return kWebCamTelephoto;
  375. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInDualWideCamera])
  376. return kWebCamWideAngle;
  377. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInTripleCamera])
  378. return kWebCamUltraWideAngle;
  379. #if defined(__IPHONE_17_0) || defined(__TVOS_17_0)
  380. if (@available(iOS 17.0, *))
  381. {
  382. if ([type isEqualToString: AVCaptureDeviceTypeContinuityCamera])
  383. return kWebCamWideAngle;
  384. }
  385. #endif
  386. #if PLATFORM_IOS && defined(__IPHONE_15_4)
  387. if (@available(iOS 15.4, *))
  388. {
  389. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInLiDARDepthCamera])
  390. return kWebCamColorAndDepth;
  391. }
  392. if ([type isEqualToString: AVCaptureDeviceTypeBuiltInTrueDepthCamera])
  393. return kWebCamColorAndDepth;
  394. #endif
  395. return kWebCamUnknown;
  396. }
  397. - (void)fillCaptureDeviceResolutions
  398. {
  399. static NSString* preset[] =
  400. {
  401. AVCaptureSessionPresetLow, // usually 192x144
  402. AVCaptureSessionPreset352x288,
  403. AVCaptureSessionPresetMedium, // usually 480x320
  404. AVCaptureSessionPreset640x480,
  405. AVCaptureSessionPreset1280x720,
  406. AVCaptureSessionPreset1920x1080, // usually the same as AVCaptureSessionPresetHigh
  407. AVCaptureSessionPreset3840x2160,
  408. };
  409. const int count = sizeof(preset) / sizeof(preset[0]);
  410. self->_resolutions = [NSMutableArray arrayWithCapacity: count];
  411. self->_resPresets = [NSMutableArray arrayWithCapacity: count];
  412. AVCaptureInput* captureInput = [AVCaptureDeviceInput deviceInputWithDevice: self->_device error: nil];
  413. //Don't attempt to setup an AVCaptureSession if the user has explicitly denied permission to use the camera.
  414. if (captureInput != nil)
  415. {
  416. AVCaptureSession* captureSession = [[AVCaptureSession alloc] init];
  417. [captureSession addInput: captureInput];
  418. if (self->_kind == kWebCamColorAndDepth)
  419. {
  420. AVCaptureDepthDataOutput* captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
  421. if ([captureSession canSetSessionPreset: depthCaptureSessionPreset])
  422. {
  423. [captureSession setSessionPreset: AVCaptureSessionPresetPhoto];
  424. [captureSession addOutput: captureDepthOutput];
  425. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeDepthDataFormat.formatDescription); // for ColorAndDepth camera return depth buffer resolution
  426. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  427. [self->_resPresets addObject: AVCaptureSessionPresetPhoto];
  428. }
  429. }
  430. else
  431. {
  432. for (int i = 0; i < count; ++i)
  433. {
  434. if ([captureSession canSetSessionPreset: preset[i]])
  435. {
  436. [captureSession setSessionPreset: preset[i]];
  437. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeFormat.formatDescription);
  438. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  439. [self->_resPresets addObject: preset[i]];
  440. }
  441. }
  442. }
  443. }
  444. }
  445. - (NSString*)pickPresetFromWidth:(int)w height:(int)h
  446. {
  447. if (self->_kind == kWebCamColorAndDepth)
  448. {
  449. return depthCaptureSessionPreset;
  450. }
  451. int requestedWidth = w > 0 ? w : 640;
  452. int requestedHeight = h > 0 ? h : 480;
  453. if (requestedHeight > requestedWidth) // hardware camera frame is landscape oriented
  454. std::swap(requestedWidth, requestedHeight);
  455. NSInteger ret = -1;
  456. double bestMatch = std::numeric_limits<double>::max();
  457. for (NSInteger i = 0, n = [_resolutions count]; i < n; ++i)
  458. {
  459. double width = [self->_resolutions[i] CGSizeValue].width;
  460. double height = [self->_resolutions[i] CGSizeValue].height;
  461. double match = std::abs(std::log(requestedWidth / width)) + std::abs(std::log(requestedHeight / height));
  462. if (match < bestMatch)
  463. {
  464. ret = i;
  465. bestMatch = match;
  466. }
  467. }
  468. NSAssert(ret != -1, @"Cannot pick capture preset");
  469. return ret != -1 ? self->_resPresets[ret] : AVCaptureSessionPresetHigh;
  470. }
  471. - (CameraCaptureDevice*)initWithDevice:(AVCaptureDevice*)device
  472. {
  473. self->_device = device;
  474. self->_frontFacing = device.position == AVCaptureDevicePositionFront ? 1 : 0;
  475. self->_autoFocusPointSupported = [CameraCaptureController focusPointSupported: device withFocusMode: AVCaptureFocusModeAutoFocus] ? 1 : 0;
  476. self->_kind = [self getKind];
  477. [self fillCaptureDeviceResolutions];
  478. return self;
  479. }
  480. - (bool)initCaptureForController:(CameraCaptureController*)controller width:(int)w height:(int)h fps:(float)fps isDepth:(bool)isDepth
  481. {
  482. bool initResult = false;
  483. NSString *preset = [self pickPresetFromWidth: w height: h];
  484. if ([self isColorAndDepthCaptureDevice])
  485. {
  486. initResult = [controller initColorAndDepthCameraCapture: self->_device preset: preset fps: fps isDepth: isDepth];
  487. }
  488. else
  489. {
  490. assert(!isDepth);
  491. initResult = [controller initCapture: self->_device preset: preset fps: fps];
  492. }
  493. return initResult;
  494. }
  495. + (bool)initialized
  496. {
  497. return videoCaptureDevices != nil;
  498. }
  499. + (void)createCameraCaptureDevicesArray
  500. {
  501. videoCaptureDevices = [NSMutableArray arrayWithCapacity: 8];
  502. }
  503. + (void)addCameraCaptureDevice:(AVCaptureDevice*)device
  504. {
  505. [videoCaptureDevices addObject: [[CameraCaptureDevice alloc] initWithDevice: device]];
  506. }
  507. @end
  508. extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
  509. {
  510. if (![CameraCaptureDevice initialized])
  511. {
  512. [CameraCaptureDevice createCameraCaptureDevicesArray];
  513. NSMutableArray<AVCaptureDeviceType>* captureDevices = [NSMutableArray arrayWithObjects: AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, nil];
  514. [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualCamera];
  515. [captureDevices addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
  516. if (UnityiOS130orNewer())
  517. {
  518. [captureDevices addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
  519. [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
  520. [captureDevices addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
  521. }
  522. AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: captureDevices mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionUnspecified];
  523. for (AVCaptureDevice* device in [captureDeviceDiscoverySession devices])
  524. {
  525. [CameraCaptureDevice addCameraCaptureDevice: device];
  526. }
  527. }
  528. // we should not provide camera devices information while access has not been granted
  529. // but we need to try to enumerate camera devices anyway to trigger permission request dialog
  530. if ([AVCaptureDevice authorizationStatusForMediaType: AVMediaTypeVideo] != AVAuthorizationStatusAuthorized)
  531. return;
  532. const unsigned kMaxResolutions = 16;
  533. int resolutions[kMaxResolutions * 2];
  534. for (CameraCaptureDevice *cameraCaptureDevice in videoCaptureDevices)
  535. {
  536. int resCount = (int)[cameraCaptureDevice->_resolutions count];
  537. assert(resCount <= kMaxResolutions && "Increase the constant above");
  538. for (int i = 0; i < resCount; ++i)
  539. {
  540. resolutions[i * 2] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].width;
  541. resolutions[i * 2 + 1] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].height;
  542. }
  543. callback(udata, [cameraCaptureDevice->_device.localizedName UTF8String], cameraCaptureDevice->_frontFacing, cameraCaptureDevice->_autoFocusPointSupported, cameraCaptureDevice->_kind, resolutions, resCount);
  544. }
  545. }
  546. extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
  547. {
  548. if (videoCaptureDevices != nil && deviceIndex < videoCaptureDevices.count)
  549. {
  550. CameraCaptureController* controller = [CameraCaptureController alloc];
  551. bool initResult = [videoCaptureDevices[deviceIndex] initCaptureForController: controller width: w height: h fps: (float)fps isDepth: (isDepth != 0)];
  552. if (initResult)
  553. {
  554. controller->_userData = udata;
  555. return (__bridge_retained void*)controller;
  556. }
  557. controller = nil;
  558. }
  559. return 0;
  560. }
  561. extern "C" void UnityStartCameraCapture(void* capture)
  562. {
  563. [(__bridge CameraCaptureController*)capture start];
  564. }
  565. extern "C" void UnityPauseCameraCapture(void* capture)
  566. {
  567. [(__bridge CameraCaptureController*)capture pause];
  568. }
  569. extern "C" void UnityStopCameraCapture(void* capture)
  570. {
  571. CameraCaptureController* controller = (__bridge_transfer CameraCaptureController*)capture;
  572. [controller stop];
  573. controller = nil;
  574. }
  575. extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
  576. {
  577. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  578. if (controller == nil)
  579. return;
  580. *w = (int)controller->_width;
  581. *h = (int)controller->_height;
  582. }
  583. extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
  584. {
  585. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  586. if (controller == nil)
  587. return;
  588. assert(w == controller->_width && h == controller->_height);
  589. [controller capturePixelBufferToMemBuffer: (uint8_t*)dst_];
  590. }
  591. extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
  592. {
  593. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  594. if (controller == nil)
  595. return 0;
  596. // all cams are landscape.
  597. switch (UnityCurrentOrientation())
  598. {
  599. case portrait: return 90;
  600. case portraitUpsideDown: return 270;
  601. case landscapeLeft: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 180 : 0;
  602. case landscapeRight: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 0 : 180;
  603. default: assert(false && "bad orientation returned from UnityCurrentOrientation()"); break;
  604. }
  605. return 0;
  606. }
  607. extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
  608. {
  609. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  610. if (controller == nil)
  611. return 0;
  612. return [controller isCVTextureFlipped];
  613. }
  614. extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
  615. {
  616. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  617. if (controller == nil)
  618. return 0;
  619. return [controller setFocusPointWithX: x Y: y];
  620. }
  621. #else
  622. // STUBBED OUT UNTIL DEVELOPER FINDs AN AWESOME CAMERA SOLUTION FOR APPLE TV //
// No-op stubs for platforms without webcam support (this branch is compiled when
// PLATFORM_TVOS or !UNITY_USES_WEBCAM): every entry point does nothing and the
// int/pointer-returning ones report 0.
  623. extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
  624. {
  625. }
  626. extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
  627. {
  628. return 0;
  629. }
  630. extern "C" void UnityStartCameraCapture(void* capture)
  631. {
  632. }
  633. extern "C" void UnityPauseCameraCapture(void* capture)
  634. {
  635. }
  636. extern "C" void UnityStopCameraCapture(void* capture)
  637. {
  638. }
  639. extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
  640. {
  641. }
  642. extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
  643. {
  644. }
  645. extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
  646. {
  647. return 0;
  648. }
  649. extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
  650. {
  651. return 0;
  652. }
  653. extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
  654. {
  655. return 0;
  656. }
  657. #endif