PageRenderTime 58ms CodeModel.GetById 17ms RepoModel.GetById 0ms app.codeStats 1ms

/WHAMBUSH/PBJVision/Source/PBJVision.m

https://gitlab.com/urbanjunglestudio/whambush-ios
Objective C | 1401 lines | 1029 code | 281 blank | 91 comment | 237 complexity | 0c954ee4977abf3f477e3bd4020d55fe MD5 | raw file
  1. //
  2. // PBJVision.m
  3. // PBJVision
  4. //
  5. // Created by Patrick Piemonte on 4/30/13.
  6. // Copyright (c) 2013-present, Patrick Piemonte, http://patrickpiemonte.com
  7. //
  8. // Permission is hereby granted, free of charge, to any person obtaining a copy of
  9. // this software and associated documentation files (the "Software"), to deal in
  10. // the Software without restriction, including without limitation the rights to
  11. // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
  12. // the Software, and to permit persons to whom the Software is furnished to do so,
  13. // subject to the following conditions:
  14. //
  15. // The above copyright notice and this permission notice shall be included in all
  16. // copies or substantial portions of the Software.
  17. //
  18. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
  20. // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
  21. // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
  22. // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
  23. // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  24. //
  25. #import "PBJVision.h"
  26. #import "PBJVisionUtilities.h"
  27. #import "PBJMediaWriter.h"
  28. #import "PBJGLProgram.h"
  29. #import <CoreImage/CoreImage.h>
  30. #import <ImageIO/ImageIO.h>
  31. #import <OpenGLES/EAGL.h>
// Debug logging switch: DLog compiles to NSLog (prefixed "VISION: ") in debug
// builds when LOG_VISION is enabled, and to nothing in release (NDEBUG) builds.
#define LOG_VISION 1
#ifndef DLog
#if !defined(NDEBUG) && LOG_VISION
#   define DLog(fmt, ...) NSLog((@"VISION: " fmt), ##__VA_ARGS__);
#else
#   define DLog(...)
#endif
#endif
// Error domain for NSErrors produced by PBJVision.
NSString * const PBJVisionErrorDomain = @"PBJVisionErrorDomain";

// Minimum free disk space required before a recording is allowed to start.
static uint64_t const PBJVisionRequiredMinimumDiskSpaceInBytes = 49999872; // ~ 47 MB
// Width (points) used when generating capture thumbnails.
static CGFloat const PBJVisionThumbnailWidth = 160.0f;

// KVO contexts (distinct pointer values used to disambiguate observations)
static NSString * const PBJVisionFocusObserverContext = @"PBJVisionFocusObserverContext";
static NSString * const PBJVisionExposureObserverContext = @"PBJVisionExposureObserverContext";
static NSString * const PBJVisionWhiteBalanceObserverContext = @"PBJVisionWhiteBalanceObserverContext";
static NSString * const PBJVisionFlashModeObserverContext = @"PBJVisionFlashModeObserverContext";
static NSString * const PBJVisionTorchModeObserverContext = @"PBJVisionTorchModeObserverContext";
static NSString * const PBJVisionFlashAvailabilityObserverContext = @"PBJVisionFlashAvailabilityObserverContext";
static NSString * const PBJVisionTorchAvailabilityObserverContext = @"PBJVisionTorchAvailabilityObserverContext";
static NSString * const PBJVisionCaptureStillImageIsCapturingStillImageObserverContext = @"PBJVisionCaptureStillImageIsCapturingStillImageObserverContext";

// photo dictionary key definitions
NSString * const PBJVisionPhotoMetadataKey = @"PBJVisionPhotoMetadataKey";
NSString * const PBJVisionPhotoJPEGKey = @"PBJVisionPhotoJPEGKey";
NSString * const PBJVisionPhotoImageKey = @"PBJVisionPhotoImageKey";
NSString * const PBJVisionPhotoThumbnailKey = @"PBJVisionPhotoThumbnailKey";

// video dictionary key definitions
NSString * const PBJVisionVideoPathKey = @"PBJVisionVideoPathKey";
NSString * const PBJVisionVideoThumbnailKey = @"PBJVisionVideoThumbnailKey";
NSString * const PBJVisionVideoThumbnailArrayKey = @"PBJVisionVideoThumbnailArrayKey";
NSString * const PBJVisionVideoCapturedDurationKey = @"PBJVisionVideoCapturedDurationKey";
  62. // PBJGLProgram shader uniforms for pixel format conversion on the GPU
// PBJGLProgram shader uniforms for pixel format conversion on the GPU:
// indices for the luma (Y) and chroma (UV) texture samplers, plus a count
// sentinel for sizing uniform-location arrays.
typedef NS_ENUM(GLint, PBJVisionUniformLocationTypes)
{
    PBJVisionUniformY,
    PBJVisionUniformUV,
    PBJVisionUniformCount
};
  69. ///
// Private class extension: sample-buffer delegate conformances, media-writer
// callbacks, and all internal capture/render state.
@interface PBJVision () <
    AVCaptureAudioDataOutputSampleBufferDelegate,
    AVCaptureVideoDataOutputSampleBufferDelegate,
    PBJMediaWriterDelegate>
{
    // AV: session, front/back/audio devices, their inputs, and the three outputs
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_captureDeviceFront;
    AVCaptureDevice *_captureDeviceBack;
    AVCaptureDevice *_captureDeviceAudio;

    AVCaptureDeviceInput *_captureDeviceInputFront;
    AVCaptureDeviceInput *_captureDeviceInputBack;
    AVCaptureDeviceInput *_captureDeviceInputAudio;

    AVCaptureStillImageOutput *_captureOutputPhoto;
    AVCaptureAudioDataOutput *_captureOutputAudio;
    AVCaptureVideoDataOutput *_captureOutputVideo;

    // vision core
    PBJMediaWriter *_mediaWriter;

    // serial queues: one protects session configuration, one protects capture callbacks
    dispatch_queue_t _captureSessionDispatchQueue;
    dispatch_queue_t _captureCaptureDispatchQueue;

    PBJCameraDevice _cameraDevice;
    PBJCameraMode _cameraMode;
    PBJCameraOrientation _cameraOrientation;
    PBJCameraOrientation _previewOrientation;
    BOOL _autoUpdatePreviewOrientation;
    BOOL _autoFreezePreviewDuringCapture;
    BOOL _usesApplicationAudioSession;

    PBJFocusMode _focusMode;
    PBJExposureMode _exposureMode;
    PBJFlashMode _flashMode;
    PBJMirroringMode _mirroringMode;

    NSString *_captureSessionPreset;
    NSString *_captureDirectory;
    PBJOutputFormat _outputFormat;

    NSMutableSet* _captureThumbnailTimes;
    NSMutableSet* _captureThumbnailFrames;

    CGFloat _videoBitRate;
    NSInteger _audioBitRate;
    NSInteger _videoFrameRate;
    NSDictionary *_additionalCompressionProperties;

    // "current" aliases for whichever device/input/output is active
    AVCaptureDevice *_currentDevice;
    AVCaptureDeviceInput *_currentInput;
    AVCaptureOutput *_currentOutput;

    AVCaptureVideoPreviewLayer *_previewLayer;
    CGRect _cleanAperture;

    CMTime _startTimestamp;
    CMTime _timeOffset;
    CMTime _maximumCaptureDuration;

    // sample buffer rendering
    PBJCameraDevice _bufferDevice;
    PBJCameraOrientation _bufferOrientation;
    size_t _bufferWidth;
    size_t _bufferHeight;
    CGRect _presentationFrame;

    EAGLContext *_context;
    PBJGLProgram *_program;
    CVOpenGLESTextureRef _lumaTexture;
    CVOpenGLESTextureRef _chromaTexture;
    CVOpenGLESTextureCacheRef _videoTextureCache;

    CIContext *_ciContext;

    // flags (1-bit fields; __block so capture blocks can mutate them)
    struct {
        unsigned int previewRunning:1;
        unsigned int changingModes:1;
        unsigned int recording:1;
        unsigned int paused:1;
        unsigned int interrupted:1;
        unsigned int videoWritten:1;
        unsigned int videoRenderingEnabled:1;
        unsigned int audioCaptureEnabled:1;
        unsigned int thumbnailEnabled:1;
        unsigned int defaultVideoThumbnails:1;
        unsigned int videoCaptureFrame:1;
    } __block _flags;
}

// redeclared internally so KVO key paths like "currentDevice.adjustingFocus" work
@property (nonatomic) AVCaptureDevice *currentDevice;

@end
@implementation PBJVision

// explicit synthesis maps each public property onto the ivars declared above
@synthesize delegate = _delegate;
@synthesize currentDevice = _currentDevice;
@synthesize previewLayer = _previewLayer;
@synthesize cleanAperture = _cleanAperture;
@synthesize cameraOrientation = _cameraOrientation;
@synthesize previewOrientation = _previewOrientation;
@synthesize autoUpdatePreviewOrientation = _autoUpdatePreviewOrientation;
@synthesize autoFreezePreviewDuringCapture = _autoFreezePreviewDuringCapture;
@synthesize usesApplicationAudioSession = _usesApplicationAudioSession;
@synthesize cameraDevice = _cameraDevice;
@synthesize cameraMode = _cameraMode;
@synthesize focusMode = _focusMode;
@synthesize exposureMode = _exposureMode;
@synthesize flashMode = _flashMode;
@synthesize mirroringMode = _mirroringMode;
@synthesize outputFormat = _outputFormat;
@synthesize context = _context;
@synthesize presentationFrame = _presentationFrame;
@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureDirectory = _captureDirectory;
@synthesize audioBitRate = _audioBitRate;
@synthesize videoBitRate = _videoBitRate;
@synthesize additionalCompressionProperties = _additionalCompressionProperties;
@synthesize maximumCaptureDuration = _maximumCaptureDuration;
  172. #pragma mark - singleton
  173. + (PBJVision *)sharedInstance
  174. {
  175. static PBJVision *singleton = nil;
  176. static dispatch_once_t once = 0;
  177. dispatch_once(&once, ^{
  178. singleton = [[PBJVision alloc] init];
  179. });
  180. return singleton;
  181. }
  182. #pragma mark - getters/setters
  183. - (BOOL)isCaptureSessionActive
  184. {
  185. return ([_captureSession isRunning]);
  186. }
  187. - (BOOL)isRecording
  188. {
  189. return _flags.recording;
  190. }
  191. - (BOOL)isPaused
  192. {
  193. return _flags.paused;
  194. }
  195. - (void)setVideoRenderingEnabled:(BOOL)videoRenderingEnabled
  196. {
  197. _flags.videoRenderingEnabled = (unsigned int)videoRenderingEnabled;
  198. }
  199. - (BOOL)isVideoRenderingEnabled
  200. {
  201. return _flags.videoRenderingEnabled;
  202. }
  203. - (void)setAudioCaptureEnabled:(BOOL)audioCaptureEnabled
  204. {
  205. _flags.audioCaptureEnabled = (unsigned int)audioCaptureEnabled;
  206. }
  207. - (BOOL)isAudioCaptureEnabled
  208. {
  209. return _flags.audioCaptureEnabled;
  210. }
  211. - (void)setThumbnailEnabled:(BOOL)thumbnailEnabled
  212. {
  213. _flags.thumbnailEnabled = (unsigned int)thumbnailEnabled;
  214. }
  215. - (BOOL)thumbnailEnabled
  216. {
  217. return _flags.thumbnailEnabled;
  218. }
  219. - (void)setDefaultVideoThumbnails:(BOOL)defaultVideoThumbnails
  220. {
  221. _flags.defaultVideoThumbnails = (unsigned int)defaultVideoThumbnails;
  222. }
  223. - (BOOL)defaultVideoThumbnails
  224. {
  225. return _flags.defaultVideoThumbnails;
  226. }
  227. - (Float64)capturedAudioSeconds
  228. {
  229. if (_mediaWriter && CMTIME_IS_VALID(_mediaWriter.audioTimestamp)) {
  230. return CMTimeGetSeconds(CMTimeSubtract(_mediaWriter.audioTimestamp, _startTimestamp));
  231. } else {
  232. return 0.0;
  233. }
  234. }
  235. - (Float64)capturedVideoSeconds
  236. {
  237. if (_mediaWriter && CMTIME_IS_VALID(_mediaWriter.videoTimestamp)) {
  238. return CMTimeGetSeconds(CMTimeSubtract(_mediaWriter.videoTimestamp, _startTimestamp));
  239. } else {
  240. return 0.0;
  241. }
  242. }
  243. - (void)setCameraOrientation:(PBJCameraOrientation)cameraOrientation
  244. {
  245. if (cameraOrientation == _cameraOrientation)
  246. return;
  247. _cameraOrientation = cameraOrientation;
  248. if (self.autoUpdatePreviewOrientation) {
  249. [self setPreviewOrientation:cameraOrientation];
  250. }
  251. }
  252. - (void)setPreviewOrientation:(PBJCameraOrientation)previewOrientation {
  253. if (previewOrientation == _previewOrientation)
  254. return;
  255. if ([_previewLayer.connection isVideoOrientationSupported]) {
  256. _previewOrientation = previewOrientation;
  257. [self _setOrientationForConnection:_previewLayer.connection];
  258. }
  259. }
  260. - (void)_setOrientationForConnection:(AVCaptureConnection *)connection
  261. {
  262. if (!connection || ![connection isVideoOrientationSupported])
  263. return;
  264. AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  265. switch (_cameraOrientation) {
  266. case PBJCameraOrientationPortraitUpsideDown:
  267. orientation = AVCaptureVideoOrientationPortraitUpsideDown;
  268. break;
  269. case PBJCameraOrientationLandscapeRight:
  270. orientation = AVCaptureVideoOrientationLandscapeRight;
  271. break;
  272. case PBJCameraOrientationLandscapeLeft:
  273. orientation = AVCaptureVideoOrientationLandscapeLeft;
  274. break;
  275. case PBJCameraOrientationPortrait:
  276. default:
  277. break;
  278. }
  279. [connection setVideoOrientation:orientation];
  280. }
// Applies a combined device/mode/output-format change. For each aspect that
// actually changes, the delegate receives a will-change callback before the
// change and a did-change callback after the session is reconfigured.
- (void)_setCameraMode:(PBJCameraMode)cameraMode cameraDevice:(PBJCameraDevice)cameraDevice outputFormat:(PBJOutputFormat)outputFormat
{
    BOOL changeDevice = (_cameraDevice != cameraDevice);
    BOOL changeMode = (_cameraMode != cameraMode);
    BOOL changeOutputFormat = (_outputFormat != outputFormat);

    DLog(@"change device (%d) mode (%d) format (%d)", changeDevice, changeMode, changeOutputFormat);

    // nothing changed, nothing to do
    if (!changeMode && !changeDevice && !changeOutputFormat) {
        return;
    }

    // will-change notifications; each performSelector is guarded by a
    // respondsToSelector check, and the pragmas silence the ARC
    // performSelector-leak warning for these known void-returning selectors
    if (changeDevice && [_delegate respondsToSelector:@selector(visionCameraDeviceWillChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
        [_delegate performSelector:@selector(visionCameraDeviceWillChange:) withObject:self];
#pragma clang diagnostic pop
    }
    if (changeMode && [_delegate respondsToSelector:@selector(visionCameraModeWillChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
        [_delegate performSelector:@selector(visionCameraModeWillChange:) withObject:self];
#pragma clang diagnostic pop
    }
    if (changeOutputFormat && [_delegate respondsToSelector:@selector(visionOutputFormatWillChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
        [_delegate performSelector:@selector(visionOutputFormatWillChange:) withObject:self];
#pragma clang diagnostic pop
    }

    _flags.changingModes = YES;

    // new values are committed immediately; the session catches up asynchronously
    _cameraDevice = cameraDevice;
    _cameraMode = cameraMode;
    _outputFormat = outputFormat;

    // did-change notifications, fired after the session reconfigures
    PBJVisionBlock didChangeBlock = ^{
        _flags.changingModes = NO;

        if (changeDevice && [_delegate respondsToSelector:@selector(visionCameraDeviceDidChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
            [_delegate performSelector:@selector(visionCameraDeviceDidChange:) withObject:self];
#pragma clang diagnostic pop
        }
        if (changeMode && [_delegate respondsToSelector:@selector(visionCameraModeDidChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
            [_delegate performSelector:@selector(visionCameraModeDidChange:) withObject:self];
#pragma clang diagnostic pop
        }
        if (changeOutputFormat && [_delegate respondsToSelector:@selector(visionOutputFormatDidChange:)]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
            [_delegate performSelector:@selector(visionOutputFormatDidChange:) withObject:self];
#pragma clang diagnostic pop
        }
    };

    // since there is no session in progress, set and bail
    if (!_captureSession) {
        _flags.changingModes = NO;
        didChangeBlock();
        return;
    }

    [self _enqueueBlockOnCaptureSessionQueue:^{
        // camera is already setup, no need to call _setupCamera
        [self _setupSession];
        [self setMirroringMode:_mirroringMode];
        [self _enqueueBlockOnMainQueue:didChangeBlock];
    }];
}
// Convenience setter: changes only the device, keeping mode and format.
- (void)setCameraDevice:(PBJCameraDevice)cameraDevice
{
    [self _setCameraMode:_cameraMode cameraDevice:cameraDevice outputFormat:_outputFormat];
}
  350. - (void)setCaptureSessionPreset:(NSString *)captureSessionPreset
  351. {
  352. _captureSessionPreset = captureSessionPreset;
  353. if ([_captureSession canSetSessionPreset:captureSessionPreset]){
  354. [self _commitBlock:^{
  355. [_captureSession setSessionPreset:captureSessionPreset];
  356. }];
  357. }
  358. }
// Convenience setter: changes only the mode, keeping device and format.
- (void)setCameraMode:(PBJCameraMode)cameraMode
{
    [self _setCameraMode:cameraMode cameraDevice:_cameraDevice outputFormat:_outputFormat];
}

// Convenience setter: changes only the output format, keeping device and mode.
- (void)setOutputFormat:(PBJOutputFormat)outputFormat
{
    [self _setCameraMode:_cameraMode cameraDevice:_cameraDevice outputFormat:outputFormat];
}
  367. - (BOOL)isCameraDeviceAvailable:(PBJCameraDevice)cameraDevice
  368. {
  369. return [UIImagePickerController isCameraDeviceAvailable:(UIImagePickerControllerCameraDevice)cameraDevice];
  370. }
  371. - (BOOL)isFocusPointOfInterestSupported
  372. {
  373. return [_currentDevice isFocusPointOfInterestSupported];
  374. }
  375. - (BOOL)isFocusLockSupported
  376. {
  377. return [_currentDevice isFocusModeSupported:AVCaptureFocusModeLocked];
  378. }
  379. - (void)setFocusMode:(PBJFocusMode)focusMode
  380. {
  381. BOOL shouldChangeFocusMode = (_focusMode != focusMode);
  382. if (![_currentDevice isFocusModeSupported:(AVCaptureFocusMode)focusMode] || !shouldChangeFocusMode)
  383. return;
  384. _focusMode = focusMode;
  385. NSError *error = nil;
  386. if (_currentDevice && [_currentDevice lockForConfiguration:&error]) {
  387. [_currentDevice setFocusMode:(AVCaptureFocusMode)focusMode];
  388. [_currentDevice unlockForConfiguration];
  389. } else if (error) {
  390. DLog(@"error locking device for focus mode change (%@)", error);
  391. }
  392. }
  393. - (BOOL)isExposureLockSupported
  394. {
  395. return [_currentDevice isExposureModeSupported:AVCaptureExposureModeLocked];
  396. }
  397. - (void)setExposureMode:(PBJExposureMode)exposureMode
  398. {
  399. BOOL shouldChangeExposureMode = (_exposureMode != exposureMode);
  400. if (![_currentDevice isExposureModeSupported:(AVCaptureExposureMode)exposureMode] || !shouldChangeExposureMode)
  401. return;
  402. _exposureMode = exposureMode;
  403. NSError *error = nil;
  404. if (_currentDevice && [_currentDevice lockForConfiguration:&error]) {
  405. [_currentDevice setExposureMode:(AVCaptureExposureMode)exposureMode];
  406. [_currentDevice unlockForConfiguration];
  407. } else if (error) {
  408. DLog(@"error locking device for exposure mode change (%@)", error);
  409. }
  410. }
  411. - (BOOL)isFlashAvailable
  412. {
  413. return (_currentDevice && [_currentDevice hasFlash]);
  414. }
  415. - (void)setFlashMode:(PBJFlashMode)flashMode
  416. {
  417. BOOL shouldChangeFlashMode = (_flashMode != flashMode);
  418. if (![_currentDevice hasFlash] || !shouldChangeFlashMode)
  419. return;
  420. _flashMode = flashMode;
  421. NSError *error = nil;
  422. if (_currentDevice && [_currentDevice lockForConfiguration:&error]) {
  423. switch (_cameraMode) {
  424. case PBJCameraModePhoto:
  425. {
  426. if ([_currentDevice isFlashModeSupported:(AVCaptureFlashMode)_flashMode]) {
  427. [_currentDevice setFlashMode:(AVCaptureFlashMode)_flashMode];
  428. }
  429. break;
  430. }
  431. case PBJCameraModeVideo:
  432. {
  433. if ([_currentDevice isFlashModeSupported:(AVCaptureFlashMode)_flashMode]) {
  434. [_currentDevice setFlashMode:AVCaptureFlashModeOff];
  435. }
  436. if ([_currentDevice isTorchModeSupported:(AVCaptureTorchMode)_flashMode]) {
  437. [_currentDevice setTorchMode:(AVCaptureTorchMode)_flashMode];
  438. }
  439. break;
  440. }
  441. default:
  442. break;
  443. }
  444. [_currentDevice unlockForConfiguration];
  445. } else if (error) {
  446. DLog(@"error locking device for flash mode change (%@)", error);
  447. }
  448. }
  449. // framerate
  450. - (void)setVideoFrameRate:(NSInteger)videoFrameRate
  451. {
  452. if (![self supportsVideoFrameRate:videoFrameRate]) {
  453. DLog(@"frame rate range not supported for current device format");
  454. return;
  455. }
  456. BOOL isRecording = _flags.recording;
  457. if (isRecording) {
  458. [self pauseVideoCapture];
  459. }
  460. CMTime fps = CMTimeMake(1, (int32_t)videoFrameRate);
  461. AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  462. AVCaptureDeviceFormat *supportingFormat = nil;
  463. int32_t maxWidth = 0;
  464. NSArray *formats = [videoDevice formats];
  465. for (AVCaptureDeviceFormat *format in formats) {
  466. NSArray *videoSupportedFrameRateRanges = format.videoSupportedFrameRateRanges;
  467. for (AVFrameRateRange *range in videoSupportedFrameRateRanges) {
  468. CMFormatDescriptionRef desc = format.formatDescription;
  469. CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(desc);
  470. int32_t width = dimensions.width;
  471. if (range.minFrameRate <= videoFrameRate && videoFrameRate <= range.maxFrameRate && width >= maxWidth) {
  472. supportingFormat = format;
  473. maxWidth = width;
  474. }
  475. }
  476. }
  477. if (supportingFormat) {
  478. NSError *error = nil;
  479. [_captureSession beginConfiguration]; // the session to which the receiver's AVCaptureDeviceInput is added.
  480. if ([_currentDevice lockForConfiguration:&error]) {
  481. [_currentDevice setActiveFormat:supportingFormat];
  482. _currentDevice.activeVideoMinFrameDuration = fps;
  483. _currentDevice.activeVideoMaxFrameDuration = fps;
  484. _videoFrameRate = videoFrameRate;
  485. [_currentDevice unlockForConfiguration];
  486. } else if (error) {
  487. DLog(@"error locking device for frame rate change (%@)", error);
  488. }
  489. }
  490. [_captureSession commitConfiguration];
  491. [self _enqueueBlockOnMainQueue:^{
  492. if ([_delegate respondsToSelector:@selector(visionDidChangeVideoFormatAndFrameRate:)])
  493. [_delegate visionDidChangeVideoFormatAndFrameRate:self];
  494. }];
  495. if (isRecording) {
  496. [self resumeVideoCapture];
  497. }
  498. }
  499. - (NSInteger)videoFrameRate
  500. {
  501. if (!_currentDevice)
  502. return 0;
  503. return _currentDevice.activeVideoMaxFrameDuration.timescale;
  504. }
  505. - (BOOL)supportsVideoFrameRate:(NSInteger)videoFrameRate
  506. {
  507. AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  508. NSArray *formats = [videoDevice formats];
  509. for (AVCaptureDeviceFormat *format in formats) {
  510. NSArray *videoSupportedFrameRateRanges = [format videoSupportedFrameRateRanges];
  511. for (AVFrameRateRange *frameRateRange in videoSupportedFrameRateRanges) {
  512. if ( (frameRateRange.minFrameRate <= videoFrameRate) && (videoFrameRate <= frameRateRange.maxFrameRate) ) {
  513. return YES;
  514. }
  515. }
  516. }
  517. return NO;
  518. }
#pragma mark - init

// Designated initializer: sets up the GL context, default configuration,
// serial dispatch queues, preview layer, and app lifecycle observers.
- (id)init
{
    self = [super init];
    if (self) {
        // setup GLES
        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_context) {
            DLog(@"failed to create GL context");
        }
        [self _setupGL];

        _captureSessionPreset = AVCaptureSessionPresetMedium;
        _captureDirectory = nil;

        _autoUpdatePreviewOrientation = YES;
        _autoFreezePreviewDuringCapture = YES;
        _usesApplicationAudioSession = NO;

        // Average bytes per second based on video dimensions
        // lower the bitRate, higher the compression
        _videoBitRate = PBJVideoBitRate640x480;

        // default audio/video configuration
        _audioBitRate = 64000;

        // default flags
        _flags.thumbnailEnabled = YES;
        _flags.defaultVideoThumbnails = YES;
        _flags.audioCaptureEnabled = YES;

        // setup queues
        _captureSessionDispatchQueue = dispatch_queue_create("PBJVisionSession", DISPATCH_QUEUE_SERIAL); // protects session
        _captureCaptureDispatchQueue = dispatch_queue_create("PBJVisionCapture", DISPATCH_QUEUE_SERIAL); // protects capture

        // created detached from any session; attached later when the camera is set up
        _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:nil];

        _maximumCaptureDuration = kCMTimeInvalid;

        [self setMirroringMode:PBJMirroringAuto];

        // app lifecycle observers (handlers defined elsewhere in this file)
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_applicationWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:[UIApplication sharedApplication]];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:[UIApplication sharedApplication]];
    }
    return self;
}
// Tears down observers, GL textures/cache, GL state, and the camera.
// Under ARC no [super dealloc] call is needed.
- (void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    _delegate = nil;

    [self _cleanUpTextures];

    // the texture cache is a CF object and must be released manually
    if (_videoTextureCache) {
        CFRelease(_videoTextureCache);
        _videoTextureCache = NULL;
    }

    [self _destroyGL];
    [self _destroyCamera];
}
  567. #pragma mark - queue helper methods
  568. typedef void (^PBJVisionBlock)();
  569. - (void)_enqueueBlockOnCaptureSessionQueue:(PBJVisionBlock)block
  570. {
  571. dispatch_async(_captureSessionDispatchQueue, ^{
  572. block();
  573. });
  574. }
  575. - (void)_enqueueBlockOnCaptureVideoQueue:(PBJVisionBlock)block
  576. {
  577. dispatch_async(_captureCaptureDispatchQueue, ^{
  578. block();
  579. });
  580. }
  581. - (void)_enqueueBlockOnMainQueue:(PBJVisionBlock)block
  582. {
  583. dispatch_async(dispatch_get_main_queue(), ^{
  584. block();
  585. });
  586. }
  587. - (void)_executeBlockOnMainQueue:(PBJVisionBlock)block
  588. {
  589. dispatch_sync(dispatch_get_main_queue(), ^{
  590. block();
  591. });
  592. }
// Runs block inside a beginConfiguration/commitConfiguration pair so all
// session changes made by the block are applied atomically.
- (void)_commitBlock:(PBJVisionBlock)block
{
    [_captureSession beginConfiguration];
    block();
    [_captureSession commitConfiguration];
}
#pragma mark - camera

// only call from the session queue
// One-time camera setup: creates the GL texture cache, the capture session,
// devices/inputs/outputs, and registers all notification and KVO observers.
// No-op if a session already exists.
- (void)_setupCamera
{
    if (_captureSession)
        return;

    // texture cache for zero-copy upload of camera pixel buffers to GLES;
    // the API signature differs depending on the CoreVideo SDK version
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
    CVReturn cvError = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
#else
    CVReturn cvError = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)_context, NULL, &_videoTextureCache);
#endif
    if (cvError) {
        NSLog(@"error CVOpenGLESTextureCacheCreate (%d)", cvError);
    }

    // create session
    _captureSession = [[AVCaptureSession alloc] init];

    if (_usesApplicationAudioSession) {
        _captureSession.usesApplicationAudioSession = YES;
    }

    // capture devices
    _captureDeviceFront = [PBJVisionUtilities captureDeviceForPosition:AVCaptureDevicePositionFront];
    _captureDeviceBack = [PBJVisionUtilities captureDeviceForPosition:AVCaptureDevicePositionBack];

    // capture device inputs; errors are logged and cleared so a missing
    // camera (e.g. front-only hardware) doesn't abort the rest of setup
    NSError *error = nil;
    _captureDeviceInputFront = [AVCaptureDeviceInput deviceInputWithDevice:_captureDeviceFront error:&error];
    if (error) {
        DLog(@"error setting up front camera input (%@)", error);
        error = nil;
    }

    _captureDeviceInputBack = [AVCaptureDeviceInput deviceInputWithDevice:_captureDeviceBack error:&error];
    if (error) {
        DLog(@"error setting up back camera input (%@)", error);
        error = nil;
    }

    // audio is only wired up outside photo mode and when capture is enabled
    if (_cameraMode != PBJCameraModePhoto && _flags.audioCaptureEnabled) {
        _captureDeviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        _captureDeviceInputAudio = [AVCaptureDeviceInput deviceInputWithDevice:_captureDeviceAudio error:&error];
        if (error) {
            DLog(@"error setting up audio input (%@)", error);
        }
    }

    // capture device ouputs
    _captureOutputPhoto = [[AVCaptureStillImageOutput alloc] init];
    if (_cameraMode != PBJCameraModePhoto && _flags.audioCaptureEnabled) {
        _captureOutputAudio = [[AVCaptureAudioDataOutput alloc] init];
    }
    _captureOutputVideo = [[AVCaptureVideoDataOutput alloc] init];

    // sample buffers are delivered on the serial capture queue
    if (_cameraMode != PBJCameraModePhoto && _flags.audioCaptureEnabled) {
        [_captureOutputAudio setSampleBufferDelegate:self queue:_captureCaptureDispatchQueue];
    }
    [_captureOutputVideo setSampleBufferDelegate:self queue:_captureCaptureDispatchQueue];

    // capture device initial settings
    _videoFrameRate = 30;

    // add notification observers
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];

    // session notifications
    [notificationCenter addObserver:self selector:@selector(_sessionRuntimeErrored:) name:AVCaptureSessionRuntimeErrorNotification object:_captureSession];
    [notificationCenter addObserver:self selector:@selector(_sessionStarted:) name:AVCaptureSessionDidStartRunningNotification object:_captureSession];
    [notificationCenter addObserver:self selector:@selector(_sessionStopped:) name:AVCaptureSessionDidStopRunningNotification object:_captureSession];
    [notificationCenter addObserver:self selector:@selector(_sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:_captureSession];
    [notificationCenter addObserver:self selector:@selector(_sessionInterruptionEnded:) name:AVCaptureSessionInterruptionEndedNotification object:_captureSession];

    // capture input notifications
    [notificationCenter addObserver:self selector:@selector(_inputPortFormatDescriptionDidChange:) name:AVCaptureInputPortFormatDescriptionDidChangeNotification object:nil];

    // capture device notifications
    [notificationCenter addObserver:self selector:@selector(_deviceSubjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];

    // current device KVO notifications; observing key paths through the
    // currentDevice property keeps observations valid across device switches
    [self addObserver:self forKeyPath:@"currentDevice.adjustingFocus" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionFocusObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.adjustingExposure" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionExposureObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionWhiteBalanceObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.flashMode" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionFlashModeObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.torchMode" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionTorchModeObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.flashAvailable" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionFlashAvailabilityObserverContext];
    [self addObserver:self forKeyPath:@"currentDevice.torchAvailable" options:NSKeyValueObservingOptionNew context:(__bridge void *)PBJVisionTorchAvailabilityObserverContext];

    // KVO is only used to monitor focus and capture events
    [_captureOutputPhoto addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:(__bridge void *)(PBJVisionCaptureStillImageIsCapturingStillImageObserverContext)];

    DLog(@"camera setup");
}
// only call from the session queue
// Exact inverse of _setupCamera: removes every KVO and notification observer
// registered there, then releases all AV objects. No-op without a session.
- (void)_destroyCamera
{
    if (!_captureSession)
        return;

    // current device KVO notifications (must mirror the adds in _setupCamera,
    // otherwise removeObserver throws)
    [self removeObserver:self forKeyPath:@"currentDevice.adjustingFocus"];
    [self removeObserver:self forKeyPath:@"currentDevice.adjustingExposure"];
    [self removeObserver:self forKeyPath:@"currentDevice.adjustingWhiteBalance"];
    [self removeObserver:self forKeyPath:@"currentDevice.flashMode"];
    [self removeObserver:self forKeyPath:@"currentDevice.torchMode"];
    [self removeObserver:self forKeyPath:@"currentDevice.flashAvailable"];
    [self removeObserver:self forKeyPath:@"currentDevice.torchAvailable"];

    // capture events KVO notifications
    [_captureOutputPhoto removeObserver:self forKeyPath:@"capturingStillImage"];

    // remove notification observers (we don't want to just 'remove all' because we're also observing background notifications
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];

    // session notifications
    [notificationCenter removeObserver:self name:AVCaptureSessionRuntimeErrorNotification object:_captureSession];
    [notificationCenter removeObserver:self name:AVCaptureSessionDidStartRunningNotification object:_captureSession];
    [notificationCenter removeObserver:self name:AVCaptureSessionDidStopRunningNotification object:_captureSession];
    [notificationCenter removeObserver:self name:AVCaptureSessionWasInterruptedNotification object:_captureSession];
    [notificationCenter removeObserver:self name:AVCaptureSessionInterruptionEndedNotification object:_captureSession];

    // capture input notifications
    [notificationCenter removeObserver:self name:AVCaptureInputPortFormatDescriptionDidChangeNotification object:nil];

    // capture device notifications
    [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];

    // release AV objects (ARC handles the memory; nil-ing marks the torn-down state)
    _captureOutputPhoto = nil;
    _captureOutputAudio = nil;
    _captureOutputVideo = nil;

    _captureDeviceAudio = nil;
    _captureDeviceInputAudio = nil;
    _captureDeviceInputFront = nil;
    _captureDeviceInputBack = nil;
    _captureDeviceFront = nil;
    _captureDeviceBack = nil;

    _captureSession = nil;
    _currentDevice = nil;
    _currentInput = nil;
    _currentOutput = nil;

    DLog(@"camera destroyed");
}
  718. #pragma mark - AVCaptureSession
  719. - (BOOL)_canSessionCaptureWithOutput:(AVCaptureOutput *)captureOutput
  720. {
  721. BOOL sessionContainsOutput = [[_captureSession outputs] containsObject:captureOutput];
  722. BOOL outputHasConnection = ([captureOutput connectionWithMediaType:AVMediaTypeVideo] != nil);
  723. return (sessionContainsOutput && outputHasConnection);
  724. }
// _setupSession is always called from the captureSession queue
//
// Reconfigures the running AVCaptureSession to match the requested
// _cameraDevice (front/back) and _cameraMode (photo/video). All changes are
// bracketed by beginConfiguration/commitConfiguration so they apply
// atomically. Mutates _currentInput, _currentOutput, and _currentDevice
// (the latter with manual KVO notifications so the "currentDevice.*"
// observers registered in _setupCamera re-fire).
- (void)_setupSession
{
    if (!_captureSession) {
        DLog(@"error, no session running to setup");
        return;
    }

    // a device switch is needed when no device is selected yet, or the
    // selected device no longer matches the requested _cameraDevice
    BOOL shouldSwitchDevice = (_currentDevice == nil) ||
                              ((_currentDevice == _captureDeviceFront) && (_cameraDevice != PBJCameraDeviceFront)) ||
                              ((_currentDevice == _captureDeviceBack) && (_cameraDevice != PBJCameraDeviceBack));

    // a mode switch is needed when no output is selected yet, or the selected
    // output no longer matches the requested _cameraMode
    BOOL shouldSwitchMode = (_currentOutput == nil) ||
                            ((_currentOutput == _captureOutputPhoto) && (_cameraMode != PBJCameraModePhoto)) ||
                            ((_currentOutput == _captureOutputVideo) && (_cameraMode != PBJCameraModeVideo));

    DLog(@"switchDevice %d switchMode %d", shouldSwitchDevice, shouldSwitchMode);

    if (!shouldSwitchDevice && !shouldSwitchMode)
        return;

    // the new* locals stay nil unless the corresponding switch succeeds;
    // the fallbacks after the switches keep the previous values in that case
    AVCaptureDeviceInput *newDeviceInput = nil;
    AVCaptureOutput *newCaptureOutput = nil;
    AVCaptureDevice *newCaptureDevice = nil;

    [_captureSession beginConfiguration];

    // setup session device: remove the opposite camera's input before adding
    // the requested one (a session can't hold two camera inputs at once)
    if (shouldSwitchDevice) {
        switch (_cameraDevice) {
          case PBJCameraDeviceFront:
          {
            if (_captureDeviceInputBack)
                [_captureSession removeInput:_captureDeviceInputBack];
            if (_captureDeviceInputFront && [_captureSession canAddInput:_captureDeviceInputFront]) {
                [_captureSession addInput:_captureDeviceInputFront];
                newDeviceInput = _captureDeviceInputFront;
                newCaptureDevice = _captureDeviceFront;
            }
            break;
          }
          case PBJCameraDeviceBack:
          {
            if (_captureDeviceInputFront)
                [_captureSession removeInput:_captureDeviceInputFront];
            if (_captureDeviceInputBack && [_captureSession canAddInput:_captureDeviceInputBack]) {
                [_captureSession addInput:_captureDeviceInputBack];
                newDeviceInput = _captureDeviceInputBack;
                newCaptureDevice = _captureDeviceBack;
            }
            break;
          }
          default:
            break;
        }
    } // shouldSwitchDevice

    // setup session input/output
    if (shouldSwitchMode) {
        // disable audio when in use for photos, otherwise lazily create the
        // audio device/input/output chain on first entry into video mode
        if (self.cameraMode == PBJCameraModePhoto) {
            if (_captureDeviceInputAudio)
                [_captureSession removeInput:_captureDeviceInputAudio];
            if (_captureOutputAudio)
                [_captureSession removeOutput:_captureOutputAudio];
        } else if (!_captureDeviceAudio && !_captureDeviceInputAudio && !_captureOutputAudio && _flags.audioCaptureEnabled) {
            NSError *error = nil;
            _captureDeviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
            _captureDeviceInputAudio = [AVCaptureDeviceInput deviceInputWithDevice:_captureDeviceAudio error:&error];
            if (error) {
                DLog(@"error setting up audio input (%@)", error);
            }
            _captureOutputAudio = [[AVCaptureAudioDataOutput alloc] init];
            // sample buffers are delivered on the capture dispatch queue
            [_captureOutputAudio setSampleBufferDelegate:self queue:_captureCaptureDispatchQueue];
        }

        // strip both media outputs, then re-add only what the mode needs
        [_captureSession removeOutput:_captureOutputVideo];
        [_captureSession removeOutput:_captureOutputPhoto];

        switch (_cameraMode) {
            case PBJCameraModeVideo:
            {
                // audio input
                if ([_captureSession canAddInput:_captureDeviceInputAudio]) {
                    [_captureSession addInput:_captureDeviceInputAudio];
                }
                // audio output
                if ([_captureSession canAddOutput:_captureOutputAudio]) {
                    [_captureSession addOutput:_captureOutputAudio];
                }
                // vidja output
                if ([_captureSession canAddOutput:_captureOutputVideo]) {
                    [_captureSession addOutput:_captureOutputVideo];
                    newCaptureOutput = _captureOutputVideo;
                }
                break;
            }
            case PBJCameraModePhoto:
            {
                // photo output
                if ([_captureSession canAddOutput:_captureOutputPhoto]) {
                    [_captureSession addOutput:_captureOutputPhoto];
                    newCaptureOutput = _captureOutputPhoto;
                }
                break;
            }
            default:
                break;
        }
    } // shouldSwitchMode

    // when a switch didn't happen (or failed), keep the previous selections
    if (!newCaptureDevice)
        newCaptureDevice = _currentDevice;
    if (!newCaptureOutput)
        newCaptureOutput = _currentOutput;

    // setup video connection (nil when the video output isn't in the session)
    AVCaptureConnection *videoConnection = [_captureOutputVideo connectionWithMediaType:AVMediaTypeVideo];

    // setup input/output
    NSString *sessionPreset = _captureSessionPreset;

    if ( newCaptureOutput && (newCaptureOutput == _captureOutputVideo) && videoConnection ) {
        // setup video orientation
        [self _setOrientationForConnection:videoConnection];

        // setup video stabilization, if available; prefer the newer
        // preferredVideoStabilizationMode API when the OS supports it
        if ([videoConnection isVideoStabilizationSupported]) {
            if ([videoConnection respondsToSelector:@selector(setPreferredVideoStabilizationMode:)]) {
                [videoConnection setPreferredVideoStabilizationMode:AVCaptureVideoStabilizationModeAuto];
            } else {
                [videoConnection setEnablesVideoStabilizationWhenAvailable:YES];
            }
        }

        // discard late frames rather than queueing them up
        [_captureOutputVideo setAlwaysDiscardsLateVideoFrames:YES];

        // specify video preset
        sessionPreset = _captureSessionPreset;

        // setup video settings
        // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange Bi-Planar Component Y'CbCr 8-bit 4:2:0, full-range (luma=[0,255] chroma=[1,255])
        // baseAddr points to a big-endian CVPlanarPixelBufferInfo_YCbCrBiPlanar struct
        // prefer full-range YUV, fall back to video-range, else leave settings untouched
        BOOL supportsFullRangeYUV = NO;
        BOOL supportsVideoRangeYUV = NO;
        NSArray *supportedPixelFormats = _captureOutputVideo.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats) {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
                supportsFullRangeYUV = YES;
            }
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
                supportsVideoRangeYUV = YES;
            }
        }

        NSDictionary *videoSettings = nil;
        if (supportsFullRangeYUV) {
            videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
        } else if (supportsVideoRangeYUV) {
            videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) };
        }
        if (videoSettings) {
            [_captureOutputVideo setVideoSettings:videoSettings];
        }

        // setup video device configuration (requires the device lock)
        NSError *error = nil;
        if ([newCaptureDevice lockForConfiguration:&error]) {
            // smooth autofocus for videos
            if ([newCaptureDevice isSmoothAutoFocusSupported])
                [newCaptureDevice setSmoothAutoFocusEnabled:YES];
            [newCaptureDevice unlockForConfiguration];
        } else if (error) {
            DLog(@"error locking device for video device configuration (%@)", error);
        }

    } else if ( newCaptureOutput && (newCaptureOutput == _captureOutputPhoto) ) {
        // specify photo preset
        sessionPreset = AVCaptureSessionPresetPhoto;

        // setup photo settings
        NSDictionary *photoSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
        [_captureOutputPhoto setOutputSettings:photoSettings];

        // setup photo device configuration (requires the device lock)
        NSError *error = nil;
        if ([newCaptureDevice lockForConfiguration:&error]) {
            if ([newCaptureDevice isLowLightBoostSupported])
                [newCaptureDevice setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
            [newCaptureDevice unlockForConfiguration];
        } else if (error) {
            DLog(@"error locking device for photo device configuration (%@)", error);
        }
    }

    // apply presets
    if ([_captureSession canSetSessionPreset:sessionPreset])
        [_captureSession setSessionPreset:sessionPreset];

    if (newDeviceInput)
        _currentInput = newDeviceInput;
    if (newCaptureOutput)
        _currentOutput = newCaptureOutput;

    // ensure there is a capture device setup; manual will/did KVO calls so
    // the "currentDevice.*" key-path observers track the new device
    if (_currentInput) {
        AVCaptureDevice *device = [_currentInput device];
        if (device) {
            [self willChangeValueForKey:@"currentDevice"];
            _currentDevice = device;
            [self didChangeValueForKey:@"currentDevice"];
        }
    }

    [_captureSession commitConfiguration];

    DLog(@"capture session setup");
}
  916. #pragma mark - preview
  917. - (void)startPreview
  918. {
  919. [self _enqueueBlockOnCaptureSessionQueue:^{
  920. if (!_captureSession) {
  921. [self _setupCamera];
  922. [self _setupSession];
  923. }
  924. [self setMirroringMode:_mirroringMode];
  925. if (_previewLayer && _previewLayer.session != _captureSession) {
  926. _previewLayer.session = _captureSession;
  927. [self _setOrientationForConnection:_previewLayer.connection];
  928. }
  929. if (_previewLayer)
  930. _previewLayer.connection.enabled = YES;
  931. if (![_captureSession isRunning]) {
  932. [_captureSession startRunning];
  933. [self _enqueueBlockOnMainQueue:^{
  934. if ([_delegate respondsToSelector:@selector(visionSessionDidStartPreview:)]) {
  935. [_delegate visionSessionDidStartPreview:self];
  936. }
  937. }];
  938. DLog(@"capture session running");
  939. }
  940. _flags.previewRunning = YES;
  941. }];
  942. }
  943. - (void)stopPreview
  944. {
  945. [self _enqueueBlockOnCaptureSessionQueue:^{
  946. if (!_flags.previewRunning)
  947. return;
  948. if (_previewLayer)
  949. _previewLayer.connection.enabled = NO;
  950. if ([_captureSession isRunning])
  951. [_captureSession stopRunning];
  952. [self _executeBlockOnMainQueue:^{
  953. if ([_delegate respondsToSelector:@selector(visionSessionDidStopPreview:)]) {
  954. [_delegate visionSessionDidStopPreview:self];
  955. }
  956. }];
  957. DLog(@"capture session stopped");
  958. _flags.previewRunning = NO;
  959. }];
  960. }
  961. - (void)freezePreview
  962. {
  963. if (_previewLayer)
  964. _previewLayer.connection.enabled = NO;
  965. }
  966. - (void)unfreezePreview
  967. {
  968. if (_previewLayer)
  969. _previewLayer.connection.enabled = YES;
  970. }
  971. #pragma mark - focus, exposure, white balance
  972. - (void)_focusStarted
  973. {
  974. // DLog(@"focus started");
  975. if ([_delegate respondsToSelector:@selector(visionWillStartFocus:)])
  976. [_delegate visionWillStartFocus:self];
  977. }
  978. - (void)_focusEnded
  979. {
  980. AVCaptureFocusMode focusMode = [_currentDevice focusMode];
  981. BOOL isFocusing = [_currentDevice isAdjustingFocus];
  982. BOOL isAutoFocusEnabled = (focusMode == AVCaptureFocusModeAutoFocus ||
  983. focusMode == AVCaptureFocusModeContinuousAutoFocus);
  984. if (!isFocusing && isAutoFocusEnabled) {
  985. NSError *error = nil;
  986. if ([_currentDevice lockForConfiguration:&error]) {
  987. [_currentDevice setSubjectAreaChangeMonitoringEnabled:YES];
  988. [_currentDevice unlockForConfiguration];
  989. } else if (error) {
  990. DLog(@"error locking device post exposure for subject area change monitoring (%@)", error);
  991. }
  992. }
  993. if ([_delegate respondsToSelector:@selector(visionDidStopFocus:)])
  994. [_delegate visionDidStopFocus:self];
  995. // DLog(@"focus ended");
  996. }
  997. - (void)_exposureChangeStarted
  998. {
  999. // DLog(@"exposure change started");
  1000. if ([_delegate respondsToSelector:@selector(visionWillChangeExposure:)])
  1001. [_delegate visionWillChangeExposure:self];
  1002. }
  1003. - (void)_exposureChangeEnded
  1004. {
  1005. BOOL isContinuousAutoExposureEnabled = [_currentDevice exposureMode] == AVCaptureExposureModeContinuousAutoExposure;
  1006. BOOL isExposing = [_currentDevice isAdjustingExposure];
  1007. BOOL isFocusSupported = [_currentDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus];
  1008. if (isContinuousAutoExposureEnabled && !isExposing && !isFocusSupported) {
  1009. NSError *error = nil;
  1010. if ([_currentDevice lockForConfiguration:&error]) {
  1011. [_currentDevice setSubjectAreaChangeMonitoringEnabled:YES];
  1012. [_currentDevice unlockForConfiguration];
  1013. } else if (error) {
  1014. DLog(@"error locking device post exposure for subject area change monitoring (%@)", error);
  1015. }
  1016. }
  1017. if ([_delegate respondsToSelector:@selector(visionDidChangeExposure:)])
  1018. [_delegate visionDidChangeExposure:self];
  1019. // DLog(@"exposure change ended");
  1020. }
// Called when the device begins a white-balance adjustment (KVO on
// currentDevice.adjustingWhiteBalance). Intentionally empty: there is
// currently no delegate callback for white-balance changes.
- (void)_whiteBalanceChangeStarted
{
}

// Called when the device finishes a white-balance adjustment.
// Intentionally empty; see _whiteBalanceChangeStarted.
- (void)_whiteBalanceChangeEnded
{
}
  1027. - (void)focusAtAdjustedPointOfInterest:(CGPoint)adjustedPoint
  1028. {
  1029. if ([_currentDevice isAdjustingFocus] || [_currentDevice isAdjustingExposure])
  1030. return;
  1031. NSError *error = nil;
  1032. if ([_currentDevice lockForConfiguration:&error]) {
  1033. BOOL isFocusAtPointSupported = [_currentDevice isFocusPointOfInterestSupported];
  1034. if (isFocusAtPointSupported && [_currentDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
  1035. AVCaptureFocusMode fm = [_currentDevice focusMode];
  1036. [_currentDevice setFocusPointOfInterest:adjustedPoint];
  1037. [_currentDevice setFocusMode:fm];
  1038. }
  1039. [_currentDevice unlockForConfiguration];
  1040. } else if (error) {
  1041. DLog(@"error locking device for focus adjustment (%@)", error);
  1042. }
  1043. }
  1044. - (BOOL)isAdjustingFocus
  1045. {
  1046. return [_currentDevice isAdjustingFocus];
  1047. }
  1048. - (void)exposeAtAdjustedPointOfInterest:(CGPoint)adjustedPoint
  1049. {
  1050. if ([_currentDevice isAdjustingExposure])
  1051. return;
  1052. NSError *error = nil;
  1053. if ([_currentDevice lockForConfiguration:&error]) {
  1054. BOOL isExposureAtPointSupported = [_currentDevice isExposurePointOfInterestSupported];
  1055. if (isExposureAtPointSupported && [_currentDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
  1056. AVCaptureExposureMode em = [_currentDevice exposureMode];
  1057. [_currentDevice setExposurePointOfInterest:adjustedPoint];
  1058. [_currentDevice setExposureMode:em];
  1059. }
  1060. [_currentDevice unlockForConfiguration];
  1061. } else if (error) {
  1062. DLog(@"error locking device for exposure adjustment (%@)", error);
  1063. }
  1064. }
  1065. - (BOOL)isAdjustingExposure
  1066. {
  1067. return [_currentDevice isAdjustingExposure];
  1068. }
  1069. - (void)_adjustFocusExposureAndWhiteBalance
  1070. {
  1071. if ([_currentDevice isAdjustingFocus] || [_currentDevice isAdjustingExposure])
  1072. return;
  1073. // only notify clients when focus is triggered from an event
  1074. if ([_delegate respondsToSelector:@selector(visionWillStartFocus:)])
  1075. [_delegate visionWillStartFocus:self];
  1076. CGPoint focusPoint = CGPointMake(0.5f, 0.5f);
  1077. [self focusAtAdjustedPointOfInterest:focusPoint];
  1078. }
  1079. // focusExposeAndAdjustWhiteBalanceAtAdjustedPoint: will put focus and exposure into auto
  1080. - (void)focusExposeAndAdjustWhiteBalanceAtAdjustedPoint:(CGPoint)adjustedPoint
  1081. {
  1082. if ([_currentDevice isAdjustingFocus] || [_currentDevice isAdjustingExposure])
  1083. return;
  1084. NSError *error = nil;
  1085. if ([_currentDevice lockForConfiguration:&error]) {
  1086. BOOL isFocusAtPointSupported = [_currentDevice isFocusPointOfInterestSupported];
  1087. BOOL isExposureAtPointSupported = [_currentDevice isExposurePointOfInterestSupported];
  1088. BOOL isWhiteBalanceModeSupported = [_currentDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
  1089. if (isFocusAtPointSupported && [_currentDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
  1090. [_currentDevice setFocusPointOfInterest:adjustedPoint];
  1091. [_currentDevice setFocusMode:AVCaptureFocusModeAutoFocus];
  1092. }
  1093. if (isExposureAtPointSupported && [_currentDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
  1094. [_currentDevice setExposurePointOfInterest:adjustedPoint];
  1095. [_currentDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
  1096. }
  1097. if (isWhiteBalanceModeSupported) {
  1098. [_currentDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
  1099. }
  1100. [_currentDevice setSubjectAreaChangeMonitoringEnabled:NO];
  1101. [_currentDevice unlockForConfiguration];
  1102. } else if (error) {
  1103. DLog(@"error locking device for focus / exposure / white-balance adjustment (%@)", error);
  1104. }
  1105. }
  1106. #pragma mark - mirroring
  1107. - (void)setMirroringMode:(PBJMirroringMode)mirroringMode
  1108. {
  1109. _mirroringMode = mirroringMode;
  1110. AVCaptureConnection *videoConnection = [_currentOutput connectionWithMediaType:AVMediaTypeVideo];
  1111. AVCaptureConnection *previewConnection = [_previewLayer connection];
  1112. switch (_mirroringMode) {
  1113. case PBJMirroringOff:
  1114. {
  1115. if ([videoConnection isVideoMirroringSupported]) {
  1116. [videoConnection setVideoMirrored:NO];
  1117. }
  1118. if ([previewConnection isVideoMirroringSupported]) {
  1119. [previewConnection setAutomaticallyAdjustsVideoMirroring:NO];
  1120. [previewConnection setVideoMirrored:NO];