
/Dependencies/GPUImage/Source/GPUImageVideoCamera.m

https://gitlab.com/Mr.Tomato/VideoEffects

#import "GPUImageVideoCamera.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"

// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)

// BT.601, which is the standard for SDTV.
const GLfloat kColorConversion601[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.392, 2.017,
    1.596, -0.813, 0.0,
};

// BT.709, which is the standard for HDTV.
const GLfloat kColorConversion709[] = {
    1.164,  1.164, 1.164,
    0.0,   -0.213, 2.112,
    1.793, -0.533, 0.0,
};

// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)
const GLfloat kColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,
    0.0,   -0.343,  1.765,
    1.4,   -0.711,  0.0,
};
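
// Note: these arrays are handed to glUniformMatrix3fv() with transpose set to GL_FALSE, so each
// row above is a *column* of the GLSL mat3. For BT.601, for example, this yields
// R = 1.164*Y + 1.596*Cr, G = 1.164*Y - 0.392*Cb - 0.813*Cr, B = 1.164*Y + 2.017*Cb.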

NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);
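
// Three variants of the same conversion: the "RG" shader samples chrominance from an RG texture
// (.rg), while the "LA" shaders sample from the GL_LUMINANCE_ALPHA chrominance texture created in
// processVideoSampleBuffer: below (.ra). The video-range LA shader additionally subtracts the
// 16/255 luma offset that full-range data does not carry.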

#pragma mark -
#pragma mark Private methods and instance variables

@interface GPUImageVideoCamera ()
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;
    
    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;
    
    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;
    
    BOOL isFullYUVRange;
    
    int imageBufferWidth, imageBufferHeight;
    
    BOOL addedAudioInputsDueToEncodingTarget;
}

- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;

@end

@implementation GPUImageVideoCamera

@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
    {
        return nil;
    }
    
    return self;
}

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
    audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);
    
    frameRenderingSemaphore = dispatch_semaphore_create(1);
    
    _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
    internalRotation = kGPUImageNoRotation;
    captureAsYUV = YES;
    _preferredConversion = kColorConversion709;
    
    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == cameraPosition)
        {
            _inputCamera = device;
        }
    }
    
    if (!_inputCamera) {
        return nil;
    }
    
    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    
    [_captureSession beginConfiguration];
    
    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }
    
    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];
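    
    // Prefer planar YUV delivery when fast texture upload is available: full-range NV12
    // (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) if the output offers it, otherwise
    // video-range NV12; when YUV capture isn't used, fall back to 32BGRA.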
//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }
        
        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = YES;
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
            isFullYUVRange = NO;
        }
    }
    else
    {
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }
    
    runSynchronouslyOnVideoProcessingQueue(^{
        
        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
//            if ([GPUImageContext deviceSupportsRedTextures])
//            {
//                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
//            }
//            else
//            {
            if (isFullYUVRange)
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
            }
            else
            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
            }
//            }
            
            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
                
                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }
            
            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
            
            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
            
            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });
    
    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    else
    {
        NSLog(@"Couldn't add video output");
        return nil;
    }
    
    _captureSessionPreset = sessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];

    // This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//    
//    if (conn.supportsVideoMinFrameDuration)
//        conn.videoMinFrameDuration = CMTimeMake(1,60);
//    if (conn.supportsVideoMaxFrameDuration)
//        conn.videoMaxFrameDuration = CMTimeMake(1,60);
    
    [_captureSession commitConfiguration];
    
    return self;
}
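
/*
 Typical usage (a minimal sketch, not taken from this file; GPUImageView and the single-argument
 addTarget: come from the rest of GPUImage and are assumed here):
 
     GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
     videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
     [videoCamera addTarget:filterOrView]; // any id<GPUImageInput>, e.g. a filter or a GPUImageView
     [videoCamera startCameraCapture];
 */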

- (GPUImageFramebuffer *)framebufferForOutput;
{
    return outputFramebuffer;
}

- (void)dealloc
{
    [self stopCameraCapture];
    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    
    [self removeInputsAndOutputs];
    
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
    if (frameRenderingSemaphore != NULL)
    {
        dispatch_release(frameRenderingSemaphore);
    }
#endif
}

- (BOOL)addAudioInputsAndOutputs
{
    if (audioOutput)
        return NO;
    
    [_captureSession beginConfiguration];
    
    _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
    if ([_captureSession canAddInput:audioInput])
    {
        [_captureSession addInput:audioInput];
    }
    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    
    if ([_captureSession canAddOutput:audioOutput])
    {
        [_captureSession addOutput:audioOutput];
    }
    else
    {
        NSLog(@"Couldn't add audio output");
    }
    
    [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];
    
    [_captureSession commitConfiguration];
    return YES;
}

- (BOOL)removeAudioInputsAndOutputs
{
    if (!audioOutput)
        return NO;
    
    [_captureSession beginConfiguration];
    [_captureSession removeInput:audioInput];
    [_captureSession removeOutput:audioOutput];
    audioInput = nil;
    audioOutput = nil;
    _microphone = nil;
    [_captureSession commitConfiguration];
    return YES;
}

- (void)removeInputsAndOutputs;
{
    [_captureSession beginConfiguration];
    if (videoInput) {
        [_captureSession removeInput:videoInput];
        [_captureSession removeOutput:videoOutput];
        videoInput = nil;
        videoOutput = nil;
    }
    if (_microphone != nil)
    {
        [_captureSession removeInput:audioInput];
        [_captureSession removeOutput:audioOutput];
        audioInput = nil;
        audioOutput = nil;
        _microphone = nil;
    }
    [_captureSession commitConfiguration];
}

#pragma mark -
#pragma mark Managing targets

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [super addTarget:newTarget atTextureLocation:textureLocation];
    
    [newTarget setInputRotation:outputRotation atIndex:textureLocation];
}

#pragma mark -
#pragma mark Manage the camera video stream

- (void)startCameraCapture;
{
    if (![_captureSession isRunning])
    {
        startingCaptureTime = [NSDate date];
        [_captureSession startRunning];
    }
}

- (void)stopCameraCapture;
{
    if ([_captureSession isRunning])
    {
        [_captureSession stopRunning];
    }
}

- (void)pauseCameraCapture;
{
    capturePaused = YES;
}

- (void)resumeCameraCapture;
{
    capturePaused = NO;
}

- (void)rotateCamera
{
    if (self.frontFacingCameraPresent == NO)
        return;
    
    NSError *error;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
    
    if (currentCameraPosition == AVCaptureDevicePositionBack)
    {
        currentCameraPosition = AVCaptureDevicePositionFront;
    }
    else
    {
        currentCameraPosition = AVCaptureDevicePositionBack;
    }
    
    AVCaptureDevice *backFacingCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == currentCameraPosition)
        {
            backFacingCamera = device;
        }
    }
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];
    
    if (newVideoInput != nil)
    {
        [_captureSession beginConfiguration];
        
        [_captureSession removeInput:videoInput];
        if ([_captureSession canAddInput:newVideoInput])
        {
            [_captureSession addInput:newVideoInput];
            videoInput = newVideoInput;
        }
        else
        {
            [_captureSession addInput:videoInput];
        }
        //captureSession.sessionPreset = oriPreset;
        [_captureSession commitConfiguration];
    }
    
    _inputCamera = backFacingCamera;
    [self setOutputImageOrientation:_outputImageOrientation];
}

- (AVCaptureDevicePosition)cameraPosition
{
    return [[videoInput device] position];
}

+ (BOOL)isBackFacingCameraPresent;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionBack)
            return YES;
    }
    
    return NO;
}

- (BOOL)isBackFacingCameraPresent
{
    return [GPUImageVideoCamera isBackFacingCameraPresent];
}

+ (BOOL)isFrontFacingCameraPresent;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionFront)
            return YES;
    }
    
    return NO;
}

- (BOOL)isFrontFacingCameraPresent
{
    return [GPUImageVideoCamera isFrontFacingCameraPresent];
}

- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
    [_captureSession beginConfiguration];
    
    _captureSessionPreset = captureSessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];
    
    [_captureSession commitConfiguration];
}
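
// On iOS 7 and later the frame rate is set on the AVCaptureDevice itself via
// activeVideoMinFrameDuration/activeVideoMaxFrameDuration (inside a lock/unlockForConfiguration
// pair); on older systems it falls back to the deprecated per-connection
// videoMinFrameDuration/videoMaxFrameDuration properties. A frameRate of 0 restores the defaults.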
- (void)setFrameRate:(int32_t)frameRate;
{
    _frameRate = frameRate;
    
    if (_frameRate > 0)
    {
        if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
            
            NSError *error;
            [_inputCamera lockForConfiguration:&error];
            if (error == nil) {
#if defined(__IPHONE_7_0)
                [_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)];
                [_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)];
#endif
            }
            [_inputCamera unlockForConfiguration];
            
        } else {
            
            for (AVCaptureConnection *connection in videoOutput.connections)
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                    connection.videoMinFrameDuration = CMTimeMake(1, _frameRate);
                
                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                    connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate);
#pragma clang diagnostic pop
            }
        }
    }
    else
    {
        if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
            
            NSError *error;
            [_inputCamera lockForConfiguration:&error];
            if (error == nil) {
#if defined(__IPHONE_7_0)
                [_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid];
                [_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid];
#endif
            }
            [_inputCamera unlockForConfiguration];
            
        } else {
            
            for (AVCaptureConnection *connection in videoOutput.connections)
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                    connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default
                
                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                    connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default
#pragma clang diagnostic pop
            }
        }
    }
}

- (int32_t)frameRate;
{
    return _frameRate;
}

- (AVCaptureConnection *)videoCaptureConnection {
    for (AVCaptureConnection *connection in [videoOutput connections] ) {
        for ( AVCaptureInputPort *port in [connection inputPorts] ) {
            if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) {
                return connection;
            }
        }
    }
    
    return nil;
}

#define INITIALFRAMESTOIGNOREFORBENCHMARK 5

- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
    // First, update all the framebuffers in the targets
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
                
                if ([currentTarget wantsMonochromeInput] && captureAsYUV)
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                    // TODO: Replace optimization for monochrome output
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
                else
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
            else
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }
    
    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
    [outputFramebuffer unlock];
    outputFramebuffer = nil;
    
    // Finally, trigger rendering as needed
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}
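
// Pulls each camera frame into OpenGL ES. The colorimetry attachment on the pixel buffer selects
// the BT.601 / BT.601 full-range / BT.709 conversion matrix; with fast texture upload the Y and
// CbCr planes are mapped straight into textures through the Core Video texture cache and converted
// to RGB on the GPU, otherwise the BGRA bytes are uploaded with glTexImage2D().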
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    if (capturePaused)
    {
        return;
    }
    
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
    int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
    CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }
    
    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    [GPUImageContext useImageProcessingContext];
    
    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;
        
//        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            CVPixelBufferLockBaseAddress(cameraFrame, 0);
            
            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }
            
            CVReturn err;
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
//            if (!allTargetsWantMonochromeData)
//            {
            [self convertYUVToRGBOutput];
//            }
            
            int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
            
            if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
            {
                rotatedImageBufferWidth = bufferHeight;
                rotatedImageBufferHeight = bufferWidth;
            }
            
            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
            
            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: Mesh this with the output framebuffer structure
//            CVPixelBufferLockBaseAddress(cameraFrame, 0);
//            
//            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
//            
//            if (!texture || err) {
//                NSLog(@"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
//                NSAssert(NO, @"Camera failure");
//                return;
//            }
//            
//            outputTexture = CVOpenGLESTextureGetName(texture);
//            //        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
//            glBindTexture(GL_TEXTURE_2D, outputTexture);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//            
//            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];
//            
//            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
//            CFRelease(texture);
//            
//            outputTexture = 0;
        }
        
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
                NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
                NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
            }
        }
    }
    else
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0);
        
        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
        [outputFramebuffer activateFramebuffer];
        
        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        
//        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
        
        // Using BGRA extension to pull in video frame data directly
        // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
        
        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
        
        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
        
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
            }
        }
    }
}

- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}
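
// Renders the Y and CbCr textures through yuvConversionProgram into an RGBA framebuffer fetched
// from the shared cache, applying _preferredConversion and the current internal rotation.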
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    
    int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
    
    if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
    {
        rotatedImageBufferWidth = imageBufferHeight;
        rotatedImageBufferHeight = imageBufferWidth;
    }
    
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);
    
    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);
    
    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
    
    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

#pragma mark -
#pragma mark Benchmarking

- (CGFloat)averageFrameDurationDuringCapture;
{
    return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
}

- (void)resetBenchmarkAverage;
{
    numberOfFramesCaptured = 0;
    totalFrameTimeDuringCapture = 0.0;
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
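
// frameRenderingSemaphore lets at most one video frame be in flight on the processing queue;
// if the previous frame is still being processed, the incoming sample buffer is simply dropped.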
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!self.captureSession.isRunning)
    {
        return;
    }
    else if (captureOutput == audioOutput)
    {
        [self processAudioSampleBuffer:sampleBuffer];
    }
    else
    {
        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
        {
            return;
        }
        
        CFRetain(sampleBuffer);
        runAsynchronouslyOnVideoProcessingQueue(^{
            // Feature Detection Hook.
            if (self.delegate)
            {
                [self.delegate willOutputSampleBuffer:sampleBuffer];
            }
            
            [self processVideoSampleBuffer:sampleBuffer];
            
            CFRelease(sampleBuffer);
            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
}

#pragma mark -
#pragma mark Accessors

- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
    if (newValue) {
        /* Add audio inputs and outputs, if necessary */
        addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
    } else if (addedAudioInputsDueToEncodingTarget) {
        /* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */
        [self removeAudioInputsAndOutputs];
        addedAudioInputsDueToEncodingTarget = NO;
    }
    
    [super setAudioEncodingTarget:newValue];
}

- (void)updateOrientationSendToTargets;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        
        //    From the iOS 5.0 release notes:
        //    In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.
        
        if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
        {
            outputRotation = kGPUImageNoRotation;
            if ([self cameraPosition] == AVCaptureDevicePositionBack)
            {
                if (_horizontallyMirrorRearFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
            }
            else
            {
                if (_horizontallyMirrorFrontFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;
                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;
                        default:internalRotation = kGPUImageNoRotation;
                    }
                }
            }
        }
        else
        {
            if ([self cameraPosition] == AVCaptureDevicePositionBack)
            {
                if (_horizontallyMirrorRearFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
            }
            else
            {
                if (_horizontallyMirrorFrontFacingCamera)
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
                else
                {
                    switch(_outputImageOrientation)
                    {
                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;
                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;
                        default:outputRotation = kGPUImageNoRotation;
                    }
                }
            }
        }
        
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
        }
    });
}

- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;
{
    _outputImageOrientation = newValue;
    [self updateOrientationSendToTargets];
}

- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorFrontFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorRearFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

@end