
/Dependencies/GPUImage/Source/GPUImageMovie.m

https://gitlab.com/Mr.Tomato/VideoEffects
Objective C | 759 lines | 576 code | 119 blank | 64 comment
#import "GPUImageMovie.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
#import "GPUImageVideoCamera.h"

@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;
    GPUImageMovieWriter *synchronizedMovieWriter;
    AVAssetReader *reader;
    AVPlayerItemVideoOutput *playerItemOutput;
    CADisplayLink *displayLink;
    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;
    BOOL keepLooping;

    GLuint luminanceTexture, chrominanceTexture;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;
}

- (void)processAsset;

@end

@implementation GPUImageMovie

@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize delegate = _delegate;
@synthesize shouldRepeat = _shouldRepeat;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithURL:(NSURL *)url;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = url;
    self.asset = nil;

    return self;
}

- (id)initWithAsset:(AVAsset *)asset;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = asset;

    return self;
}

- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = nil;
    self.playerItem = playerItem;

    return self;
}
- (void)yuvConversionSetup;
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}

- (void)dealloc
{
    // Moved into endProcessing
    //if (self.playerItem && (displayLink != nil))
    //{
    //    [displayLink invalidate]; // remove from all run loops
    //    displayLink = nil;
    //}
}
#pragma mark -
#pragma mark Movie processing

- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
    synchronizedMovieWriter = movieWriter;
    movieWriter.encodingLiveVideo = NO;
}

- (void)startProcessing
{
    if( self.playerItem ) {
        [self processPlayerItem];
        return;
    }
    if(self.url == nil)
    {
        [self processAsset];
        return;
    }

    if (_shouldRepeat) keepLooping = YES;

    previousFrameTime = kCMTimeZero;
    previousActualFrameTime = CFAbsoluteTimeGetCurrent();

    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];

    GPUImageMovie __block *blockSelf = self;

    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
        NSError *error = nil;
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded)
        {
            return;
        }
        blockSelf.asset = inputAsset;
        [blockSelf processAsset];
        blockSelf = nil;
    }];
}
- (AVAssetReader*)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload]) {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;

    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];

        // This might need to be extended to handle movies with more than one audio track
        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioTrackOutput];
    }

    return assetReader;
}
- (void)processAsset
{
    reader = [self createAssetReader];

    AVAssetReaderOutput *readerVideoTrackOutput = nil;
    AVAssetReaderOutput *readerAudioTrackOutput = nil;

    audioEncodingIsFinished = YES;
    for( AVAssetReaderOutput *output in reader.outputs ) {
        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
            audioEncodingIsFinished = NO;
            readerAudioTrackOutput = output;
        }
        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
            readerVideoTrackOutput = output;
        }
    }

    if ([reader startReading] == NO)
    {
        NSLog(@"Error reading from file at URL: %@", self.url);
        return;
    }

    __unsafe_unretained GPUImageMovie *weakSelf = self;

    if (synchronizedMovieWriter != nil)
    {
        // Let the movie writer pull frames and audio samples as its inputs become ready
        [synchronizedMovieWriter setVideoInputReadyCallback:^{
            return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
        }];

        [synchronizedMovieWriter setAudioInputReadyCallback:^{
            return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
        }];

        [synchronizedMovieWriter enableSynchronizationCallbacks];
    }
    else
    {
        // Free-running mode: drain the asset reader as fast as it decodes
        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
        {
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

            if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
            {
                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
            }
        }

        if (reader.status == AVAssetReaderStatusCompleted) {
            [reader cancelReading];

            if (keepLooping) {
                reader = nil;
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self startProcessing];
                });
            } else {
                [weakSelf endProcessing];
            }
        }
    }
}
- (void)processPlayerItem
{
    runSynchronouslyOnVideoProcessingQueue(^{
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        [displayLink setPaused:YES];

        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
        if ([GPUImageContext supportsFastTextureUpload]) {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        else {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
        [playerItemOutput setDelegate:self queue:videoProcessingQueue];

        [_playerItem addOutput:playerItemOutput];
        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
    });
}

- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    // Restart display link.
    [displayLink setPaused:NO];
}

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    /*
     The callback gets called once every Vsync.
     Using the display link's timestamp and duration, we can compute the next time the screen will be refreshed and copy the pixel buffer for that time.
     This pixel buffer can then be processed and later rendered on screen.
     */
    // Calculate the next Vsync time, which is when the screen will be refreshed next.
    CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);

    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];

    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
        __unsafe_unretained GPUImageMovie *weakSelf = self;
        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if( pixelBuffer ) {
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                CFRelease(pixelBuffer);
            });
        }
    }
}
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
    {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            //NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
            if (_playAtActualSpeed)
            {
                // Do this outside of the video processing queue to not slow that down while waiting
                CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
                CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
                CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

                CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
                CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

                if (frameTimeDifference > actualTimeDifference)
                {
                    usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
                }

                previousFrameTime = currentSampleTime;
                previousActualFrameTime = CFAbsoluteTimeGetCurrent();
            }

            __unsafe_unretained GPUImageMovie *weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else
        {
            if (!keepLooping) {
                videoEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                    [self endProcessing];
            }
        }
    }
    else if (synchronizedMovieWriter != nil)
    {
        if (reader.status == AVAssetReaderStatusCompleted)
        {
            [self endProcessing];
        }
    }
    return NO;
}
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
    {
        CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
        if (audioSampleBufferRef)
        {
            //NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
            [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
            CFRelease(audioSampleBufferRef);
            return YES;
        }
        else
        {
            if (!keepLooping) {
                audioEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                    [self endProcessing];
            }
        }
    }
    else if (synchronizedMovieWriter != nil)
    {
        if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
            reader.status == AVAssetReaderStatusCancelled)
        {
            [self endProcessing];
        }
    }
    return NO;
}
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
    // CMTimeGetSeconds
    // CMTimeSubtract

    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}

- (float)progress
{
    if ( AVAssetReaderStatusReading == reader.status )
    {
        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
        return current / duration;
    }
    else if ( AVAssetReaderStatusCompleted == reader.status )
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);

    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }

    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    // Fix issue 1580
    [GPUImageContext useImageProcessingContext];

    if ([GPUImageContext supportsFastTextureUpload])
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

        //        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            //            if (!allTargetsWantMonochromeData)
            //            {
            [self convertYUVToRGBOutput];
            //            }

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            }

            [outputFramebuffer unlock];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: Mesh this with the new framebuffer cache
            //            CVPixelBufferLockBaseAddress(movieFrame, 0);
            //
            //            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
            //
            //            if (!texture || err) {
            //                NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
            //                NSAssert(NO, @"Camera failure");
            //                return;
            //            }
            //
            //            outputTexture = CVOpenGLESTextureGetName(texture);
            //            //        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
            //            glBindTexture(GL_TEXTURE_2D, outputTexture);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            //
            //            for (id<GPUImageInput> currentTarget in targets)
            //            {
            //                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            //                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            //
            //                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            //                [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
            //
            //                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            //            }
            //
            //            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            //            CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
            //            CFRelease(texture);
            //
            //            outputTexture = 0;
        }
    }
    else
    {
        // Upload to texture
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        // Using BGRA extension to pull in video frame data directly
        glTexImage2D(GL_TEXTURE_2D,
                     0,
                     self.outputTextureOptions.internalFormat,
                     bufferWidth,
                     bufferHeight,
                     0,
                     self.outputTextureOptions.format,
                     self.outputTextureOptions.type,
                     CVPixelBufferGetBaseAddress(movieFrame));

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }

    if (_runBenchmark)
    {
        CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
        NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
    }
}
- (void)endProcessing;
{
    keepLooping = NO;
    [displayLink setPaused:YES];

    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }

    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
        [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
    }

    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate]; // remove from all run loops
        displayLink = nil;
    }

    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
        [self.delegate didCompletePlayingMovie];
    }
    self.delegate = nil;
}

- (void)cancelProcessing
{
    if (reader) {
        [reader cancelReading];
    }
    [self endProcessing];
}

- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

- (AVAssetReader*)assetReader {
    return reader;
}

- (BOOL)audioEncodingIsFinished {
    return audioEncodingIsFinished;
}

- (BOOL)videoEncodingIsFinished {
    return videoEncodingIsFinished;
}

@end
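
Usage note (not part of this file): a minimal sketch of how this movie source is typically wired into a filter chain and a synchronized writer, following the pattern of GPUImage's bundled video-file examples. The input/output file names, the output size, and the choice of GPUImageSepiaFilter are illustrative assumptions, not taken from this project.

    // Minimal offline-processing sketch (assumed file names and filter choice).
    NSURL *sourceURL = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"m4v"];
    GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithURL:sourceURL];
    movieFile.playAtActualSpeed = NO; // decode as fast as possible when re-encoding offline

    GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];
    [movieFile addTarget:filter];

    NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"processed.m4v"];
    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:[NSURL fileURLWithPath:outputPath]
                                                                                size:CGSizeMake(640.0, 480.0)];
    [filter addTarget:movieWriter];

    // Let the writer's inputs pull frames from the reader instead of free-running.
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    [movieWriter startRecording];
    [movieFile startProcessing];

    [movieWriter setCompletionBlock:^{
        [filter removeTarget:movieWriter];
        [movieWriter finishRecording];
    }];

When driven this way, readNextVideoFrameFromOutput: and readNextAudioSampleFromOutput: above are invoked from the writer's input-ready callbacks rather than from the while loop in processAsset.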