PageRenderTime 54ms CodeModel.GetById 21ms RepoModel.GetById 0ms app.codeStats 0ms

/Dependencies/VideoEffects/ExportEffects.m

https://gitlab.com/Mr.Tomato/VideoEffects
Objective C | 938 lines | 722 code | 149 blank | 67 comment | 96 complexity | 7b0589ffaecdeac531002a6d9b3e0095 MD5 | raw file
  1. //
  2. // ExportEffects
  3. // VideoEffects
  4. //
  5. // Created by Johnny Xu(徐景周) on 5/30/15.
  6. // Copyright (c) 2015 Future Studio. All rights reserved.
  7. //
  8. #import <QuartzCore/QuartzCore.h>
  9. #import <AssetsLibrary/AssetsLibrary.h>
  10. #import "ExportEffects.h"
  11. #import "VideoThemesData.h"
  12. #import "CustomVideoCompositor.h"
  13. #import "GifAnimationLayer.h"
  14. #define DefaultOutputVideoName @"outputMovie.mp4"
  15. #define DefaultOutputAudioName @"outputAudio.caf"
// Class extension: private state for the two-stage export pipeline
// (GPUImage filter pass, then AVFoundation composition/export pass).
@interface ExportEffects ()
{
}
// Polls _exportSession.progress during the export pass (see retrievingExportProgress).
@property (strong, nonatomic) NSTimer *timerEffect;
// Drives the final AVFoundation export of the composed movie.
@property (strong, nonatomic) AVAssetExportSession *exportSession;
// GPUImage source reading the input movie during the filter pass.
@property (strong, nonatomic) GPUImageMovie *movieFile;
// Theme filter inserted between movieFile and movieWriter (nil when no theme).
@property (strong, nonatomic) GPUImageOutput<GPUImageInput> *filter;
// GPUImage sink writing the filtered movie to a temp file.
@property (strong, nonatomic) GPUImageMovieWriter *movieWriter;
// Polls _movieFile.progress during the filter pass (see retrievingFilterProgress).
@property (strong, nonatomic) NSTimer *timerFilter;
// Theme lookup table from VideoThemesData, keyed by theme type.
@property (strong, nonatomic) NSMutableDictionary *themesDic;
@end
  27. @implementation ExportEffects
  28. {
  29. }
  30. + (ExportEffects *)sharedInstance
  31. {
  32. static ExportEffects *sharedInstance = nil;
  33. static dispatch_once_t pred;
  34. dispatch_once(&pred, ^{
  35. sharedInstance = [[ExportEffects alloc] init];
  36. });
  37. return sharedInstance;
  38. }
  39. - (id)init
  40. {
  41. self = [super init];
  42. if (self)
  43. {
  44. _timerEffect = nil;
  45. _exportSession = nil;
  46. _filenameBlock = nil;
  47. _timerFilter = nil;
  48. self.themeCurrentType = kThemeNone;
  49. self.themesDic = [[VideoThemesData sharedInstance] getThemeData];
  50. }
  51. return self;
  52. }
  53. - (void)dealloc
  54. {
  55. [[NSNotificationCenter defaultCenter] removeObserver:self];
  56. if (_exportSession)
  57. {
  58. _exportSession = nil;
  59. }
  60. if (_timerEffect)
  61. {
  62. [_timerEffect invalidate];
  63. _timerEffect = nil;
  64. }
  65. if (_movieFile)
  66. {
  67. _movieFile = nil;
  68. }
  69. if (_movieWriter)
  70. {
  71. _movieWriter = nil;
  72. }
  73. if (_exportSession)
  74. {
  75. _exportSession = nil;
  76. }
  77. if (_timerFilter)
  78. {
  79. [_timerFilter invalidate];
  80. _timerFilter = nil;
  81. }
  82. }
  83. #pragma mark Utility methods
  84. - (NSString*)getOutputFilePath
  85. {
  86. NSString* mp4OutputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:DefaultOutputVideoName];
  87. return mp4OutputFile;
  88. }
  89. - (NSString*)getTempOutputFilePath
  90. {
  91. NSString *path = NSTemporaryDirectory();
  92. NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
  93. [formatter setDateStyle:NSDateFormatterMediumStyle];
  94. [formatter setTimeStyle:NSDateFormatterShortStyle];
  95. formatter.dateFormat = @"yyyyMMddHHmmssSSS";
  96. NSString *nowTimeStr = [formatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0]];
  97. NSString *fileName = [[path stringByAppendingPathComponent:nowTimeStr] stringByAppendingString:@".mov"];
  98. return fileName;
  99. }
  100. #pragma mark - writeExportedVideoToAssetsLibrary
  101. - (void)writeExportedVideoToAssetsLibrary:(NSString *)outputPath
  102. {
  103. __unsafe_unretained typeof(self) weakSelf = self;
  104. NSURL *exportURL = [NSURL fileURLWithPath:outputPath];
  105. ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
  106. if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL])
  107. {
  108. [library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error)
  109. {
  110. NSString *message;
  111. if (!error)
  112. {
  113. message = GBLocalizedString(@"MsgSuccess");
  114. }
  115. else
  116. {
  117. message = [error description];
  118. }
  119. NSLog(@"%@", message);
  120. // Output path
  121. self.filenameBlock = ^(void) {
  122. return outputPath;
  123. };
  124. if (weakSelf.finishVideoBlock)
  125. {
  126. weakSelf.finishVideoBlock(YES, message);
  127. }
  128. }];
  129. }
  130. else
  131. {
  132. NSString *message = GBLocalizedString(@"MsgFailed");;
  133. NSLog(@"%@", message);
  134. // Output path
  135. self.filenameBlock = ^(void) {
  136. return @"";
  137. };
  138. if (_finishVideoBlock)
  139. {
  140. _finishVideoBlock(NO, message);
  141. }
  142. }
  143. library = nil;
  144. }
  145. #pragma mark - GPUImage
  146. - (void) pause
  147. {
  148. if (_movieFile.progress < 1.0)
  149. {
  150. [_movieWriter cancelRecording];
  151. }
  152. else if (_exportSession.progress < 1.0)
  153. {
  154. [_exportSession cancelExport];
  155. }
  156. }
  157. - (void)initializeVideoFilter:(NSURL*)inputMovieURL fromSystemCamera:(BOOL)fromSystemCamera
  158. {
  159. // 1.
  160. _movieFile = [[GPUImageMovie alloc] initWithURL:inputMovieURL];
  161. _movieFile.runBenchmark = NO;
  162. _movieFile.playAtActualSpeed = YES;
  163. // 2. Add filter effect
  164. _filter = nil;
  165. NSUInteger themesCount = [[[VideoThemesData sharedInstance] getThemeData] count];
  166. if (self.themeCurrentType != kThemeNone && themesCount >= self.themeCurrentType)
  167. {
  168. GPUImageOutput<GPUImageInput> *filterCurrent = [[[VideoThemesData sharedInstance] getThemeFilter:fromSystemCamera] objectForKey:[NSNumber numberWithInt:self.themeCurrentType]];
  169. _filter = filterCurrent;
  170. }
  171. // 3.
  172. if ((NSNull*)_filter != [NSNull null] && _filter != nil)
  173. {
  174. [_movieFile addTarget:_filter];
  175. }
  176. }
  177. - (void)buildVideoFilter:(NSString*)videoFilePath fromSystemCamera:(BOOL)fromSystemCamera finishBlock:(GenericCallback)finishBlock
  178. {
  179. if (self.themeCurrentType == kThemeNone)
  180. {
  181. NSLog(@"Theme is empty!");
  182. // Output path
  183. self.filenameBlock = ^(void) {
  184. return @"";
  185. };
  186. if (finishBlock)
  187. {
  188. finishBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  189. }
  190. return;
  191. }
  192. // if (isStringEmpty(videoFilePath))
  193. // {
  194. // NSLog(@"videoFilePath is empty!");
  195. //
  196. // // Output path
  197. // self.filenameBlock = ^(void) {
  198. // return @"";
  199. // };
  200. //
  201. // if (finishBlock)
  202. // {
  203. // finishBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  204. // }
  205. // return;
  206. // }
  207. self.themesDic = [[VideoThemesData sharedInstance] getThemeData];
  208. // 2.
  209. NSURL *inputVideoURL = getFileURL(videoFilePath);
  210. [self initializeVideoFilter:inputVideoURL fromSystemCamera:fromSystemCamera];
  211. // 3. Movie output temp file
  212. NSString *pathToTempMov = [NSTemporaryDirectory() stringByAppendingPathComponent:@"tempMovie.mov"];
  213. unlink([pathToTempMov UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
  214. NSURL *outputTempMovieURL = [NSURL fileURLWithPath:pathToTempMov];
  215. AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputVideoURL options:nil];
  216. NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
  217. if (assetVideoTracks.count <= 0)
  218. {
  219. NSLog(@"Video track is empty!");
  220. return;
  221. }
  222. AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
  223. // If this if from system camera, it will rotate 90c, and swap width and height
  224. CGSize sizeVideo = CGSizeMake(videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
  225. if (fromSystemCamera)
  226. {
  227. sizeVideo = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
  228. }
  229. _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outputTempMovieURL size:sizeVideo];
  230. if ((NSNull*)_filter != [NSNull null] && _filter != nil)
  231. {
  232. [_filter addTarget:_movieWriter];
  233. }
  234. else
  235. {
  236. [_movieFile addTarget:_movieWriter];
  237. }
  238. // 4. Configure this for video from the movie file, where we want to preserve all video frames and audio samples
  239. _movieWriter.shouldPassthroughAudio = YES;
  240. _movieFile.audioEncodingTarget = _movieWriter;
  241. [_movieFile enableSynchronizedEncodingUsingMovieWriter:_movieWriter];
  242. // 5.
  243. [_movieWriter startRecording];
  244. [_movieFile startProcessing];
  245. // 6. Progress monitor
  246. _timerFilter = [NSTimer scheduledTimerWithTimeInterval:0.3f
  247. target:self
  248. selector:@selector(retrievingFilterProgress)
  249. userInfo:nil
  250. repeats:YES];
  251. __weak typeof(self) weakSelf = self;
  252. // 7. Filter finished
  253. [weakSelf.movieWriter setCompletionBlock:^{
  254. if ((NSNull*)_filter != [NSNull null] && _filter != nil)
  255. {
  256. [_filter removeTarget:weakSelf.movieWriter];
  257. }
  258. else
  259. {
  260. [_movieFile removeTarget:weakSelf.movieWriter];
  261. }
  262. [_movieWriter finishRecordingWithCompletionHandler:^{
  263. // Closer timer
  264. [_timerFilter invalidate];
  265. _timerFilter = nil;
  266. if (finishBlock)
  267. {
  268. finishBlock(YES, pathToTempMov);
  269. }
  270. }];
  271. }];
  272. // 8. Filter failed
  273. [weakSelf.movieWriter setFailureBlock: ^(NSError* error){
  274. if ((NSNull*)_filter != [NSNull null] && _filter != nil)
  275. {
  276. [_filter removeTarget:weakSelf.movieWriter];
  277. }
  278. else
  279. {
  280. [_movieFile removeTarget:weakSelf.movieWriter];
  281. }
  282. // [_movieWriter finishRecordingWithCompletionHandler:^{
  283. // Closer timer
  284. [_timerFilter invalidate];
  285. _timerFilter = nil;
  286. dispatch_async(dispatch_get_main_queue(), ^{
  287. self.filenameBlock = ^(void) {
  288. return @"";
  289. };
  290. if (finishBlock)
  291. {
  292. finishBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  293. }
  294. });
  295. NSLog(@"Add filter effect failed! - %@", error.description);
  296. return;
  297. // }];
  298. }];
  299. }
  300. #pragma mark - Export Video
  301. - (void)addEffectToVideo:(NSString *)videoFilePath withAudioFilePath:(NSString *)audioFilePath
  302. {
  303. if (isStringEmpty(videoFilePath))
  304. {
  305. NSLog(@"videoFilePath is empty!");
  306. // Output path
  307. self.filenameBlock = ^(void) {
  308. return @"";
  309. };
  310. if (self.finishVideoBlock)
  311. {
  312. self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  313. }
  314. return;
  315. }
  316. BOOL systemCamera = NO;
  317. NSURL *videoURL = getFileURL(videoFilePath);
  318. AVAsset *videoAsset = [AVAsset assetWithURL:videoURL];
  319. if (videoAsset)
  320. {
  321. UIInterfaceOrientation videoOrientation = orientationForTrack(videoAsset);
  322. NSLog(@"videoOrientation: %ld", (long)videoOrientation);
  323. if (videoOrientation == UIInterfaceOrientationPortrait)
  324. {
  325. // Right rotation 90 degree
  326. [self setShouldRightRotate90:YES withTrackID:TrackIDCustom];
  327. systemCamera = YES;
  328. }
  329. else
  330. {
  331. [self setShouldRightRotate90:NO withTrackID:TrackIDCustom];
  332. systemCamera = NO;
  333. }
  334. }
  335. else
  336. {
  337. NSLog(@"videoAsset is empty!");
  338. // Output path
  339. self.filenameBlock = ^(void) {
  340. return @"";
  341. };
  342. if (self.finishVideoBlock)
  343. {
  344. self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  345. }
  346. return;
  347. }
  348. // Filter
  349. [self buildVideoFilter:videoFilePath fromSystemCamera:systemCamera finishBlock:^(BOOL success, id result) {
  350. if (success)
  351. {
  352. NSLog(@"buildVideoFilter success.");
  353. NSString *filterVideoFile = result;
  354. NSMutableArray *videoFileArray = [NSMutableArray arrayWithCapacity:2];
  355. [videoFileArray addObject:videoFilePath];
  356. [videoFileArray addObject:filterVideoFile];
  357. [self exportVideo:videoFileArray withAudioFilePath:audioFilePath];
  358. }
  359. else
  360. {
  361. self.filenameBlock = ^(void) {
  362. return @"";
  363. };
  364. if (self.finishVideoBlock)
  365. {
  366. self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
  367. }
  368. }
  369. }];
  370. }
  371. #pragma mark - addAudioMixToComposition
// Inserts the background-music asset into the composition as a new audio
// track (clipped to the composition's duration), fades the music in, and
// configures volume ramps around each existing audio track so the music
// ducks under the original voices.
// @param composition The mutable composition being built by exportVideo:.
// @param audioMix    Receives the generated input parameters (out-param).
// @param audioAsset  The background-music asset to mix in.
- (void)addAudioMixToComposition:(AVMutableComposition *)composition withAudioMix:(AVMutableAudioMix *)audioMix withAsset:(AVURLAsset*)audioAsset
{
    NSInteger i;
    NSArray *tracksToDuck = [composition tracksWithMediaType:AVMediaTypeAudio];
    // 1. Clip commentary duration to composition duration.
    CMTimeRange commentaryTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [composition duration]))
        commentaryTimeRange.duration = CMTimeSubtract([composition duration], commentaryTimeRange.start);
    // 2. Add the commentary track.
    AVMutableCompositionTrack *compositionCommentaryTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:TrackIDCustom];
    // NOTE(review): assumes the music asset has at least one audio track —
    // objectAtIndex:0 throws if it has none; verify against callers.
    AVAssetTrack * commentaryTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:commentaryTrack atTime:commentaryTimeRange.start error:nil];
    // 3. Fade in for bgMusic: ramp 0.0 -> 0.5 over the first second.
    CMTime fadeTime = CMTimeMake(1, 1);
    CMTimeRange startRange = CMTimeRangeMake(kCMTimeZero, fadeTime);
    NSMutableArray *trackMixArray = [NSMutableArray array];
    AVMutableAudioMixInputParameters *trackMixComentray = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:commentaryTrack];
    [trackMixComentray setVolumeRampFromStartVolume:0.0f toEndVolume:0.5f timeRange:startRange];
    [trackMixArray addObject:trackMixComentray];
    // 4. Fade in & Fade out for original voices: duck the music to 0.2 for a
    // half second around each existing audio track's time range.
    // NOTE(review): every iteration mutates and re-adds the SAME
    // trackMixComentray object (all ramps target the commentary track, and
    // trackMixArray accumulates duplicates). Presumably intended to build
    // per-track parameters from tracksToDuck — confirm before changing;
    // AVMutableAudioMix behavior with duplicate inputParameters entries is
    // not obvious from this file.
    for (i = 0; i < [tracksToDuck count]; i++)
    {
        CMTimeRange timeRange = [[tracksToDuck objectAtIndex:i] timeRange];
        // Stop at the first track with an invalid/open-ended range.
        if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(timeRange), ==, kCMTimeInvalid))
        {
            break;
        }
        CMTime halfSecond = CMTimeMake(1, 2);
        CMTime startTime = CMTimeSubtract(timeRange.start, halfSecond);
        CMTime endRangeStartTime = CMTimeAdd(timeRange.start, timeRange.duration);
        CMTimeRange endRange = CMTimeRangeMake(endRangeStartTime, halfSecond);
        // Clamp the duck-down ramp so it never starts before time zero.
        if (startTime.value < 0)
        {
            startTime.value = 0;
        }
        [trackMixComentray setVolumeRampFromStartVolume:0.5f toEndVolume:0.2f timeRange:CMTimeRangeMake(startTime, halfSecond)];
        [trackMixComentray setVolumeRampFromStartVolume:0.2f toEndVolume:0.5f timeRange:endRange];
        [trackMixArray addObject:trackMixComentray];
    }
    audioMix.inputParameters = trackMixArray;
}
  413. - (void)addAsset:(AVAsset *)asset toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID withRecordAudio:(BOOL)recordAudio withTimeRange:(CMTimeRange)timeRange
  414. {
  415. AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:trackID];
  416. AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
  417. [videoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:kCMTimeZero error:nil];
  418. [videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
  419. if (recordAudio)
  420. {
  421. AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:trackID];
  422. if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
  423. {
  424. AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
  425. [audioTrack insertTimeRange:timeRange ofTrack:assetAudioTrack atTime:kCMTimeZero error:nil];
  426. }
  427. else
  428. {
  429. NSLog(@"Reminder: video hasn't audio!");
  430. }
  431. }
  432. }
// Composites the given clips into one AVMutableComposition, attaches the
// audio mix, builds a video composition (custom compositor + CoreAnimation
// overlay for the optional ghost GIF), then exports to MP4 and saves it to
// the Photos album. Failure at any stage clears filenameBlock and reports
// via finishVideoBlock.
// @param videoFilePathArray [originalPath, filteredPath] — index 0 is the
//        background ("BG") clip whose time range/size drive the output.
// @param audioFilePath Optional bundled background-music file; when empty,
//        the BG clip's own audio is used instead.
- (void)exportVideo:(NSArray *)videoFilePathArray withAudioFilePath:(NSString *)audioFilePath
{
    if (!videoFilePathArray || [videoFilePathArray count] < 1)
    {
        NSLog(@"videoFilePath is empty!");
        // Output path
        self.filenameBlock = ^(void) {
            return @"";
        };
        if (self.finishVideoBlock)
        {
            self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
        }
        return;
    }
    CGFloat duration = 0;
    CMTime totalDuration = kCMTimeZero;
    CMTimeRange bgVideoTimeRange = kCMTimeRangeZero;
    NSMutableArray *assetArray = [[NSMutableArray alloc] initWithCapacity:2];
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Add each clip as its own video track (trackID i+1); every clip is
    // clipped to the BG clip's time range.
    for (int i = 0; i < [videoFilePathArray count]; ++i)
    {
        NSString *videoPath = [videoFilePathArray objectAtIndex:i];
        NSURL *videoURL = getFileURL(videoPath);
        AVAsset *videoAsset = [AVAsset assetWithURL:videoURL];
        if (i == 0)
        {
            // BG video duration
            bgVideoTimeRange = [[[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] timeRange];
        }
        if (videoAsset)
        {
            [self addAsset:videoAsset toComposition:composition withTrackID:i+1 withRecordAudio:NO withTimeRange:bgVideoTimeRange];
            [assetArray addObject:videoAsset];
            // Max duration
            duration = MAX(duration, CMTimeGetSeconds(videoAsset.duration));
            totalDuration = CMTimeAdd(totalDuration, videoAsset.duration);
        }
    }
    if ([assetArray count] < 1)
    {
        NSLog(@"assetArray is empty!");
        // Output path
        self.filenameBlock = ^(void) {
            return @"";
        };
        if (self.finishVideoBlock)
        {
            self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
        }
        return;
    }
    // Music effect: explicit background music when provided, otherwise the
    // BG clip's own audio track.
    AVMutableAudioMix *audioMix = nil;
    if (!isStringEmpty(audioFilePath))
    {
        NSString *fileName = [audioFilePath lastPathComponent];
        NSLog(@"%@",fileName);
        // NOTE(review): the music file is looked up in the main bundle by
        // its last path component, not at audioFilePath itself — confirm
        // callers always pass bundled resources.
        NSURL *bgMusicURL = [[NSBundle mainBundle] URLForResource:fileName withExtension:nil];
        AVURLAsset *assetMusic = [[AVURLAsset alloc] initWithURL:bgMusicURL options:nil];
        if (assetMusic)
        {
            audioMix = [AVMutableAudioMix audioMix];
            [self addAudioMixToComposition:composition withAudioMix:audioMix withAsset:assetMusic];
        }
    }
    else
    {
        // BG video music
        AVAssetTrack *assetAudioTrack = nil;
        AVAsset *audioAsset = [assetArray objectAtIndex:0];
        if ([[audioAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
        {
            assetAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            if (assetAudioTrack)
            {
                AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
                [compositionAudioTrack insertTimeRange:bgVideoTimeRange ofTrack:assetAudioTrack atTime:kCMTimeZero error:nil];
            }
        }
        else
        {
            NSLog(@"Reminder: embeded BG video hasn't audio!");
        }
    }
    // BG video drives the render size; swap width/height when the rotation
    // flag stored by setShouldRightRotate90:withTrackID: is set.
    AVAssetTrack *firstVideoTrack = [[assetArray[0] tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize videoSize = firstVideoTrack.naturalSize;
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    BOOL shouldRotate = [self shouldRightRotate90ByTrackID:TrackIDCustom];
    if (shouldRotate)
    {
        videoSize = CGSizeMake(firstVideoTrack.naturalSize.height, firstVideoTrack.naturalSize.width);
    }
    videoComposition.renderSize = CGSizeMake(videoSize.width, videoSize.height);
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1.0 / firstVideoTrack.nominalFrameRate, firstVideoTrack.naturalTimeScale);
    instruction.timeRange = [composition.tracks.firstObject timeRange];
    // One layer instruction per clip; trackIDs match those assigned above
    // (i+1) and are consumed by CustomVideoCompositor.
    NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] initWithCapacity:1];
    for (int i = 0; i < [assetArray count]; ++i)
    {
        AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstruction];
        videoLayerInstruction.trackID = i + 1;
        [layerInstructionArray addObject:videoLayerInstruction];
    }
    instruction.layerInstructions = layerInstructionArray;
    videoComposition.instructions = @[ instruction ];
    videoComposition.customVideoCompositorClass = [CustomVideoCompositor class];
    // Animation overlay layers (video layer + optional GIF layer).
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    int limitMinLen = 100;
    CGSize videoSizeResult = CGSizeZero;
    if (videoSize.width >= limitMinLen || videoSize.height >= limitMinLen)
    {
        // Assign a output size
        parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        videoSizeResult = videoSize;
    }
    else
    {
        NSLog(@"videoSize is empty!");
        // Output path
        self.filenameBlock = ^(void) {
            return @"";
        };
        if (self.finishVideoBlock)
        {
            self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
        }
        return;
    }
    videoLayer.frame = parentLayer.frame;
    [parentLayer addSublayer:videoLayer];
    VideoThemes *themeCurrent = nil;
    if (self.themeCurrentType != kThemeNone && [self.themesDic count] >= self.themeCurrentType)
    {
        themeCurrent = [self.themesDic objectForKey:[NSNumber numberWithInt:self.themeCurrentType]];
    }
    // Animation effects: the optional "ghost" GIF is scaled to fit inside
    // the video, centered, and played once via a discrete keyframe
    // animation on the layer's contents.
    NSMutableArray *animatedLayers = [[NSMutableArray alloc] initWithCapacity:[[themeCurrent animationActions] count]];
    if ([self shouldDisplayGhost])
    {
        NSString *imageName = @"ghost.gif";
        UIImage *image = [UIImage imageNamed:imageName];
        CGFloat imageFactor = image.size.width / image.size.height;
        CGFloat widthFactor = videoSize.width / image.size.width;
        CGFloat heightFactor = videoSize.height / image.size.height;
        CGFloat imageWidth = image.size.width;
        CGFloat imageHeight = image.size.height;
        // Shrink to fit while preserving the GIF's aspect ratio.
        if (widthFactor <= 1)
        {
            imageWidth = videoSize.width;
            imageHeight = imageWidth / imageFactor;
        }
        else if (heightFactor <= 1)
        {
            imageHeight = videoSize.height;
            imageWidth = imageHeight * imageFactor;
        }
        CGRect gifFrame = CGRectMake((videoSize.width - imageWidth)/2, (videoSize.height - imageHeight)/2, imageWidth, imageHeight);
        NSLog(@"gifFrame: %@", NSStringFromCGRect(gifFrame));
        NSString *gifPath = getFilePath(imageName);
        CALayer *animatedLayer = nil;
        CFTimeInterval beginTime = 2.0f;
        animatedLayer = [GifAnimationLayer layerWithGifFilePath:gifPath withFrame:gifFrame withAniBeginTime:beginTime];
        if (animatedLayer && [animatedLayer isKindOfClass:[GifAnimationLayer class]])
        {
            animatedLayer.opacity = 0.0f;
            CAKeyframeAnimation *animation = [[CAKeyframeAnimation alloc] init];
            [animation setKeyPath:@"contents"];
            animation.calculationMode = kCAAnimationDiscrete;
            animation.autoreverses = NO;
            animation.repeatCount = 1;
            animation.beginTime = beginTime;
            NSDictionary *gifDic = [(GifAnimationLayer*)animatedLayer getValuesAndKeyTimes];
            NSMutableArray *keyTimes = [gifDic objectForKey:@"keyTimes"];
            NSMutableArray *imageArray = [NSMutableArray arrayWithCapacity:[keyTimes count]];
            for (int i = 0; i < [keyTimes count]; ++i)
            {
                // NOTE(review): copyImageAtFrameIndex: follows the "copy"
                // naming convention (caller owns the CGImageRef), but the
                // __bridge cast below does not transfer ownership and
                // nothing calls CGImageRelease — looks like a leak of one
                // CGImage per GIF frame; confirm against GifAnimationLayer.
                CGImageRef image = [(GifAnimationLayer*)animatedLayer copyImageAtFrameIndex:i];
                if (image)
                {
                    [imageArray addObject:(__bridge id)image];
                }
            }
            animation.values = imageArray;
            animation.keyTimes = keyTimes;
            animation.duration = [(GifAnimationLayer*)animatedLayer getTotalDuration];
            animation.removedOnCompletion = YES;
            animation.delegate = self;
            [animation setValue:@"stop" forKey:@"TAG"];
            [animatedLayer addAnimation:animation forKey:@"contents"];
            // Fade the ghost layer partially out over the whole animation.
            CABasicAnimation *fadeOutAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
            fadeOutAnimation.fromValue = @0.6f;
            fadeOutAnimation.toValue = @0.3f;
            fadeOutAnimation.additive = YES;
            fadeOutAnimation.removedOnCompletion = YES;
            fadeOutAnimation.beginTime = beginTime;
            fadeOutAnimation.duration = animation.beginTime + animation.duration + 2;
            fadeOutAnimation.fillMode = kCAFillModeBoth;
            [animatedLayer addAnimation:fadeOutAnimation forKey:@"opacityOut"];
            [animatedLayers addObject:(id)animatedLayer];
            [parentLayer addSublayer:animatedLayer];
        }
    }
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
    NSLog(@"videoSizeResult width: %f, Height: %f", videoSizeResult.width, videoSizeResult.height);
    if (animatedLayers)
    {
        [animatedLayers removeAllObjects];
        animatedLayers = nil;
    }
    // Export: medium-quality MP4 to the fixed output path (spaces stripped
    // for the file URL by returnFormatString:).
    NSString *exportPath = [self getOutputFilePath];
    NSURL *exportURL = [NSURL fileURLWithPath:[self returnFormatString:exportPath]];
    // Delete old file
    unlink([exportPath UTF8String]);
    _exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    _exportSession.outputURL = exportURL;
    _exportSession.outputFileType = AVFileTypeMPEG4;
    _exportSession.shouldOptimizeForNetworkUse = YES;
    if (audioMix)
    {
        _exportSession.audioMix = audioMix;
    }
    if (videoComposition)
    {
        _exportSession.videoComposition = videoComposition;
    }
    // Timers must be scheduled on a run loop; hop to main.
    dispatch_async(dispatch_get_main_queue(), ^{
        // Progress monitor
        _timerEffect = [NSTimer scheduledTimerWithTimeInterval:0.3f
                                                        target:self
                                                      selector:@selector(retrievingExportProgress)
                                                      userInfo:nil
                                                       repeats:YES];
    });
    // NOTE(review): blockSelf (and the direct _exportSession/self reads in
    // the handler) capture self strongly; that keeps self alive until the
    // export finishes, at the cost of a temporary cycle.
    __block typeof(self) blockSelf = self;
    [_exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
        switch ([_exportSession status])
        {
            case AVAssetExportSessionStatusCompleted:
            {
                // Close timer
                [blockSelf.timerEffect invalidate];
                blockSelf.timerEffect = nil;
                // Save video to Album
                [self writeExportedVideoToAssetsLibrary:exportPath];
                NSLog(@"Export Successful: %@", exportPath);
                break;
            }
            case AVAssetExportSessionStatusFailed:
            {
                // Close timer
                [blockSelf.timerEffect invalidate];
                blockSelf.timerEffect = nil;
                // Output path
                self.filenameBlock = ^(void) {
                    return @"";
                };
                if (self.finishVideoBlock)
                {
                    self.finishVideoBlock(NO, GBLocalizedString(@"MsgConvertFailed"));
                }
                NSLog(@"Export failed: %@, %@", [[blockSelf.exportSession error] localizedDescription], [blockSelf.exportSession error]);
                break;
            }
            case AVAssetExportSessionStatusCancelled:
            {
                NSLog(@"Canceled: %@", blockSelf.exportSession.error);
                break;
            }
            default:
                break;
        }
    }];
}
  711. // Convert 'space' char
  712. - (NSString *)returnFormatString:(NSString *)str
  713. {
  714. return [str stringByReplacingOccurrencesOfString:@" " withString:@""];
  715. }
  716. #pragma mark - Export Progress Callback
  717. - (void)retrievingFilterProgress
  718. {
  719. if (_movieFile && _exportProgressBlock)
  720. {
  721. NSString *title = GBLocalizedString(@"Processing");
  722. self.exportProgressBlock([NSNumber numberWithFloat:_movieFile.progress], title);
  723. }
  724. }
  725. - (void)retrievingExportProgress
  726. {
  727. if (_exportSession && _exportProgressBlock)
  728. {
  729. self.exportProgressBlock([NSNumber numberWithFloat:_exportSession.progress], nil);
  730. }
  731. }
  732. #pragma mark - NSUserDefaults
  733. #pragma mark - setShouldRightRotate90
  734. - (void)setShouldRightRotate90:(BOOL)shouldRotate withTrackID:(NSInteger)trackID
  735. {
  736. NSString *identifier = [NSString stringWithFormat:@"TrackID_%ld", (long)trackID];
  737. NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
  738. if (shouldRotate)
  739. {
  740. [userDefaultes setBool:YES forKey:identifier];
  741. }
  742. else
  743. {
  744. [userDefaultes setBool:NO forKey:identifier];
  745. }
  746. [userDefaultes synchronize];
  747. }
  748. - (BOOL)shouldRightRotate90ByTrackID:(NSInteger)trackID
  749. {
  750. NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
  751. NSString *identifier = [NSString stringWithFormat:@"TrackID_%ld", (long)trackID];
  752. BOOL result = [[userDefaultes objectForKey:identifier] boolValue];
  753. NSLog(@"shouldRightRotate90ByTrackID %@ : %@", identifier, result?@"Yes":@"No");
  754. if (result)
  755. {
  756. return YES;
  757. }
  758. else
  759. {
  760. return NO;
  761. }
  762. }
  763. #pragma mark - shouldDisplayTextEffects
  764. - (BOOL)shouldDisplayTextEffects
  765. {
  766. NSString *flag = @"ShouldDisplayTextEffects";
  767. if ([[[NSUserDefaults standardUserDefaults] objectForKey:flag] boolValue])
  768. {
  769. return YES;
  770. }
  771. else
  772. {
  773. return NO;
  774. }
  775. }
  776. #pragma mark - shouldDisplayGhost
  777. - (BOOL)shouldDisplayGhost
  778. {
  779. NSString *flag = @"ShouldDisplayGhost";
  780. if ([[[NSUserDefaults standardUserDefaults] objectForKey:flag] boolValue])
  781. {
  782. return YES;
  783. }
  784. else
  785. {
  786. return NO;
  787. }
  788. }
  789. @end