
/Dependencies/CaptureCamera/VideoCapture/CameraRecorder.m

https://gitlab.com/Mr.Tomato/VideoEffects
Objective C
#import "CameraRecorder.h"
#import "CaptureDefine.h"
#import "CaptureToolKit.h"
#import <AssetsLibrary/AssetsLibrary.h>

@interface VideoData : NSObject

@property (assign, nonatomic) CGFloat duration;
@property (strong, nonatomic) NSURL *fileURL;

@end

@implementation VideoData
@end

#define COUNT_DUR_TIMER_INTERVAL 0.05

@interface CameraRecorder ()

@property (strong, nonatomic) NSTimer *countDurTimer;
@property (assign, nonatomic) CGFloat currentVideoDur;
@property (strong, nonatomic) NSURL *currentFileURL; // object property; was (assign), which leaves a dangling pointer under ARC
@property (assign, nonatomic) CGFloat totalVideoDur;
@property (strong, nonatomic) NSMutableArray *videoFileDataArray;

@property (assign, nonatomic) BOOL isFrontCameraSupported;
@property (assign, nonatomic) BOOL isCameraSupported;
@property (assign, nonatomic) BOOL isTorchSupported;
@property (assign, nonatomic) BOOL isTorchOn;
@property (assign, nonatomic) BOOL isUsingFrontFacingCamera;

@property (strong, nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (strong, nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (assign, nonatomic) AVCaptureVideoOrientation orientation;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureMovieFileOutput *movieFileOutput;

- (void)mergeVideoFiles;

@end

@implementation CameraRecorder

#pragma mark - Life Cycle

- (id)init
{
    self = [super init];
    if (self)
    {
        [self initalize];
    }
    return self;
}

- (void)initalize
{
    // Set up the capture session (back camera by default)
    [self initCaptureByBackCamera:TRUE];

    self.videoFileDataArray = [[NSMutableArray alloc] init];
    self.totalVideoDur = 0.0f;
}

- (void)initCaptureByBackCamera:(BOOL)back
{
    // Session
    self.captureSession = [[AVCaptureSession alloc] init];

    // Input
    AVCaptureDevice *frontCamera = nil;
    AVCaptureDevice *backCamera = nil;
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras)
    {
        if (camera.position == AVCaptureDevicePositionFront)
        {
            frontCamera = camera;
        }
        else
        {
            backCamera = camera;
        }
    }

    if (!backCamera)
    {
        self.isCameraSupported = NO;
        return;
    }
    else
    {
        self.isCameraSupported = YES;

        if ([backCamera hasTorch])
        {
            self.isTorchSupported = YES;
        }
        else
        {
            self.isTorchSupported = NO;
        }
    }

    if (!frontCamera)
    {
        self.isFrontCameraSupported = NO;
    }
    else
    {
        self.isFrontCameraSupported = YES;
    }

    [backCamera lockForConfiguration:nil];
    if ([backCamera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure])
    {
        [backCamera setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    [backCamera unlockForConfiguration];

    // Add default camera direction by Johnny Xu.
    if (back)
    {
        self.isUsingFrontFacingCamera = FALSE;
        self.videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
    }
    else
    {
        self.isUsingFrontFacingCamera = TRUE;
        self.videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:nil];
    }

    AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio] error:nil];

    if ([_captureSession canAddInput:_videoDeviceInput])
    {
        [_captureSession addInput:_videoDeviceInput];
    }
    if ([_captureSession canAddInput:audioDeviceInput])
    {
        [_captureSession addInput:audioDeviceInput];
    }

    // Output by video
    self.movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([_captureSession canAddOutput:_movieFileOutput])
    {
        [_captureSession addOutput:_movieFileOutput];
    }

    // Output by picture
    AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
                                    AVVideoCodecJPEG, AVVideoCodecKey,
                                    nil];
    [stillImageOutput setOutputSettings:outputSettings];
    self.stillImageOutput = stillImageOutput;
    if ([_captureSession canAddOutput:self.stillImageOutput])
    {
        [_captureSession addOutput:self.stillImageOutput];
    }

    // Preset
    _captureSession.sessionPreset = AVCaptureSessionPreset640x480; // AVCaptureSessionPresetHigh

    // Preview layer
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    [_captureSession startRunning];
}

- (void)clearAll
{
    [_captureSession stopRunning];
    [_previewLayer removeFromSuperlayer];

    _stillImageOutput = nil;
    _movieFileOutput = nil;
    _captureSession = nil;
    _previewLayer = nil;
}

- (void)startCountDurTimer
{
    self.countDurTimer = [NSTimer scheduledTimerWithTimeInterval:COUNT_DUR_TIMER_INTERVAL target:self selector:@selector(onTimer:) userInfo:nil repeats:YES];
}

- (void)onTimer:(NSTimer *)timer
{
    self.currentVideoDur += COUNT_DUR_TIMER_INTERVAL;

    if ([_delegate respondsToSelector:@selector(doingCurrentRecording:duration:recordedVideosTotalDuration:)])
    {
        [_delegate doingCurrentRecording:_currentFileURL duration:_currentVideoDur recordedVideosTotalDuration:_totalVideoDur];
    }

    if (_totalVideoDur + _currentVideoDur >= MAX_VIDEO_DUR)
    {
        [self stopCurrentVideoRecording];
    }
}

- (void)stopCountDurTimer
{
    [_countDurTimer invalidate];
    self.countDurTimer = nil;
}

- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
    for (AVCaptureConnection *connection in connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:mediaType])
            {
                return connection;
            }
        }
    }

    return nil;
}

- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray
{
    NSError *error = nil;

    CGSize renderSize = CGSizeMake(0, 0);
    NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    CMTime totalDuration = kCMTimeZero;

    NSMutableArray *assetTrackArray = [[NSMutableArray alloc] init];
    NSMutableArray *assetArray = [[NSMutableArray alloc] init];
    for (NSURL *fileURL in fileURLArray)
    {
        NSLog(@"fileURL: %@", fileURL);

        AVAsset *asset = [AVAsset assetWithURL:fileURL];
        if (!asset)
        {
            // Retry once
            asset = [AVAsset assetWithURL:fileURL];
            if (!asset)
            {
                continue;
            }
        }
        [assetArray addObject:asset];

        AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (!assetTrack)
        {
            // Retry once
            assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
            if (!assetTrack)
            {
                NSLog(@"Error reading the transformed video track");
            }
        }
        [assetTrackArray addObject:assetTrack];

        NSLog(@"assetTrack.naturalSize Width: %f, Height: %f", assetTrack.naturalSize.width, assetTrack.naturalSize.height);
        renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.width);
        renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.height);
    }

    NSLog(@"renderSize width: %f, Height: %f", renderSize.width, renderSize.height);
    if (renderSize.height == 0 || renderSize.width == 0)
    {
        if ([_delegate respondsToSelector:@selector(didRecordingVideosError:)])
        {
            [_delegate didRecordingVideosError:nil];
        }
        return;
    }

    CGFloat renderW = MIN(renderSize.width, renderSize.height);

    for (int i = 0; i < [assetArray count] && i < [assetTrackArray count]; i++)
    {
        AVAsset *asset = [assetArray objectAtIndex:i];
        AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];

        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
        {
            AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetAudioTrack atTime:totalDuration error:nil];
        }
        else
        {
            NSLog(@"Reminder: video has no audio track!");
        }

        AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                            ofTrack:assetTrack
                             atTime:totalDuration
                              error:&error];

        // Fix orientation issue
        AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        totalDuration = CMTimeAdd(totalDuration, asset.duration);

        CGFloat rate;
        rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);

        CGAffineTransform layerTransform = CGAffineTransformMake(assetTrack.preferredTransform.a, assetTrack.preferredTransform.b, assetTrack.preferredTransform.c, assetTrack.preferredTransform.d, assetTrack.preferredTransform.tx * rate, assetTrack.preferredTransform.ty * rate);
        layerTransform = CGAffineTransformConcat(layerTransform, CGAffineTransformMake(1, 0, 0, 1, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height) / 2.0)); // center the square crop
        layerTransform = CGAffineTransformScale(layerTransform, rate, rate);

        [layerInstruction setTransform:layerTransform atTime:kCMTimeZero];
        [layerInstruction setOpacity:0.0 atTime:totalDuration]; // hide this track once its segment ends
        [layerInstructionArray addObject:layerInstruction];
    }

    // Get save path
    NSURL *mergeFileURL = [NSURL fileURLWithPath:[CaptureToolKit getVideoMergeFilePathString]];

    // Export
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
    mainInstruction.layerInstructions = layerInstructionArray;

    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = @[mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    mainCompositionInst.renderSize = CGSizeMake(renderW, renderW);
    NSLog(@"Video: width = %f, height = %f", renderW, renderW);

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = mainCompositionInst;
    exporter.outputURL = mergeFileURL;
    exporter.shouldOptimizeForNetworkUse = YES;

    // Fix iOS 5.x crash issue by Johnny Xu.
    if (iOS5)
    {
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
    }
    else
    {
        exporter.outputFileType = AVFileTypeMPEG4;
    }

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // Fix can't-export issue under iOS 5.x by Johnny Xu.
        switch ([exporter status])
        {
            case AVAssetExportSessionStatusCompleted:
            {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if ([_delegate respondsToSelector:@selector(didRecordingVideosSuccess:)])
                    {
                        [_delegate didRecordingVideosSuccess:mergeFileURL];
                    }

                    NSLog(@"Export video success.");

                    // Test
                    // [self writeExportedVideoToAssetsLibrary:mergeFileURL];
                });
                break;
            }
            case AVAssetExportSessionStatusFailed:
            {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if ([_delegate respondsToSelector:@selector(didRecordingVideosError:)])
                    {
                        [_delegate didRecordingVideosError:[exporter error]];
                    }

                    NSLog(@"Export video failed.");
                });
                break;
            }
            case AVAssetExportSessionStatusCancelled:
            {
                NSLog(@"Export canceled");
                break;
            }
            case AVAssetExportSessionStatusWaiting:
            {
                NSLog(@"Export Waiting");
                break;
            }
            case AVAssetExportSessionStatusExporting:
            {
                NSLog(@"Export Exporting");
                break;
            }
            default:
                break;
        }
    }];
}

- (AVCaptureDevice *)getCameraDevice:(BOOL)isFront
{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *frontCamera = nil;
    AVCaptureDevice *backCamera = nil;
    for (AVCaptureDevice *camera in cameras)
    {
        if (camera.position == AVCaptureDevicePositionBack)
        {
            backCamera = camera;
        }
        else
        {
            frontCamera = camera;
        }
    }

    if (isFront)
    {
        return frontCamera;
    }

    return backCamera;
}

- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
{
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = _previewLayer.bounds.size;

    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;

    if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize])
    {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    }
    else
    {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [self.videoDeviceInput ports])
        {
            // The video input port currently in use
            if ([port mediaType] == AVMediaTypeVideo)
            {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;

                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;

                if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect])
                {
                    if (viewRatio > apertureRatio)
                    {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2)
                        {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    }
                    else
                    {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2)
                        {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                }
                else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill])
                {
                    if (viewRatio > apertureRatio)
                    {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    }
                    else
                    {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                }

                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }

    return pointOfInterest;
}

- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    // NSLog(@"focus point: %f %f", point.x, point.y);
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        AVCaptureDevice *device = [_videoDeviceInput device];
        NSError *error = nil;
        if ([device lockForConfiguration:&error])
        {
            if ([device isFocusPointOfInterestSupported])
            {
                [device setFocusPointOfInterest:point];
            }
            if ([device isFocusModeSupported:focusMode])
            {
                [device setFocusMode:focusMode];
            }
            if ([device isExposurePointOfInterestSupported])
            {
                [device setExposurePointOfInterest:point];
            }
            if ([device isExposureModeSupported:exposureMode])
            {
                [device setExposureMode:exposureMode];
            }
            [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
            [device unlockForConfiguration];
        }
        else
        {
            NSLog(@"Focus error: %@", error);
        }
    });
}

#pragma mark - Method

- (void)focusInPoint:(CGPoint)touchPoint
{
    CGPoint devicePoint = [self convertToPointOfInterestFromViewCoordinates:touchPoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}

- (void)openTorch:(BOOL)open
{
    if (!_isTorchSupported || self.isUsingFrontFacingCamera)
    {
        return;
    }

    dispatch_async(dispatch_get_main_queue(), ^{
        AVCaptureDevice *flashLight = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        BOOL success = [flashLight lockForConfiguration:nil];
        if (success)
        {
            if (!open)
            {
                [flashLight setTorchMode:AVCaptureTorchModeOff];
                self.isTorchOn = FALSE;
            }
            else
            {
                [flashLight setTorchMode:AVCaptureTorchModeOn];
                self.isTorchOn = TRUE;
            }
            [flashLight unlockForConfiguration];
        }
    });
}

- (void)switchCamera
{
    if (!_isFrontCameraSupported || !_isCameraSupported || !_videoDeviceInput)
    {
        return;
    }

    if (_isTorchOn)
    {
        [self openTorch:NO];
    }

    [_captureSession beginConfiguration];
    [_captureSession removeInput:_videoDeviceInput];

    self.isUsingFrontFacingCamera = !_isUsingFrontFacingCamera;
    AVCaptureDevice *device = [self getCameraDevice:_isUsingFrontFacingCamera];

    [device lockForConfiguration:nil];
    if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure])
    {
        [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    [device unlockForConfiguration];

    self.videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    [_captureSession addInput:_videoDeviceInput];
    [_captureSession commitConfiguration];
}

- (BOOL)isFrontCamera
{
    return _isUsingFrontFacingCamera;
}

- (BOOL)isTorchOn
{
    return _isTorchOn;
}

- (BOOL)isTorchSupported
{
    return _isTorchSupported;
}

- (BOOL)isFrontCameraSupported
{
    return _isFrontCameraSupported;
}

- (BOOL)isCameraSupported
{
    return _isCameraSupported;
}

- (void)mergeVideoFiles
{
    NSMutableArray *fileURLArray = [[NSMutableArray alloc] init];
    for (VideoData *data in _videoFileDataArray)
    {
        [fileURLArray addObject:data.fileURL];
    }

    [self mergeAndExportVideosAtFileURLs:fileURLArray];
}

// Total duration of all recorded clips
- (CGFloat)getTotalVideoDuration
{
    return _totalVideoDur;
}

// Number of clips recorded so far
- (NSUInteger)getVideoCount
{
    return [_videoFileDataArray count];
}

- (void)startRecordingToOutputFileURL:(NSURL *)fileURL
{
    if (_totalVideoDur >= MAX_VIDEO_DUR)
    {
        NSLog(@"Total video duration has reached the maximum");
        return;
    }

    [_movieFileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
}

- (void)stopCurrentVideoRecording
{
    [self stopCountDurTimer];
    [_movieFileOutput stopRecording];
}

// End recording
- (void)endVideoRecording
{
    [self mergeVideoFiles];
}

// Does not notify the delegate
- (void)deleteAllVideo
{
    for (VideoData *data in _videoFileDataArray)
    {
        NSURL *videoFileURL = data.fileURL;
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            NSString *filePath = [[videoFileURL absoluteString] stringByReplacingOccurrencesOfString:@"file://" withString:@""];

            NSFileManager *fileManager = [NSFileManager defaultManager];
            if ([fileManager fileExistsAtPath:filePath])
            {
                NSError *error = nil;
                [fileManager removeItemAtPath:filePath error:&error];
                if (error)
                {
                    NSLog(@"deleteAllVideo: failed to delete video file: %@", error);
                }
            }
        });
    }
}

// Notifies the delegate
- (void)deleteLastVideo
{
    if ([_videoFileDataArray count] == 0)
    {
        return;
    }

    VideoData *data = (VideoData *)[_videoFileDataArray lastObject];
    NSURL *videoFileURL = data.fileURL;
    CGFloat videoDuration = data.duration;
    [_videoFileDataArray removeLastObject];
    _totalVideoDur -= videoDuration;

    // Delete the file
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSString *filePath = [[videoFileURL absoluteString] stringByReplacingOccurrencesOfString:@"file://" withString:@""];

        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:filePath])
        {
            NSError *error = nil;
            [fileManager removeItemAtPath:filePath error:&error];

            dispatch_async(dispatch_get_main_queue(), ^{
                // Notify the delegate
                if ([_delegate respondsToSelector:@selector(didRemoveCurrentVideo:totalDuration:error:)])
                {
                    [_delegate didRemoveCurrentVideo:videoFileURL totalDuration:_totalVideoDur error:error];
                }
            });
        }
    });
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    self.currentFileURL = fileURL;
    self.currentVideoDur = 0.0f;
    [self startCountDurTimer];

    if ([_delegate respondsToSelector:@selector(didStartCurrentRecording:)])
    {
        [_delegate didStartCurrentRecording:fileURL];
    }
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    self.totalVideoDur += _currentVideoDur;
    NSLog(@"Duration of this clip: %f", _currentVideoDur);
    NSLog(@"Total recorded duration so far: %f", _totalVideoDur);

    if (!error)
    {
        VideoData *data = [[VideoData alloc] init];
        data.duration = _currentVideoDur;
        data.fileURL = outputFileURL;
        [_videoFileDataArray addObject:data];
    }

    if ([_delegate respondsToSelector:@selector(didFinishCurrentRecording:duration:totalDuration:error:)])
    {
        [_delegate didFinishCurrentRecording:outputFileURL duration:_currentVideoDur totalDuration:_totalVideoDur error:error];
    }
}

#pragma mark - Take picture

- (UIImage *)capturePicture
{
    AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[self.stillImageOutput connections]];
    if ([videoConnection isVideoOrientationSupported])
    {
        [videoConnection setVideoOrientation:[self orientation]];
    }

    if (self.isTorchOn)
    {
        [self openTorch:YES];
        [NSThread sleepForTimeInterval:0.05];
    }

    // Note: the still image is delivered asynchronously, so the synchronous
    // return value below is typically nil; callers should rely on the
    // didTakePictureSuccess:/didTakePictureError: delegate callbacks instead.
    __block UIImage *resultImage = nil;
    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
                                                         completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
     {
         if (imageDataSampleBuffer != NULL)
         {
             NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
             UIImage *image = [[UIImage alloc] initWithData:imageData];

             CGFloat minLen = MIN(DEVICE_SIZE.width, DEVICE_SIZE.height);
             CGRect cropRect = CGRectMake(0, 0, minLen, minLen);
             resultImage = [self getCropImage:[self imageFixOrientation:image] cropRect:cropRect];

             BOOL success = [self saveImage:resultImage];
             if (success)
             {
                 NSString *imageFile = [self getImageOutputFile];
                 dispatch_async(dispatch_get_main_queue(), ^{
                     if ([_delegate respondsToSelector:@selector(didTakePictureSuccess:)])
                     {
                         [_delegate didTakePictureSuccess:imageFile];
                     }

                     NSLog(@"CaptureImage save success: %@", imageFile);
                 });
             }

             // Test
             // [self writeExportedPhotoToAssetsLibrary:resultImage];
         }
         else if (error)
         {
             dispatch_async(dispatch_get_main_queue(), ^{
                 if ([_delegate respondsToSelector:@selector(didTakePictureError:)])
                 {
                     [_delegate didTakePictureError:error];
                 }

                 NSLog(@"CaptureImage Failed: %@", error.description);
             });
         }
     }];

    if (self.isTorchOn)
    {
        [self openTorch:NO];
    }

    return resultImage;
}

- (NSString *)getImageOutputFile
{
    NSString *filename = @"image.jpg";
    NSString *imageOutputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
    return imageOutputFile;
}

- (BOOL)saveImage:(UIImage *)image
{
    NSData *data = UIImageJPEGRepresentation(image, 1);
    NSString *imageOutputFile = [self getImageOutputFile];
    unlink([imageOutputFile UTF8String]);
    return [data writeToFile:imageOutputFile atomically:YES];
}

- (UIImage *)imageFixOrientation:(UIImage *)image
{
    UIImageOrientation imageOrientation = [image imageOrientation];
    if (imageOrientation == UIImageOrientationUp)
        return image;

    CGAffineTransform transform = CGAffineTransformIdentity;
    UIImageOrientation io = imageOrientation;

    if (io == UIImageOrientationDown || io == UIImageOrientationDownMirrored)
    {
        transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
        transform = CGAffineTransformRotate(transform, M_PI);
    }
    else if (io == UIImageOrientationLeft || io == UIImageOrientationLeftMirrored)
    {
        transform = CGAffineTransformTranslate(transform, image.size.width, 0);
        transform = CGAffineTransformRotate(transform, M_PI_2);
    }
    else if (io == UIImageOrientationRight || io == UIImageOrientationRightMirrored)
    {
        transform = CGAffineTransformTranslate(transform, 0, image.size.height);
        transform = CGAffineTransformRotate(transform, -M_PI_2);
    }

    if (io == UIImageOrientationUpMirrored || io == UIImageOrientationDownMirrored)
    {
        transform = CGAffineTransformTranslate(transform, image.size.width, 0);
        transform = CGAffineTransformScale(transform, -1, 1);
    }
    else if (io == UIImageOrientationLeftMirrored || io == UIImageOrientationRightMirrored)
    {
        transform = CGAffineTransformTranslate(transform, image.size.height, 0);
        transform = CGAffineTransformScale(transform, -1, 1);
    }

    CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
                                             CGImageGetBitsPerComponent(image.CGImage), 0,
                                             CGImageGetColorSpace(image.CGImage),
                                             CGImageGetBitmapInfo(image.CGImage));
    CGContextConcatCTM(ctx, transform);
    if (io == UIImageOrientationLeft || io == UIImageOrientationLeftMirrored || io == UIImageOrientationRight || io == UIImageOrientationRightMirrored)
    {
        CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.height, image.size.width), image.CGImage);
    }
    else
    {
        CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
    }

    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);

    return img;
}

- (UIImage *)getCropImage:(UIImage *)originalImage cropRect:(CGRect)cropRect
{
    // Scale to fit the screen
    CGFloat oriWidth = cropRect.size.width;
    CGFloat oriHeight = originalImage.size.height * (oriWidth / originalImage.size.width);
    CGFloat oriX = cropRect.origin.x + (cropRect.size.width - oriWidth) / 2;
    CGFloat oriY = cropRect.origin.y + (cropRect.size.height - oriHeight) / 2;
    CGRect latestRect = CGRectMake(oriX, oriY, oriWidth, oriHeight);

    return [self getSubImageByCropRect:cropRect latestRect:latestRect originalImage:originalImage];
}

- (UIImage *)getSubImageByCropRect:(CGRect)cropRect latestRect:(CGRect)latestRect originalImage:(UIImage *)originalImage
{
    CGRect squareFrame = cropRect;
    CGFloat scaleRatio = latestRect.size.width / originalImage.size.width;
    CGFloat x = (squareFrame.origin.x - latestRect.origin.x) / scaleRatio;
    CGFloat y = (squareFrame.origin.y - latestRect.origin.y) / scaleRatio;
    CGFloat w = squareFrame.size.width / scaleRatio;
    CGFloat h = squareFrame.size.height / scaleRatio;

    if (latestRect.size.width < cropRect.size.width)
    {
        CGFloat newW = originalImage.size.width;
        CGFloat newH = newW * (cropRect.size.height / cropRect.size.width);
        x = 0;
        y = y + (h - newH) / 2;
        w = newW;
        h = newH;
    }

    if (latestRect.size.height < cropRect.size.height)
    {
        CGFloat newH = originalImage.size.height;
        CGFloat newW = newH * (cropRect.size.width / cropRect.size.height);
        x = x + (w - newW) / 2;
        y = 0;
        w = newW;
        h = newH;
    }

    CGRect imageRect = CGRectMake(x, y, w, h);
    CGImageRef imageRef = originalImage.CGImage;
    CGImageRef subImageRef = CGImageCreateWithImageInRect(imageRef, imageRect);
    CGSize size = CGSizeMake(imageRect.size.width, imageRect.size.height);
    // NSLog(@"Crop width: %f, height: %f", size.width, size.height);

    UIImage *smallImage;
    UIGraphicsBeginImageContext(size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextDrawImage(context, imageRect, subImageRef);
    // NSLog(@"context: %@", context);
    if (context)
    {
        smallImage = [UIImage imageWithCGImage:subImageRef];
    }
    else
    {
        smallImage = nil;
    }
    UIGraphicsEndImageContext();
    CGImageRelease(subImageRef);

    return smallImage;
}

#pragma mark - Private Methods

- (void)writeExportedVideoToAssetsLibrary:(NSURL *)outputURL
{
    NSURL *exportURL = outputURL; // [NSURL fileURLWithPath:outputURL];
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL])
    {
        [library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error)
         {
             dispatch_async(dispatch_get_main_queue(), ^{
                 if (error)
                 {
                 }
                 else
                 {
                 }

#if !TARGET_IPHONE_SIMULATOR
                 [[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
#endif
             });
         }];
    }
    else
    {
        NSLog(@"Video could not be exported to camera roll.");
    }
}

- (void)writeExportedPhotoToAssetsLibrary:(UIImage *)image
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeImageToSavedPhotosAlbum:[image CGImage]
                              orientation:(ALAssetOrientation)[image imageOrientation]
                          completionBlock:^(NSURL *assetURL, NSError *error)
     {
         if (error)
         {
             NSLog(@"Photo could not be exported to camera roll. -- %@", error.description);
         }
         else
         {
             NSLog(@"Photo exported to camera roll successfully.");
         }
     }];
}

@end
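
Usage sketch (not part of the file above): a minimal illustration of how a view controller might drive this recorder, based only on the methods and delegate callbacks visible in CameraRecorder.m. The protocol name `CameraRecorderDelegate`, the `delegate` and `previewLayer` property declarations, and the clip file naming are assumptions; the real declarations live in CameraRecorder.h, which is not shown here.

// Hypothetical usage sketch. Assumes CameraRecorder.h exposes a `delegate`
// property, the `previewLayer` property used above, and a delegate protocol
// whose selectors match the calls made in CameraRecorder.m.
#import <UIKit/UIKit.h>
#import "CameraRecorder.h"

@interface RecordViewController : UIViewController <CameraRecorderDelegate> // protocol name assumed
@property (strong, nonatomic) CameraRecorder *recorder;
@end

@implementation RecordViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    self.recorder = [[CameraRecorder alloc] init];
    self.recorder.delegate = self;

    // Attach the preview layer created in initCaptureByBackCamera:
    self.recorder.previewLayer.frame = self.view.bounds;
    [self.view.layer insertSublayer:self.recorder.previewLayer atIndex:0];
}

- (void)startClip
{
    // Each clip goes to its own temporary file; the recorder accumulates
    // clips until endVideoRecording merges and exports them.
    NSString *name = [NSString stringWithFormat:@"clip-%@.mov", [[NSUUID UUID] UUIDString]];
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:name];
    [self.recorder startRecordingToOutputFileURL:[NSURL fileURLWithPath:path]];
}

- (void)finishRecording
{
    [self.recorder stopCurrentVideoRecording];
    [self.recorder endVideoRecording]; // triggers mergeVideoFiles + export
}

#pragma mark - Delegate callbacks (selector names taken from CameraRecorder.m)

- (void)didRecordingVideosSuccess:(NSURL *)mergedFileURL
{
    NSLog(@"Merged video ready at %@", mergedFileURL);
}

- (void)didRecordingVideosError:(NSError *)error
{
    NSLog(@"Merge/export failed: %@", error);
}

@end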