
/Dependencies/CustomVideoCompositor/CustomVideoCompositor.m

https://gitlab.com/Mr.Tomato/VideoEffects
Objective C | 500 lines | 375 code | 64 blank | 61 comment
//
//  CustomVideoCompositor
//  VideoEffects
//
//  Created by Johnny Xu(徐景周) on 5/30/15.
//  Copyright (c) 2015 Future Studio. All rights reserved.
//

@import UIKit;

#import "CustomVideoCompositor.h"
#import <CoreImage/CoreImage.h>

@interface CustomVideoCompositor()

@end

@implementation CustomVideoCompositor
- (instancetype)init
{
    // Call through to the superclass designated initializer instead of
    // returning an uninitialized instance.
    self = [super init];
    return self;
}
#pragma mark - startVideoCompositionRequest
- (void)startVideoCompositionRequest:(AVAsynchronousVideoCompositionRequest *)request
{
    NSMutableArray *videoArray = [[NSMutableArray alloc] init];
    CVPixelBufferRef destination = [request.renderContext newPixelBuffer];
    if (!destination)
    {
        // The render context could not vend an output buffer; fail the request
        // instead of handing AVFoundation a NULL frame.
        [request finishWithError:[NSError errorWithDomain:@"CustomVideoCompositor" code:-1 userInfo:nil]];
        return;
    }
    
    if (request.sourceTrackIDs.count > 0)
    {
        // Collect the source frame of every track that contributes to this request.
        for (NSUInteger i = 0; i < [request.sourceTrackIDs count]; ++i)
        {
            CVPixelBufferRef videoBufferRef = [request sourceFrameByTrackID:[[request.sourceTrackIDs objectAtIndex:i] intValue]];
            if (videoBufferRef)
            {
                [videoArray addObject:(__bridge id)(videoBufferRef)];
            }
        }
        
        // Lock the source buffers for reading and the destination for writing while rendering.
        for (NSUInteger i = 0; i < [videoArray count]; ++i)
        {
            CVPixelBufferRef video = (__bridge CVPixelBufferRef)([videoArray objectAtIndex:i]);
            CVPixelBufferLockBaseAddress(video, kCVPixelBufferLock_ReadOnly);
        }
        CVPixelBufferLockBaseAddress(destination, 0);
        
        [self renderBuffer:videoArray toBuffer:destination];
        
        CVPixelBufferUnlockBaseAddress(destination, 0);
        for (NSUInteger i = 0; i < [videoArray count]; ++i)
        {
            CVPixelBufferRef video = (__bridge CVPixelBufferRef)([videoArray objectAtIndex:i]);
            CVPixelBufferUnlockBaseAddress(video, kCVPixelBufferLock_ReadOnly);
        }
    }
    
    [request finishWithComposedVideoFrame:destination];
    CVBufferRelease(destination);
}
- (void)renderContextChanged:(AVVideoCompositionRenderContext *)newRenderContext
{
}

- (NSDictionary *)requiredPixelBufferAttributesForRenderContext
{
    return @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @[ @(kCVPixelFormatType_32BGRA) ] };
}

- (NSDictionary *)sourcePixelBufferAttributes
{
    return @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @[ @(kCVPixelFormatType_32BGRA) ] };
}
#pragma mark - renderBuffer
- (void)renderBuffer:(NSMutableArray *)videoBufferRefArray toBuffer:(CVPixelBufferRef)destination
{
    size_t width = CVPixelBufferGetWidth(destination);
    size_t height = CVPixelBufferGetHeight(destination);
    
    // Convert every source pixel buffer into a CGImage, rotating where the track requires it.
    NSMutableArray *imageRefArray = [[NSMutableArray alloc] init];
    for (NSUInteger i = 0; i < [videoBufferRefArray count]; ++i)
    {
        CVPixelBufferRef videoFrame = (__bridge CVPixelBufferRef)([videoBufferRefArray objectAtIndex:i]);
        CGImageRef imageRef = [self createSourceImageFromBuffer:videoFrame];
        if (imageRef)
        {
            // Rotation flags are keyed 1-based in source order (TrackID_1, TrackID_2, ...).
            if ([self shouldRightRotate90ByTrackID:i+1])
            {
                // Right rotation 90; release the unrotated image to avoid leaking it.
                CGImageRef rotatedRef = CGImageRotated(imageRef, M_PI_2);
                CGImageRelease(imageRef);
                imageRef = rotatedRef;
            }
            [imageRefArray addObject:(__bridge id)(imageRef)];
        }
        CGImageRelease(imageRef);
    }
    
    if ([imageRefArray count] < 2)
    {
        NSLog(@"renderBuffer needs at least two source frames (got %lu).", (unsigned long)[imageRefArray count]);
        return;
    }
    
    CGContextRef gc = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(destination), width, height, 8, CVPixelBufferGetBytesPerRow(destination), CGImageGetColorSpace((CGImageRef)imageRefArray[0]), CGImageGetBitmapInfo((CGImageRef)imageRefArray[0]));
    
    CGRect rectVideo = CGRectZero;
    rectVideo.size = CGSizeMake(width, height);
    
    // Background video
    CGContextDrawImage(gc, rectVideo, (CGImageRef)imageRefArray[0]);
    
    // Face detection
    NSMutableArray *faceRects = [[NSMutableArray alloc] init];
//    CIImage* image = [CIImage imageWithCGImage:(CGImageRef)imageRefArray[0]];
//    NSDictionary *opts = [NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
//                                                     forKey:CIDetectorAccuracy];
//    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
//                                              context:nil
//                                              options:opts];
//
//    NSArray* features = [detector featuresInImage:image];
//    for (CIFaceFeature *face in features)
//    {
//        CGRect faceRect = face.bounds;
//        NSLog(@"faceRect.x: %f, faceRect.y: %f, faceRect.width: %f, faceRect.height: %f", faceRect.origin.x, faceRect.origin.y, faceRect.size.width, faceRect.size.height);
//
//        [faceRects addObject:[NSValue valueWithCGRect:faceRect]];
//    }
    
    // Foreground video, clipped to the animated oval/polygon (or to detected face rects).
    [self addPath:gc width:width height:height faceRects:faceRects needCalc:YES];
    CGContextClip(gc);
    CGContextDrawImage(gc, rectVideo, (CGImageRef)imageRefArray[1]);
    
    if ([self shouldDisplayInnerBorder])
    {
        [self addPath:gc width:width height:height faceRects:faceRects needCalc:NO];
        CGContextDrawPath(gc, kCGPathStroke);
        
        if (!CGContextIsPathEmpty(gc))
        {
            CGContextClip(gc);
        }
    }
    
    CGContextRelease(gc);
}
#pragma mark - createSourceImageFromBuffer
- (CGImageRef)createSourceImageFromBuffer:(CVPixelBufferRef)buffer
{
    size_t width = CVPixelBufferGetWidth(buffer);
    size_t height = CVPixelBufferGetHeight(buffer);
    size_t stride = CVPixelBufferGetBytesPerRow(buffer);
    void *data = CVPixelBufferGetBaseAddress(buffer);
    
    // Wrap the locked pixel data without copying; the returned image is only valid while
    // the buffer stays locked, which startVideoCompositionRequest guarantees.
    CGColorSpaceRef rgb = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, height * stride, NULL);
    CGImageRef image = CGImageCreate(width, height, 8, 32, stride, rgb, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast, provider, NULL, NO, kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(rgb);
    
    return image;
}
#pragma mark - CGImageRotated
// Returns a new (+1) CGImage; the caller is responsible for releasing it.
CGImageRef CGImageRotated(CGImageRef originalCGImage, double radians)
{
    CGSize imageSize = CGSizeMake(CGImageGetWidth(originalCGImage), CGImageGetHeight(originalCGImage));
    CGSize rotatedSize;
    if (radians == M_PI_2 || radians == -M_PI_2)
    {
        rotatedSize = CGSizeMake(imageSize.height, imageSize.width);
    }
    else
    {
        rotatedSize = imageSize;
    }
    
    double rotatedCenterX = rotatedSize.width / 2.f;
    double rotatedCenterY = rotatedSize.height / 2.f;
    
//    // bitmap context properties
//    CGSize size = imageSize;
//    NSUInteger bytesPerPixel = 4;
//    NSUInteger bytesPerRow = bytesPerPixel * size.width;
//    NSUInteger bitsPerComponent = 8;
//
//    // create bitmap context
//    unsigned char *rawData = malloc(size.height * size.width * 4);
//    memset(rawData, 0, size.height * size.width * 4);
//    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
//    CGContextRef rotatedContext = CGBitmapContextCreate(rawData, size.width, size.height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    
    UIGraphicsBeginImageContextWithOptions(rotatedSize, NO, 1.f);
    CGContextRef rotatedContext = UIGraphicsGetCurrentContext();
    
    if (radians == 0.f || radians == M_PI)
    {
        // 0 or 180 degrees
        CGContextTranslateCTM(rotatedContext, rotatedCenterX, rotatedCenterY);
        if (radians == 0.0f)
        {
            CGContextScaleCTM(rotatedContext, 1.f, -1.f);
        }
        else
        {
            CGContextScaleCTM(rotatedContext, -1.f, 1.f);
        }
        CGContextTranslateCTM(rotatedContext, -rotatedCenterX, -rotatedCenterY);
    }
    else if (radians == M_PI_2 || radians == -M_PI_2)
    {
        // +/- 90 degrees
        CGContextTranslateCTM(rotatedContext, rotatedCenterX, rotatedCenterY);
        CGContextRotateCTM(rotatedContext, radians);
        CGContextScaleCTM(rotatedContext, 1.f, -1.f);
        CGContextTranslateCTM(rotatedContext, -rotatedCenterY, -rotatedCenterX);
    }
    
    CGRect drawingRect = CGRectMake(0.f, 0.f, imageSize.width, imageSize.height);
    CGContextDrawImage(rotatedContext, drawingRect, originalCGImage);
    CGImageRef rotatedCGImage = CGBitmapContextCreateImage(rotatedContext);
    
    UIGraphicsEndImageContext();
    
//    CGColorSpaceRelease(colorSpace);
//    CGContextRelease(rotatedContext);
//    free(rawData);
    
    return rotatedCGImage;
}
static CGFloat startX = 50, startY = 50, signX = 1, signY = 1;
- (void)addPath:(CGContextRef)gc width:(CGFloat)width height:(CGFloat)height faceRects:(NSMutableArray*)faceRects needCalc:(BOOL)needCalc
{
//    CGContextSaveGState(gc);
//    CGSize shadowOffset = CGSizeMake (-15, 20);
//    CGContextSetShadow (gc, shadowOffset, 5);
    
    CGFloat whiteColor[4] = {1.0, 1.0, 1.0, 1.0};
    CGContextSetStrokeColor(gc, whiteColor);
    CGContextSetLineWidth(gc, 2);
    CGContextSetShouldAntialias(gc, YES);
    CGContextBeginPath(gc);
    
    if (faceRects && [faceRects count] > 1)
    {
        // One rounded rect per detected face.
        for (int i = 0; i < [faceRects count]; ++i)
        {
            CGRect faceRect = [faceRects[i] CGRectValue];
            CGPathRef strokeRect = [UIBezierPath bezierPathWithRoundedRect:faceRect cornerRadius:10.f].CGPath;
            CGContextAddPath(gc, strokeRect);
        }
    }
    else
    {
        CGFloat minValue = MIN(width, height);
        CGFloat ovalWidth = minValue * 2/3;
        
        if (needCalc)
        {
            // Bounce the cut-out window around the frame, reversing direction at each edge.
            CGFloat offsetX = 5, offsetY = 3;
            startX = startX - signX*offsetX;
            if (startX <= 0)
            {
                signX = -signX;
                startX = 0;
            }
            else if ((startX + ovalWidth) >= width)
            {
                signX = -signX;
                startX = width - ovalWidth;
            }
            
            startY = startY - signY*offsetY;
            if (startY <= 0)
            {
                signY = -signY;
                startY = 0;
            }
            else if ((startY + ovalWidth) >= height)
            {
                signY = -signY;
                startY = height - ovalWidth;
            }
        }
        
        if ([self shouldDisplayPloygon])
        {
            CGPathRef strokeRect = [self pathForPolygon:CGRectMake(startX, startY, ovalWidth, ovalWidth)].CGPath;
            CGContextAddPath(gc, strokeRect);
        }
        else
        {
            CGPathRef strokeRect = [UIBezierPath bezierPathWithOvalInRect:CGRectMake(startX, startY, ovalWidth, ovalWidth)].CGPath;
            CGContextAddPath(gc, strokeRect);
        }
    }
    
//    CGContextRestoreGState(gc);
}
#pragma mark - pathForPolygon
- (UIBezierPath *)pathForPolygon:(CGRect)bounds
{
    NSInteger numberOfEdges = 16;
    CGFloat innerRadiusRatio = 0.75;
    CGFloat inset = 1.0f;
    
    return [self pathForPolygon:inset withBounds:bounds withNumberOfEdges:numberOfEdges withInnerRadiusRatio:innerRadiusRatio];
}

// Builds a star-like polygon by alternating between points on an outer and an inner radius.
- (UIBezierPath *)pathForPolygon:(CGFloat)inset withBounds:(CGRect)bounds withNumberOfEdges:(NSInteger)numberOfEdges withInnerRadiusRatio:(CGFloat)innerRadiusRatio
{
    CGPoint center = CGPointMake(CGRectGetMinX(bounds) + CGRectGetWidth(bounds)/2, CGRectGetMinY(bounds) + CGRectGetHeight(bounds)/2);
    CGFloat outerRadius = MIN(bounds.size.width, bounds.size.height) / 2.0 - inset;
    CGFloat innerRadius = outerRadius * innerRadiusRatio;
    CGFloat angle = M_PI * 2.0 / (numberOfEdges * 2);
    
    UIBezierPath *path = [UIBezierPath bezierPath];
    for (NSInteger cc = 0; cc < numberOfEdges; cc++)
    {
        CGPoint p0 = CGPointMake(center.x + outerRadius * cos(angle * (cc*2)), center.y + outerRadius * sin(angle * (cc*2)));
        CGPoint p1 = CGPointMake(center.x + innerRadius * cos(angle * (cc*2+1)), center.y + innerRadius * sin(angle * (cc*2+1)));
        
        if (cc == 0)
        {
            [path moveToPoint:p0];
        }
        else
        {
            [path addLineToPoint:p0];
        }
        [path addLineToPoint:p1];
    }
    [path closePath];
    
    return path;
}
#pragma mark - drawBorderInFrame
- (void)drawBorderInFrames:(NSArray *)frames withContextRef:(CGContextRef)contextRef
{
    if (!frames || [frames count] < 1)
    {
        NSLog(@"drawBorderInFrames is empty.");
        return;
    }
    
    if ([self shouldDisplayInnerBorder])
    {
        // Fill background
        CGContextSetFillColorWithColor(contextRef, [UIColor whiteColor].CGColor);
        CGContextFillRect(contextRef, [frames[0] CGRectValue]);
        
        // Draw
        CGContextBeginPath(contextRef);
        CGFloat lineWidth = 5;
        for (int i = 1; i < [frames count]; ++i)
        {
            CGRect innerVideoRect = [frames[i] CGRectValue];
            if (!CGRectIsEmpty(innerVideoRect))
            {
                CGContextAddRect(contextRef, CGRectInset(innerVideoRect, lineWidth, lineWidth));
            }
        }
        CGContextClip(contextRef);
    }
}

#pragma mark - getCroppedRect
- (CGRect)getCroppedRect
{
    NSArray *pointsPath = [self getPathPoints];
    return getCroppedBounds(pointsPath);
}

#pragma mark - NSUserDefaults
#pragma mark - PathPoints
- (NSArray *)getPathPoints
{
    NSArray *arrayResult = nil;
    NSString *flag = @"ArrayPathPoints";
    NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
    NSData *dataPathPoints = [userDefaultes objectForKey:flag];
    if (dataPathPoints)
    {
        arrayResult = [NSKeyedUnarchiver unarchiveObjectWithData:dataPathPoints];
//        if (arrayResult && [arrayResult count] > 0)
//        {
//            NSLog(@"points has content.");
//        }
    }
    else
    {
//        NSLog(@"getPathPoints is empty.");
    }
    
    return arrayResult;
}
- (NSArray *)getArrayRects
{
    NSString *flag = @"arrayRect";
    NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
    NSData *dataRect = [userDefaultes objectForKey:flag];
    NSArray *arrayResult = nil;
    if (dataRect)
    {
        arrayResult = [NSKeyedUnarchiver unarchiveObjectWithData:dataRect];
        if (arrayResult && [arrayResult count] > 0)
        {
//            CGRect innerVideoRect = [arrayResult[0] CGRectValue];
//            if (!CGRectIsEmpty(innerVideoRect))
//            {
//                NSLog(@"[arrayResult[0] CGRectValue: %@", NSStringFromCGRect(innerVideoRect));
//            }
        }
        else
        {
            NSLog(@"getArrayRects is empty!");
        }
    }
    
    return arrayResult;
}

- (void)setArrayRects:(NSMutableArray *)arrayRect
{
    // Embedded video frame
    NSString *flag = @"arrayRect";
    NSData *dataRect = [NSKeyedArchiver archivedDataWithRootObject:arrayRect];
    [[NSUserDefaults standardUserDefaults] setObject:dataRect forKey:flag];
    [[NSUserDefaults standardUserDefaults] synchronize];
}
#pragma mark - OutputBGColor
- (UIColor *)getOutputBGColor
{
    NSString *flag = @"OutputBGColor";
    NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
    NSData *objColor = [userDefaultes objectForKey:flag];
    UIColor *bgColor = nil;
    if (objColor)
    {
        bgColor = [NSKeyedUnarchiver unarchiveObjectWithData:objColor];
    }
    
    return bgColor;
}

#pragma mark - shouldDisplayInnerBorder
- (BOOL)shouldDisplayInnerBorder
{
    NSString *flag = @"ShouldDisplayInnerBorder";
//    NSLog(@"shouldDisplayInnerBorder: %@", [[[NSUserDefaults standardUserDefaults] objectForKey:shouldDisplayInnerBorder] boolValue]?@"Yes":@"No");
    if ([[[NSUserDefaults standardUserDefaults] objectForKey:flag] boolValue])
    {
        return YES;
    }
    else
    {
        return NO;
    }
}

#pragma mark - ShouldDisplayPloygon
- (BOOL)shouldDisplayPloygon
{
    NSString *flag = @"ShouldDisplayPloygon";
    NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
    if ([[userDefaultes objectForKey:flag] boolValue])
    {
        return YES;
    }
    else
    {
        return NO;
    }
}

#pragma mark - shouldRightRotate90ByTrackID
- (BOOL)shouldRightRotate90ByTrackID:(NSInteger)trackID
{
    NSUserDefaults *userDefaultes = [NSUserDefaults standardUserDefaults];
    NSString *identifier = [NSString stringWithFormat:@"TrackID_%ld", (long)trackID];
    BOOL result = [[userDefaultes objectForKey:identifier] boolValue];
//    NSLog(@"shouldRightRotate90ByTrackID %@ : %@", identifier, result?@"Yes":@"No");
    if (result)
    {
        return YES;
    }
    else
    {
        return NO;
    }
}
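// Configuration sketch (not part of the original file): the NSUserDefaults keys read by the
// getters above are assumed to be written by the host app before export. The key names match
// the strings used in this file; the setBool: calls and the choice of values are illustrative.
//
//   NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
//   [defaults setBool:YES forKey:@"ShouldDisplayInnerBorder"];
//   [defaults setBool:NO  forKey:@"ShouldDisplayPloygon"];
//   [defaults setBool:YES forKey:@"TrackID_2"];   // rotate the second source track by 90 degrees
//   [defaults synchronize];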
@end
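// Usage sketch (not part of the original file): a minimal, hedged example of how a custom
// compositor class like this one is typically attached to a video composition before export.
// The AVFoundation properties and methods below are real API; the surrounding variable names
// (composition, instruction, exporter) and the chosen render size, frame rate, and preset are
// illustrative assumptions only.
//
//   AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
//   videoComposition.customVideoCompositorClass = [CustomVideoCompositor class];
//   videoComposition.renderSize = CGSizeMake(640, 480);
//   videoComposition.frameDuration = CMTimeMake(1, 30);   // 30 fps
//   videoComposition.instructions = @[instruction];       // instruction lists both source track IDs
//
//   AVAssetExportSession *exporter =
//       [[AVAssetExportSession alloc] initWithAsset:composition
//                                        presetName:AVAssetExportPresetHighestQuality];
//   exporter.videoComposition = videoComposition;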