2014 snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

140 lines
5.2 KiB

  1. //
  2. // SCStillImageCaptureVideoInputMethod.m
  3. // Snapchat
  4. //
  5. // Created by Alexander Grytsiuk on 3/16/16.
  6. // Copyright © 2016 Snapchat, Inc. All rights reserved.
  7. //
  8. #import "SCStillImageCaptureVideoInputMethod.h"
  9. #import "SCManagedCapturer.h"
  10. #import "SCManagedVideoFileStreamer.h"
  11. typedef unsigned char uchar_t;
  12. int clamp(int val, int low, int high)
  13. {
  14. if (val < low)
  15. val = low;
  16. if (val > high)
  17. val = high;
  18. return val;
  19. }
  20. void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b)
  21. {
  22. double red = yValue + (1.370705 * (vValue - 128));
  23. double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128));
  24. double blue = yValue + (1.732446 * (uValue - 128));
  25. *r = clamp(red, 0, 255);
  26. *g = clamp(green, 0, 255);
  27. *b = clamp(blue, 0, 255);
  28. }
  29. void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel,
  30. int rgbBytesPerRow)
  31. {
  32. uchar_t *uvData = nv21Data + height * width;
  33. for (int h = 0; h < height; h++) {
  34. uchar_t *yRowBegin = nv21Data + h * width;
  35. uchar_t *uvRowBegin = uvData + h / 2 * width;
  36. uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h;
  37. for (int w = 0; w < width; w++) {
  38. uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w;
  39. yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]),
  40. &(rgbPixelBegin[1]), &(rgbPixelBegin[2]));
  41. }
  42. }
  43. }
  44. @implementation SCStillImageCaptureVideoInputMethod
// Emulates a still-image capture by grabbing the next frame from the active
// video file streamer and encoding it as a JPEG.
//
// Only works when the capturer's current video data source is an
// SCManagedVideoFileStreamer; any other data source fails immediately.
// On success, successBlock receives (jpegData, nil /* cameraInfo */, nil)
// on whatever queue the streamer invokes its completion on (not visible here).
// On failure, failureBlock receives a bare NSError with code -1. Either block
// may be nil.
- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state
                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,
                                                     NSError *error))successBlock
                              failureBlock:(void (^)(NSError *error))failureBlock
{
    id<SCManagedVideoDataSource> videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource];
    if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) {
        SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource;
        [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {
            // Front-camera frames are flipped so the capture matches the mirrored preview.
            BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront;
#if TARGET_IPHONE_SIMULATOR
            // Simulator path: convert the pixel buffer to RGB on the CPU.
            UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer];
            CGImageRef videoImage = uiImage.CGImage;
            UIImage *capturedImage = [UIImage
                imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage
                           scale:1.0
                     orientation:UIImageOrientationRight];
#else
            // Device path: render the pixel buffer through Core Image.
            // NOTE(review): a fresh CIContext per capture is expensive; consider caching.
            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            CIContext *temporaryContext = [CIContext contextWithOptions:nil];
            CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
            CGImageRef videoImage =
                [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)];
            UIImage *capturedImage =
                [UIImage imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:size].CGImage : videoImage
                                    scale:1.0
                              orientation:UIImageOrientationRight];
            // createCGImage: returns a +1 reference; UIImage retains what it needs.
            CGImageRelease(videoImage);
#endif
            if (successBlock) {
                successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil);
            }
        }];
    } else {
        if (failureBlock) {
            failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]);
        }
    }
}
// Returns a vertically mirrored copy of `cgImage` at the given point size.
//
// NOTE(review): the flip is implicit — UIGraphicsBeginImageContext creates a
// context using UIKit's top-left-origin coordinate system, while
// CGContextDrawImage draws with Core Graphics' bottom-left-origin convention,
// so the drawn image comes out vertically mirrored. Do not "simplify" this to
// a direct draw; the mirroring is the whole point. Confirm this matches the
// intended front-camera flip.
//
// NOTE(review): UIGraphicsBeginImageContext is fixed at scale 1.0 — presumably
// fine here since `size` comes from pixel-buffer dimensions, but verify.
- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size
{
    UIGraphicsBeginImageContext(size);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}
  92. - (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer
  93. {
  94. CVPixelBufferLockBaseAddress(imageBuffer, 0);
  95. size_t width = CVPixelBufferGetWidth(imageBuffer);
  96. size_t height = CVPixelBufferGetHeight(imageBuffer);
  97. size_t rgbBytesPerPixel = 4;
  98. size_t rgbBytesPerRow = width * rgbBytesPerPixel;
  99. uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
  100. uchar_t *rgbData = malloc(rgbBytesPerRow * height);
  101. convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow);
  102. CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  103. CGContextRef context =
  104. CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast);
  105. CGImageRef cgImage = CGBitmapContextCreateImage(context);
  106. UIImage *result = [UIImage imageWithCGImage:cgImage];
  107. CGImageRelease(cgImage);
  108. CGContextRelease(context);
  109. CGColorSpaceRelease(colorSpace);
  110. free(rgbData);
  111. CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
  112. return result;
  113. }
// Identifier for this capture method, used to distinguish the video-input
// capture path from other still-image capture implementations.
- (NSString *)methodName
{
    return @"VideoInput";
}
  118. @end