2014 snapchat source code

SCCaptureCoreImageFaceDetector.m · 205 lines · 7.8 KiB

//
//  SCCaptureCoreImageFaceDetector.m
//  Snapchat
//
//  Created by Jiyang Zhu on 3/27/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCCaptureCoreImageFaceDetector.h"

#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"

#import <SCFoundation/NSArray+Helpers.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>

@import ImageIO;

static const NSTimeInterval kSCCaptureCoreImageFaceDetectorMaxAllowedLatency =
    1; // Drop the face detection result if it is 1 second late.

static const NSInteger kDefaultNumberOfSequentialOutputSampleBuffer = -1; // -1 means no sequential sample buffers.

static char *const kSCCaptureCoreImageFaceDetectorProcessQueue =
    "com.snapchat.capture-core-image-face-detector-process";
@implementation SCCaptureCoreImageFaceDetector {
    CIDetector *_detector;
    SCCaptureResource *_captureResource;
    BOOL _isDetecting;
    BOOL _hasDetectedFaces;
    NSInteger _numberOfSequentialOutputSampleBuffer;
    NSUInteger _detectionFrequency;
    NSDictionary *_detectorOptions;
    SCManagedCaptureDevicePosition _devicePosition;
    CIContext *_context;
    SCQueuePerformer *_callbackPerformer;
    SCQueuePerformer *_processPerformer;
    SCCaptureFaceDetectionParser *_parser;
    SCCaptureFaceDetectorTrigger *_trigger;
}

@synthesize trigger = _trigger;
@synthesize parser = _parser;

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceODPCompatibleStart(2);
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"SCCaptureResource should not be nil");
        SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
        _callbackPerformer = captureResource.queuePerformer;
        _captureResource = captureResource;
        _parser = [[SCCaptureFaceDetectionParser alloc]
            initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
        _processPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureCoreImageFaceDetectorProcessQueue
                                                   qualityOfService:QOS_CLASS_USER_INITIATED
                                                          queueType:DISPATCH_QUEUE_SERIAL
                                                            context:SCQueuePerformerContextCamera];
        _detectionFrequency = SCExperimentWithFaceDetectionFrequency();
        _devicePosition = captureResource.device.position;
        _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
    }
    return self;
}
- (void)_setupDetectionIfNeeded
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!_detector);
    if (!_context) {
        _context = [CIContext context];
    }
    // For CIDetectorMinFeatureSize, the valid range is [0.0100, 0.5000], otherwise, it will cause a crash.
    if (!_detectorOptions) {
        _detectorOptions = @{
            CIDetectorAccuracy : CIDetectorAccuracyLow,
            CIDetectorTracking : @(YES),
            CIDetectorMaxFeatureCount : @(2),
            CIDetectorMinFeatureSize : @(SCCameraFaceFocusMinFaceSize()),
            CIDetectorNumberOfAngles : @(3)
        };
    }
    @try {
        _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:_context options:_detectorOptions];
    } @catch (NSException *exception) {
        SCLogCoreCameraError(@"Failed to create CIDetector with exception:%@", exception);
    }
}

- (void)_resetDetection
{
    SCTraceODPCompatibleStart(2);
    _detector = nil;
    [self _setupDetectionIfNeeded];
}

- (SCQueuePerformer *)detectionPerformer
{
    return _processPerformer;
}

- (void)startDetection
{
    SCTraceODPCompatibleStart(2);
    SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
    [self _setupDetectionIfNeeded];
    _isDetecting = YES;
    _hasDetectedFaces = NO;
    _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
}

- (void)stopDetection
{
    SCTraceODPCompatibleStart(2);
    SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
    _isDetecting = NO;
}
- (NSDictionary<NSNumber *, NSValue *> *)_detectFaceFeaturesInImage:(CIImage *)image
                                                     withOrientation:(CGImagePropertyOrientation)orientation
{
    SCTraceODPCompatibleStart(2);
    NSDictionary *opts =
        @{ CIDetectorImageOrientation : @(orientation),
           CIDetectorEyeBlink : @(NO),
           CIDetectorSmile : @(NO) };
    NSArray<CIFeature *> *features = [_detector featuresInImage:image options:opts];
    return [_parser parseFaceBoundsByFaceIDFromCIFeatures:features
                                            withImageSize:image.extent.size
                                         imageOrientation:orientation];
}

#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(_isDetecting);
    // Reset detection if the device position changes. Resetting detection should execute in _processPerformer, so we
    // just set a flag here, and then do it later in the perform block.
    BOOL shouldForceResetDetection = NO;
    if (devicePosition != _devicePosition) {
        _devicePosition = devicePosition;
        shouldForceResetDetection = YES;
        _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
    }
    _numberOfSequentialOutputSampleBuffer++;
    SC_GUARD_ELSE_RETURN(_numberOfSequentialOutputSampleBuffer % _detectionFrequency == 0);
    @weakify(self);
    CFRetain(sampleBuffer);
    [_processPerformer perform:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if (shouldForceResetDetection) {
            // Resetting detection usually costs no more than 1ms.
            [self _resetDetection];
        }
        CGImagePropertyOrientation orientation =
            (devicePosition == SCManagedCaptureDevicePositionBack ? kCGImagePropertyOrientationRight
                                                                  : kCGImagePropertyOrientationLeftMirrored);
        CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];
        NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
            [self _detectFaceFeaturesInImage:image withOrientation:orientation];
        // Calculate the latency for face detection, if it is too long, discard the face detection results.
        NSTimeInterval latency =
            CACurrentMediaTime() - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
        CFRelease(sampleBuffer);
        if (latency >= kSCCaptureCoreImageFaceDetectorMaxAllowedLatency) {
            faceBoundsByFaceID = nil;
        }
        // Only announce face detection result if faceBoundsByFaceID is not empty, or faceBoundsByFaceID was not empty
        // last time.
        if (faceBoundsByFaceID.count > 0 || self->_hasDetectedFaces) {
            self->_hasDetectedFaces = faceBoundsByFaceID.count > 0;
            [self->_callbackPerformer perform:^{
                [self->_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                              didDetectFaceBounds:faceBoundsByFaceID];
            }];
        }
    }];
}

@end
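
For context, here is a rough usage sketch inferred only from what the file above shows: detection must be started on the queue returned by -detectionPerformer (the SCAssert in -startDetection checks this), and frames reach the detector through the SCManagedVideoDataSourceListener callback. This is not code from the repo; the videoDataSource variable and its -addListener: method are assumptions for illustration, and in the actual code base SCCaptureFaceDetectorTrigger presumably owns the start/stop decisions.

// Hypothetical wiring sketch, not part of SCCaptureCoreImageFaceDetector.m.
SCCaptureCoreImageFaceDetector *detector =
    [[SCCaptureCoreImageFaceDetector alloc] initWithCaptureResource:captureResource];

// startDetection/stopDetection assert that they run on detectionPerformer,
// so hop onto that queue before toggling detection.
[[detector detectionPerformer] perform:^{
    [detector startDetection];
}];

// Assumed registration call: once the detector receives sample buffers, it
// only analyzes every _detectionFrequency-th frame and announces face bounds
// back on the capture resource's queuePerformer.
[videoDataSource addListener:detector];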