2014 snapchat source code

//
//  SCCaptureMetadataOutputDetector.m
//  Snapchat
//
//  Created by Jiyang Zhu on 12/21/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCCaptureMetadataOutputDetector.h"

#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>

#define SCLogCaptureMetaDetectorInfo(fmt, ...) \
    SCLogCoreCameraInfo(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorWarning(fmt, ...) \
    SCLogCoreCameraWarning(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorError(fmt, ...) \
    SCLogCoreCameraError(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)

static char *const kSCCaptureMetadataOutputDetectorProcessQueue =
    "com.snapchat.capture-metadata-output-detector-process";

static const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces.
@interface SCCaptureMetadataOutputDetector () <AVCaptureMetadataOutputObjectsDelegate>
@end

@implementation SCCaptureMetadataOutputDetector {
    BOOL _isDetecting;
    AVCaptureMetadataOutput *_metadataOutput;
    SCCaptureResource *_captureResource;
    SCCaptureFaceDetectionParser *_parser;
    NSInteger _numberOfSequentialFramesWithFaces;
    NSUInteger _detectionFrequency;
    SCQueuePerformer *_callbackPerformer;
    SCQueuePerformer *_metadataProcessPerformer;
    SCCaptureFaceDetectorTrigger *_trigger;
}

@synthesize trigger = _trigger;
@synthesize parser = _parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceODPCompatibleStart(2);
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"SCCaptureResource should not be nil");
        SCAssert(captureResource.managedSession.avSession, @"AVCaptureSession should not be nil");
        SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
        _metadataOutput = [AVCaptureMetadataOutput new];
        _callbackPerformer = captureResource.queuePerformer;
        _captureResource = captureResource;
        _detectionFrequency = SCExperimentWithFaceDetectionFrequency();
        _parser = [[SCCaptureFaceDetectionParser alloc]
            initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
        _metadataProcessPerformer =
            [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue
                                   qualityOfService:QOS_CLASS_DEFAULT
                                          queueType:DISPATCH_QUEUE_SERIAL
                                            context:SCQueuePerformerContextCamera];
        if ([self _initDetection]) {
            _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
        }
    }
    return self;
}
- (AVCaptureSession *)_captureSession
{
    // _captureResource.avSession may change, so we don't retain any specific AVCaptureSession.
    return _captureResource.managedSession.avSession;
}
- (BOOL)_initDetection
{
    // Attach the metadata output to the capture session and restrict it to face metadata.
    // If face metadata is unavailable on this session, remove the output again and report failure.
    BOOL success = NO;
    if ([[self _captureSession] canAddOutput:_metadataOutput]) {
        [[self _captureSession] addOutput:_metadataOutput];
        if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
            _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
            _metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];
            success = YES;
            SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
        } else {
            [[self _captureSession] removeOutput:_metadataOutput];
            success = NO;
            SCLogCaptureMetaDetectorError(@"AVMetadataObjectTypeFace is not available for "
                                          @"AVMetadataOutput[%@]",
                                          _metadataOutput);
        }
    } else {
        success = NO;
        SCLogCaptureMetaDetectorError(@"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output",
                                      [self _captureSession], _metadataOutput);
    }
    return success;
}
- (void)startDetection
{
    SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
    SC_GUARD_ELSE_RETURN(!_isDetecting);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        // Deliver metadata callbacks on the dedicated serial processing queue, off the capture queue.
        [_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue];
        _isDetecting = YES;
        SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
    }];
}

- (void)stopDetection
{
    SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
    SC_GUARD_ELSE_RETURN(_isDetecting);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        [_metadataOutput setMetadataObjectsDelegate:nil queue:NULL];
        _isDetecting = NO;
        SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully disabled.");
    }];
}

- (SCQueuePerformer *)detectionPerformer
{
    return _captureResource.queuePerformer;
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate

- (void)captureOutput:(AVCaptureOutput *)output
    didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
              fromConnection:(AVCaptureConnection *)connection
{
    SCTraceODPCompatibleStart(2);
    BOOL shouldNotify = NO;
    if (metadataObjects.count == 0 &&
        _numberOfSequentialFramesWithFaces != kDefaultNumberOfSequentialFramesWithFaces) {
        // There were faces detected before, but there is no face right now, so send out the notification.
        _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
        shouldNotify = YES;
    } else if (metadataObjects.count > 0) {
        // Notify on the first frame that contains faces, then on every _detectionFrequency-th sequential frame.
        _numberOfSequentialFramesWithFaces++;
        shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0);
    }
    SC_GUARD_ELSE_RETURN(shouldNotify);

    NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
        [_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
    [_callbackPerformer perform:^{
        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                didDetectFaceBounds:faceBoundsByFaceID];
    }];
}

@end
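
For context, a minimal sketch of how this detector might be driven, using only methods that appear in the file above (initWithCaptureResource:, detectionPerformer, startDetection, stopDetection, and SCQueuePerformer's perform:). The setup of captureResource is assumed and not shown here, and in the shipped code the SCCaptureFaceDetectorTrigger created in -initWithCaptureResource: presumably issues these start/stop calls; this is an illustration, not part of the original file.

// Illustrative sketch only, not part of the original source. Assumes `captureResource`
// is an already-configured SCCaptureResource with a running AVCaptureSession.
SCCaptureMetadataOutputDetector *detector =
    [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];

// -startDetection and -stopDetection assert that they run on the detection performer,
// so hop onto that queue before calling them.
[[detector detectionPerformer] perform:^{
    [detector startDetection];
}];

// Later, to stop receiving face-bounds callbacks from the announcer:
[[detector detectionPerformer] perform:^{
    [detector stopDetection];
}];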