
Add files via upload

pull/2/head
Jonny Banana authored 6 years ago, committed by GitHub
commit 9a5d07f3f2
  1. ManagedCapturer/ARConfiguration+SCConfiguration.h (+17)
  2. ManagedCapturer/ARConfiguration+SCConfiguration.m (+36)
  3. ManagedCapturer/AVCaptureConnection+InputDevice.h (+15)
  4. ManagedCapturer/AVCaptureConnection+InputDevice.m (+25)
  5. ManagedCapturer/AVCaptureDevice+ConfigurationLock.h (+34)
  6. ManagedCapturer/AVCaptureDevice+ConfigurationLock.m (+47)
  7. ManagedCapturer/NSURL+Asset.h (+21)
  8. ManagedCapturer/NSURL+Asset.m (+23)
  9. ManagedCapturer/OWNERS (+10)
  10. ManagedCapturer/SCAudioCaptureSession.h (+39)
  11. ManagedCapturer/SCAudioCaptureSession.m (+289)
  12. ManagedCapturer/SCCameraSettingUtils.h (+23)
  13. ManagedCapturer/SCCameraSettingUtils.m (+79)
  14. ManagedCapturer/SCCaptureCommon.h (+74)
  15. ManagedCapturer/SCCaptureCommon.m (+31)
  16. ManagedCapturer/SCCaptureCoreImageFaceDetector.h (+22)
  17. ManagedCapturer/SCCaptureCoreImageFaceDetector.m (+205)
  18. ManagedCapturer/SCCaptureDeviceAuthorization.h (+24)
  19. ManagedCapturer/SCCaptureDeviceAuthorization.m (+71)
  20. ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h (+31)
  21. ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m (+71)
  22. ManagedCapturer/SCCaptureDeviceResolver.h (+31)
  23. ManagedCapturer/SCCaptureDeviceResolver.m (+147)
  24. ManagedCapturer/SCCaptureFaceDetectionParser.h (+43)
  25. ManagedCapturer/SCCaptureFaceDetectionParser.m (+94)
  26. ManagedCapturer/SCCaptureFaceDetector.h (+31)
  27. ManagedCapturer/SCCaptureFaceDetectorTrigger.h (+22)
  28. ManagedCapturer/SCCaptureFaceDetectorTrigger.m (+97)
  29. ManagedCapturer/SCCaptureMetadataObjectParser.h (+23)
  30. ManagedCapturer/SCCaptureMetadataObjectParser.m (+38)
  31. ManagedCapturer/SCCaptureMetadataOutputDetector.h (+19)
  32. ManagedCapturer/SCCaptureMetadataOutputDetector.m (+175)
  33. ManagedCapturer/SCCapturer.h (+225)
  34. ManagedCapturer/SCCapturerBufferedVideoWriter.h (+44)
  35. ManagedCapturer/SCCapturerBufferedVideoWriter.m (+430)
  36. ManagedCapturer/SCCapturerDefines.h (+20)
  37. ManagedCapturer/SCCapturerToken.h (+18)
  38. ManagedCapturer/SCCapturerToken.m (+30)
  39. ManagedCapturer/SCCapturerTokenProvider.h (+20)
  40. ManagedCapturer/SCCapturerTokenProvider.m (+42)
  41. ManagedCapturer/SCExposureState.h (+18)
  42. ManagedCapturer/SCExposureState.m (+47)
  43. ManagedCapturer/SCFileAudioCaptureSession.h (+19)
  44. ManagedCapturer/SCFileAudioCaptureSession.m (+243)
  45. ManagedCapturer/SCManagedAudioStreamer.h (+20)
  46. ManagedCapturer/SCManagedAudioStreamer.m (+115)
  47. ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h (+71)
  48. ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h (+17)
  49. ManagedCapturer/SCManagedCaptureDevice.h (+60)
  50. ManagedCapturer/SCManagedCaptureDevice.m (+821)
  51. ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h (+17)
  52. ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m (+63)
  53. ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h (+18)
  54. ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m (+131)
  55. ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h (+25)
  56. ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m (+93)
  57. ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h (+17)
  58. ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h (+22)
  59. ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h (+28)
  60. ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m (+121)
  61. ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h (+28)
  62. ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m (+153)
  63. ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h (+28)
  64. ManagedCapturer/SCManagedCaptureDeviceHandler.h (+23)
  65. ManagedCapturer/SCManagedCaptureDeviceHandler.m (+77)
  66. ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h (+12)
  67. ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m (+190)
  68. ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h (+20)
  69. ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m (+90)
  70. ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h (+13)
  71. ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m (+95)
  72. ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h (+23)
  73. ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m (+67)
  74. ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h (+19)
  75. ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m (+133)
  76. ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h (+61)
  77. ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m (+232)
  78. ManagedCapturer/SCManagedCapturePreviewLayerController.h (+80)
  79. ManagedCapturer/SCManagedCapturePreviewLayerController.m (+563)
  80. ManagedCapturer/SCManagedCapturePreviewView.h (+25)
  81. ManagedCapturer/SCManagedCapturePreviewView.m (+173)
  82. ManagedCapturer/SCManagedCapturePreviewViewDebugView.h (+14)
  83. ManagedCapturer/SCManagedCapturePreviewViewDebugView.m (+204)
  84. ManagedCapturer/SCManagedCapturer.h (+23)

17
ManagedCapturer/ARConfiguration+SCConfiguration.h

@@ -0,0 +1,17 @@
//
// ARConfiguration+SCConfiguration.h
// Snapchat
//
// Created by Max Goedjen on 11/7/17.
//
#import "SCManagedCaptureDevice.h"
#import <ARKit/ARKit.h>
@interface ARConfiguration (SCConfiguration)
+ (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position;
+ (ARConfiguration *_Nullable)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position;
@end

36
ManagedCapturer/ARConfiguration+SCConfiguration.m

@@ -0,0 +1,36 @@
//
// ARConfiguration+SCConfiguration.m
// Snapchat
//
// Created by Max Goedjen on 11/7/17.
//
#import "ARConfiguration+SCConfiguration.h"
#import "SCCapturerDefines.h"
@implementation ARConfiguration (SCConfiguration)
+ (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position
{
return [[[self sc_configurationForDevicePosition:position] class] isSupported];
}
+ (ARConfiguration *)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position
{
if (@available(iOS 11.0, *)) {
if (position == SCManagedCaptureDevicePositionBack) {
ARWorldTrackingConfiguration *config = [[ARWorldTrackingConfiguration alloc] init];
config.planeDetection = ARPlaneDetectionHorizontal;
config.lightEstimationEnabled = NO;
return config;
} else {
#ifdef SC_USE_ARKIT_FACE
return [[ARFaceTrackingConfiguration alloc] init];
#endif
}
}
return nil;
}
@end

15
ManagedCapturer/AVCaptureConnection+InputDevice.h

@@ -0,0 +1,15 @@
//
// AVCaptureConnection+InputDevice.h
// Snapchat
//
// Created by William Morriss on 1/20/15
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
@interface AVCaptureConnection (InputDevice)
- (AVCaptureDevice *)inputDevice;
@end

25
ManagedCapturer/AVCaptureConnection+InputDevice.m

@@ -0,0 +1,25 @@
//
// AVCaptureConnection+InputDevice.m
// Snapchat
//
// Created by William Morriss on 1/20/15
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "AVCaptureConnection+InputDevice.h"
#import <SCFoundation/SCAssertWrapper.h>
@implementation AVCaptureConnection (InputDevice)
- (AVCaptureDevice *)inputDevice
{
NSArray *inputPorts = self.inputPorts;
AVCaptureInputPort *port = [inputPorts firstObject];
SCAssert([port.input isKindOfClass:[AVCaptureDeviceInput class]], @"unexpected port");
AVCaptureDeviceInput *deviceInput = (AVCaptureDeviceInput *)port.input;
AVCaptureDevice *device = deviceInput.device;
return device;
}
@end

34
ManagedCapturer/AVCaptureDevice+ConfigurationLock.h

@@ -0,0 +1,34 @@
//
// AVCaptureDevice+ConfigurationLock.h
// Snapchat
//
// Created by Derek Peirce on 4/19/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@interface AVCaptureDevice (ConfigurationLock)
/*
The following method locks this AVCaptureDevice, runs the task, then unlocks the device.
The task usually mutates AVCaptureDevice configuration.
It returns a boolean telling you whether the task ran successfully, so you can adjust your strategy for handling a
failure. In some cases there is no good way to recover: e.g. if we want to re-focus but fail to do so, what is the
next step? Popping up an alert is intrusive, yet doing nothing leaves the user confused. Precisely because the error
handling is difficult, we notify you when the task fails.
If the task does not run successfully, we also log an event using SCLogger for better visibility.
*/
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task;
/*
The following method has the same function as the one above, except that it retries the lock a given number of times.
Pass a retry count of 2 or less.
When retry equals 0, we only try to lock once.
When retry equals 1, we retry once if the first attempt fails, and so on.
*/
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes;
@end
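A minimal usage sketch of the locked-configuration helper above; the device lookup and the focus-mode task are illustrative assumptions, not part of this upload:

// Hypothetical example: adjust focus mode inside a configuration lock, retrying once on failure.
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
BOOL succeeded = [device runTask:@"continuous auto focus"
         withLockedConfiguration:^{
             if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
                 device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
             }
         }
                           retry:1];
if (!succeeded) {
    // The failure has already been logged via SCLogger; the caller decides whether to retry later or ignore it.
}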

47
ManagedCapturer/AVCaptureDevice+ConfigurationLock.m

@@ -0,0 +1,47 @@
//
// AVCaptureDevice+ConfigurationLock.m
// Snapchat
//
// Created by Derek Peirce on 4/19/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCLogger+Camera.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCLogger/SCLogger.h>
@implementation AVCaptureDevice (ConfigurationLock)
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task
{
return [self runTask:taskName withLockedConfiguration:task retry:0];
}
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes
{
SCAssert(taskName, @"camera logger taskString should not be empty");
SCAssert(retryTimes <= 2 && retryTimes >= 0, @"retry times should be equal to or below 2.");
NSError *error = nil;
BOOL deviceLockSuccess = NO;
NSUInteger retryCounter = 0;
while (retryCounter <= retryTimes && !deviceLockSuccess) {
deviceLockSuccess = [self lockForConfiguration:&error];
retryCounter++;
}
if (deviceLockSuccess) {
task();
[self unlockForConfiguration];
SCLogCoreCameraInfo(@"AVCapture Device setting success, task:%@ tryCount:%zu", taskName,
(unsigned long)retryCounter);
} else {
SCLogCoreCameraError(@"AVCapture Device Encountered error when %@ %@", taskName, error);
[[SCLogger sharedInstance] logManagedCapturerSettingFailure:taskName error:error];
}
return deviceLockSuccess;
}
@end

21
ManagedCapturer/NSURL+Asset.h

@@ -0,0 +1,21 @@
//
// NSURL+NSURL_Asset.h
// Snapchat
//
// Created by Michel Loenngren on 4/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSURL (Asset)
/**
If the media server is reset while recording, AVFoundation gets into a bad state. Even though we reload our
AVFoundation objects, we still need to reload the asset keys on the output file; if we don't, the AVAssetWriter
will fail when started.
*/
- (void)reloadAssetKeys;
@end
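A hedged usage sketch: after AVFoundation signals a media-services reset, a recorder could re-prime the asset keys on its output URL before recreating its AVAssetWriter. The output path and notification wiring here are assumptions, not part of this upload:

// Hypothetical example: re-prime asset keys on the output file after a media services reset.
NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"recording.mp4"]];
[[NSNotificationCenter defaultCenter] addObserverForName:AVAudioSessionMediaServicesWereResetNotification
                                                  object:nil
                                                   queue:[NSOperationQueue mainQueue]
                                              usingBlock:^(NSNotification *note) {
                                                  [outputURL reloadAssetKeys];
                                              }];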

23
ManagedCapturer/NSURL+Asset.m

@@ -0,0 +1,23 @@
//
// NSURL+NSURL_Asset.m
// Snapchat
//
// Created by Michel Loenngren on 4/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "NSURL+Asset.h"
#import <SCBase/SCMacros.h>
@import AVFoundation;
@implementation NSURL (Asset)
- (void)reloadAssetKeys
{
AVAsset *videoAsset = [AVAsset assetWithURL:self];
[videoAsset loadValuesAsynchronouslyForKeys:@[ @keypath(videoAsset.duration) ] completionHandler:nil];
}
@end

10
ManagedCapturer/OWNERS

@@ -0,0 +1,10 @@
--- !OWNERS
version: 2
default:
jira_project: CCAM
owners:
num_required_reviewers: 0
teams:
- Snapchat/core-camera-ios

39
ManagedCapturer/SCAudioCaptureSession.h

@@ -0,0 +1,39 @@
//
// SCAudioCaptureSession.h
// Snapchat
//
// Created by Liu Liu on 3/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
extern double const kSCAudioCaptureSessionDefaultSampleRate;
typedef void (^audio_capture_session_block)(NSError *error);
@protocol SCAudioCaptureSession;
@protocol SCAudioCaptureSessionDelegate <NSObject>
- (void)audioCaptureSession:(id<SCAudioCaptureSession>)audioCaptureSession
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@protocol SCAudioCaptureSession <NSObject>
@property (nonatomic, weak) id<SCAudioCaptureSessionDelegate> delegate;
// The completion handler receives an NSError with detailed information if an error occurred; otherwise it receives nil.
- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate
completionHandler:(audio_capture_session_block)completionHandler;
- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler;
@end
@interface SCAudioCaptureSession : NSObject <SCAudioCaptureSession>
@end
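A minimal usage sketch of the protocol above, assuming the surrounding object conforms to SCAudioCaptureSessionDelegate; the error handling is illustrative only and not part of this upload:

// Hypothetical example: start audio capture at the default sample rate, then tear it down.
SCAudioCaptureSession *audioSession = [[SCAudioCaptureSession alloc] init];
audioSession.delegate = self; // self is assumed to conform to SCAudioCaptureSessionDelegate
[audioSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                            completionHandler:^(NSError *error) {
                                                if (error) {
                                                    NSLog(@"audio recording failed to start: %@", error);
                                                }
                                            }];
// ... later, when recording finishes:
[audioSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];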

289
ManagedCapturer/SCAudioCaptureSession.m

@@ -0,0 +1,289 @@
//
// SCAudioCaptureSession.m
// Snapchat
//
// Created by Liu Liu on 3/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCAudioCaptureSession.h"
#import <SCAudio/SCAudioSession.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <mach/mach.h>
#import <mach/mach_time.h>
@import AVFoundation;
double const kSCAudioCaptureSessionDefaultSampleRate = 44100;
NSString *const SCAudioCaptureSessionErrorDomain = @"SCAudioCaptureSessionErrorDomain";
static NSInteger const kNumberOfAudioBuffersInQueue = 15;
static float const kAudioBufferDurationInSeconds = 0.2;
static char *const kSCAudioCaptureSessionQueueLabel = "com.snapchat.audio-capture-session";
@implementation SCAudioCaptureSession {
SCQueuePerformer *_performer;
AudioQueueRef _audioQueue;
AudioQueueBufferRef _audioQueueBuffers[kNumberOfAudioBuffersInQueue];
CMAudioFormatDescriptionRef _audioFormatDescription;
}
@synthesize delegate = _delegate;
- (instancetype)init
{
SCTraceStart();
self = [super init];
if (self) {
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCAudioCaptureSessionQueueLabel
qualityOfService:QOS_CLASS_USER_INTERACTIVE
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
}
return self;
}
- (void)dealloc
{
[self disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];
}
static AudioStreamBasicDescription setupAudioFormat(UInt32 inFormatID, Float64 sampleRate)
{
SCTraceStart();
AudioStreamBasicDescription recordFormat = {0};
recordFormat.mSampleRate = sampleRate;
recordFormat.mChannelsPerFrame = (UInt32)[SCAudioSession sharedInstance].inputNumberOfChannels;
recordFormat.mFormatID = inFormatID;
if (inFormatID == kAudioFormatLinearPCM) {
// if we want pcm, default to signed 16-bit little-endian
recordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
recordFormat.mBitsPerChannel = 16;
recordFormat.mBytesPerPacket = recordFormat.mBytesPerFrame =
(recordFormat.mBitsPerChannel / 8) * recordFormat.mChannelsPerFrame;
recordFormat.mFramesPerPacket = 1;
}
return recordFormat;
}
static int computeRecordBufferSize(const AudioStreamBasicDescription *format, const AudioQueueRef audioQueue,
float seconds)
{
SCTraceStart();
int packets, frames, bytes = 0;
frames = (int)ceil(seconds * format->mSampleRate);
if (format->mBytesPerFrame > 0) {
bytes = frames * format->mBytesPerFrame;
} else {
UInt32 maxPacketSize;
if (format->mBytesPerPacket > 0)
maxPacketSize = format->mBytesPerPacket; // constant packet size
else {
UInt32 propertySize = sizeof(maxPacketSize);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize,
&propertySize);
}
if (format->mFramesPerPacket > 0)
packets = frames / format->mFramesPerPacket;
else
packets = frames; // worst-case scenario: 1 frame in a packet
if (packets == 0) // sanity check
packets = 1;
bytes = packets * maxPacketSize;
}
return bytes;
}
static NSTimeInterval machHostTimeToSeconds(UInt64 mHostTime)
{
static dispatch_once_t onceToken;
static mach_timebase_info_data_t timebase_info;
dispatch_once(&onceToken, ^{
(void)mach_timebase_info(&timebase_info);
});
return (double)mHostTime * timebase_info.numer / timebase_info.denom / NSEC_PER_SEC;
}
static void audioQueueBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer,
const AudioTimeStamp *nStartTime, UInt32 inNumPackets,
const AudioStreamPacketDescription *inPacketDesc)
{
SCTraceStart();
SCAudioCaptureSession *audioCaptureSession = (__bridge SCAudioCaptureSession *)inUserData;
if (inNumPackets > 0) {
CMTime PTS = CMTimeMakeWithSeconds(machHostTimeToSeconds(nStartTime->mHostTime), 600);
[audioCaptureSession appendAudioQueueBuffer:inBuffer
numPackets:inNumPackets
PTS:PTS
packetDescriptions:inPacketDesc];
}
AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
}
- (void)appendAudioQueueBuffer:(AudioQueueBufferRef)audioQueueBuffer
numPackets:(UInt32)numPackets
PTS:(CMTime)PTS
packetDescriptions:(const AudioStreamPacketDescription *)packetDescriptions
{
SCTraceStart();
CMBlockBufferRef dataBuffer = NULL;
CMBlockBufferCreateWithMemoryBlock(NULL, NULL, audioQueueBuffer->mAudioDataByteSize, NULL, NULL, 0,
audioQueueBuffer->mAudioDataByteSize, 0, &dataBuffer);
if (dataBuffer) {
CMBlockBufferReplaceDataBytes(audioQueueBuffer->mAudioData, dataBuffer, 0,
audioQueueBuffer->mAudioDataByteSize);
CMSampleBufferRef sampleBuffer = NULL;
CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _audioFormatDescription,
numPackets, PTS, packetDescriptions, &sampleBuffer);
if (sampleBuffer) {
[self processAudioSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
CFRelease(dataBuffer);
}
}
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCTraceStart();
[_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];
}
- (NSError *)_generateErrorForType:(NSString *)errorType
errorCode:(int)errorCode
format:(AudioStreamBasicDescription)format
{
NSDictionary *errorInfo = @{
@"error_type" : errorType,
@"error_code" : @(errorCode),
@"record_format" : @{
@"format_id" : @(format.mFormatID),
@"format_flags" : @(format.mFormatFlags),
@"sample_rate" : @(format.mSampleRate),
@"bytes_per_packet" : @(format.mBytesPerPacket),
@"frames_per_packet" : @(format.mFramesPerPacket),
@"bytes_per_frame" : @(format.mBytesPerFrame),
@"channels_per_frame" : @(format.mChannelsPerFrame),
@"bits_per_channel" : @(format.mBitsPerChannel)
}
};
SCLogGeneralInfo(@"Audio queue error occured. ErrorInfo: %@", errorInfo);
return [NSError errorWithDomain:SCAudioCaptureSessionErrorDomain code:errorCode userInfo:errorInfo];
}
- (NSError *)beginAudioRecordingWithSampleRate:(Float64)sampleRate
{
SCTraceStart();
if ([SCAudioSession sharedInstance].inputAvailable) {
// SCAudioSession should be activated already
SCTraceSignal(@"Set audio session to be active");
AudioStreamBasicDescription recordFormat = setupAudioFormat(kAudioFormatLinearPCM, sampleRate);
OSStatus audioQueueCreationStatus = AudioQueueNewInput(&recordFormat, audioQueueBufferHandler,
(__bridge void *)self, NULL, NULL, 0, &_audioQueue);
if (audioQueueCreationStatus != 0) {
NSError *error = [self _generateErrorForType:@"audio_queue_create_error"
errorCode:audioQueueCreationStatus
format:recordFormat];
return error;
}
SCTraceSignal(@"Initialize audio queue with new input");
UInt32 bufferByteSize = computeRecordBufferSize(
    &recordFormat, _audioQueue, kAudioBufferDurationInSeconds); // enough bytes for kAudioBufferDurationInSeconds of audio
for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {
AudioQueueAllocateBuffer(_audioQueue, bufferByteSize, &_audioQueueBuffers[i]);
AudioQueueEnqueueBuffer(_audioQueue, _audioQueueBuffers[i], 0, NULL);
}
SCTraceSignal(@"Allocate audio buffer");
UInt32 size = sizeof(recordFormat);
audioQueueCreationStatus =
AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_StreamDescription, &recordFormat, &size);
if (0 != audioQueueCreationStatus) {
NSError *error = [self _generateErrorForType:@"audio_queue_get_property_error"
errorCode:audioQueueCreationStatus
format:recordFormat];
[self disposeAudioRecording];
return error;
}
SCTraceSignal(@"Audio queue sample rate %lf", recordFormat.mSampleRate);
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
audioQueueCreationStatus = CMAudioFormatDescriptionCreate(NULL, &recordFormat, sizeof(acl), &acl, 0, NULL, NULL,
&_audioFormatDescription);
if (0 != audioQueueCreationStatus) {
NSError *error = [self _generateErrorForType:@"audio_queue_audio_format_error"
errorCode:audioQueueCreationStatus
format:recordFormat];
[self disposeAudioRecording];
return error;
}
SCTraceSignal(@"Start audio queue");
audioQueueCreationStatus = AudioQueueStart(_audioQueue, NULL);
if (0 != audioQueueCreationStatus) {
NSError *error = [self _generateErrorForType:@"audio_queue_start_error"
errorCode:audioQueueCreationStatus
format:recordFormat];
[self disposeAudioRecording];
return error;
}
}
return nil;
}
- (void)disposeAudioRecording
{
SCTraceStart();
SCLogGeneralInfo(@"dispose audio recording");
if (_audioQueue) {
AudioQueueStop(_audioQueue, true);
AudioQueueDispose(_audioQueue, true);
for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {
_audioQueueBuffers[i] = NULL;
}
_audioQueue = NULL;
}
if (_audioFormatDescription) {
CFRelease(_audioFormatDescription);
_audioFormatDescription = NULL;
}
}
#pragma mark - Public methods
- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate
completionHandler:(audio_capture_session_block)completionHandler
{
SCTraceStart();
// Request audio session change for recording mode.
[_performer perform:^{
SCTraceStart();
NSError *error = [self beginAudioRecordingWithSampleRate:sampleRate];
if (completionHandler) {
completionHandler(error);
}
}];
}
- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
{
SCTraceStart();
[_performer performAndWait:^{
SCTraceStart();
[self disposeAudioRecording];
if (completionHandler) {
completionHandler();
}
}];
}
@end

23
ManagedCapturer/SCCameraSettingUtils.h

@@ -0,0 +1,23 @@
//
// SCCameraSettingUtils.h
// Snapchat
//
// Created by Pinlin Chen on 12/09/2017.
//
#import <SCBase/SCMacros.h>
#import <SCCapturerDefines.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
SC_EXTERN_C_BEGIN
// Each function returns the value if the metadata attribute is found; otherwise it returns nil.
extern NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata);
SC_EXTERN_C_END

79
ManagedCapturer/SCCameraSettingUtils.m

@@ -0,0 +1,79 @@
//
// SCCameraSettingUtils.m
// Snapchat
//
// Created by Pinlin Chen on 12/09/2017.
//
#import "SCCameraSettingUtils.h"
#import <SCFoundation/SCLog.h>
#import <ImageIO/CGImageProperties.h>
NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
if (!exifAttachments) {
return nil;
}
id value = (__bridge id)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifExposureTime);
// Fetching exposure time from the sample buffer
if ([value isKindOfClass:[NSNumber class]]) {
return (NSNumber *)value;
}
return nil;
}
NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
if (!exifAttachments) {
return nil;
}
id value = (__bridge id)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue);
if ([value isKindOfClass:[NSNumber class]]) {
return (NSNumber *)value;
}
return nil;
}
NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
if (!exifAttachments) {
return nil;
}
NSArray *ISOSpeedRatings = (__bridge NSArray *)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifISOSpeedRatings);
if ([ISOSpeedRatings respondsToSelector:@selector(count)] &&
[ISOSpeedRatings respondsToSelector:@selector(firstObject)] && ISOSpeedRatings.count > 0) {
id value = [ISOSpeedRatings firstObject];
if ([value isKindOfClass:[NSNumber class]]) {
return (NSNumber *)value;
}
}
return nil;
}
void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata)
{
CFDictionaryRef exifAttachments = CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments == nil) {
SCLogCoreCameraWarning(@"SampleBuffer exifAttachment is nil");
}
// Fetching exposure time from the sample buffer
NSNumber *currentExposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
if (currentExposureTimeNum) {
metadata->exposureTime = [currentExposureTimeNum floatValue];
}
NSNumber *currentISOSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
if (currentISOSpeedRatingNum) {
metadata->isoSpeedRating = (int)[currentISOSpeedRatingNum integerValue];
}
NSNumber *currentBrightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
if (currentBrightnessNum) {
float currentBrightness = [currentBrightnessNum floatValue];
if (isfinite(currentBrightness)) {
metadata->brightness = currentBrightness;
} else {
metadata->brightness = 0;
}
}
}

74
ManagedCapturer/SCCaptureCommon.h

@@ -0,0 +1,74 @@
//
// SCCaptureCommon.h
// Snapchat
//
// Created by Lin Jia on 9/29/17.
//
//
#import "SCManagedCaptureDevice.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@class SCManagedCapturerState;
@class SCManagedLensesProcessor;
@class SCManagedVideoDataSource;
@class SCManagedVideoCapturerOutputSettings;
@class SCLens;
@class SCLensCategory;
@class SCLookseryFilterFactory;
@class SCSnapScannedData;
@class SCCraftResourceManager;
@class SCScanConfiguration;
@class SCCapturerToken;
@class SCProcessingPipeline;
@class SCTimedTask;
@protocol SCManagedSampleBufferDisplayController;
typedef void (^sc_managed_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,
NSDictionary *metadata, NSError *error,
SCManagedCapturerState *state);
typedef void (^sc_managed_capturer_capture_video_frame_completion_handler_t)(UIImage *image);
typedef void (^sc_managed_capturer_start_recording_completion_handler_t)(SCVideoCaptureSessionInfo session,
NSError *error);
typedef void (^sc_managed_capturer_convert_view_coordniates_completion_handler_t)(CGPoint pointOfInterest);
typedef void (^sc_managed_capturer_unsafe_changes_t)(AVCaptureSession *session, AVCaptureDevice *front,
AVCaptureDeviceInput *frontInput, AVCaptureDevice *back,
AVCaptureDeviceInput *backInput, SCManagedCapturerState *state);
typedef void (^sc_managed_capturer_stop_running_completion_handler_t)(BOOL succeed);
typedef void (^sc_managed_capturer_scan_results_handler_t)(NSObject *resultObject);
typedef void (^sc_managed_lenses_processor_category_point_completion_handler_t)(SCLensCategory *category,
NSInteger categoriesCount);
extern CGFloat const kSCManagedCapturerAspectRatioUnspecified;
extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth;
extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight;
extern CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p;
extern CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p;
extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth;
extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight;
extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth;
extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight;
extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth;
extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;

31
ManagedCapturer/SCCaptureCommon.m

@@ -0,0 +1,31 @@
//
// SCCaptureCommon.m
// Snapchat
//
// Created by Lin Jia on 9/29/17.
//
//
#import "SCCaptureCommon.h"
CGFloat const kSCManagedCapturerAspectRatioUnspecified = -1.0;
CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth = 1280;
CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight = 720;
CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p = 1920;
CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p = 1080;
CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth = 2592;
CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight = 1936;
CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth = 640;
CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight = 480;
CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth = 1280;
CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight = 720;

22
ManagedCapturer/SCCaptureCoreImageFaceDetector.h

@@ -0,0 +1,22 @@
//
// SCCaptureCoreImageFaceDetector.h
// Snapchat
//
// Created by Jiyang Zhu on 3/27/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is intended to detect faces in Camera. It receives CMSampleBuffers, runs face detection using
// CIDetector, and announces the bounds and faceIDs.
#import "SCCaptureFaceDetector.h"
#import <SCBase/SCMacros.h>
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <Foundation/Foundation.h>
@interface SCCaptureCoreImageFaceDetector : NSObject <SCCaptureFaceDetector, SCManagedVideoDataSourceListener>
SC_INIT_AND_NEW_UNAVAILABLE;
@end

205
ManagedCapturer/SCCaptureCoreImageFaceDetector.m

@@ -0,0 +1,205 @@
//
// SCCaptureCoreImageFaceDetector.m
// Snapchat
//
// Created by Jiyang Zhu on 3/27/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureCoreImageFaceDetector.h"
#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import <SCFoundation/NSArray+Helpers.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>
@import ImageIO;
static const NSTimeInterval kSCCaptureCoreImageFaceDetectorMaxAllowedLatency =
1; // Drop the face detection result if it is 1 second late.
static const NSInteger kDefaultNumberOfSequentialOutputSampleBuffer = -1; // -1 means no sequential sample buffers.
static char *const kSCCaptureCoreImageFaceDetectorProcessQueue =
"com.snapchat.capture-core-image-face-detector-process";
@implementation SCCaptureCoreImageFaceDetector {
CIDetector *_detector;
SCCaptureResource *_captureResource;
BOOL _isDetecting;
BOOL _hasDetectedFaces;
NSInteger _numberOfSequentialOutputSampleBuffer;
NSUInteger _detectionFrequency;
NSDictionary *_detectorOptions;
SCManagedCaptureDevicePosition _devicePosition;
CIContext *_context;
SCQueuePerformer *_callbackPerformer;
SCQueuePerformer *_processPerformer;
SCCaptureFaceDetectionParser *_parser;
SCCaptureFaceDetectorTrigger *_trigger;
}
@synthesize trigger = _trigger;
@synthesize parser = _parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
SCTraceODPCompatibleStart(2);
self = [super init];
if (self) {
SCAssert(captureResource, @"SCCaptureResource should not be nil");
SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
_callbackPerformer = captureResource.queuePerformer;
_captureResource = captureResource;
_parser = [[SCCaptureFaceDetectionParser alloc]
initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
_processPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureCoreImageFaceDetectorProcessQueue
qualityOfService:QOS_CLASS_USER_INITIATED
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_detectionFrequency = SCExperimentWithFaceDetectionFrequency();
_devicePosition = captureResource.device.position;
_trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
}
return self;
}
- (void)_setupDetectionIfNeeded
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!_detector);
if (!_context) {
_context = [CIContext context];
}
// For CIDetectorMinFeatureSize, the valid range is [0.0100, 0.5000]; values outside that range cause a crash.
if (!_detectorOptions) {
_detectorOptions = @{
CIDetectorAccuracy : CIDetectorAccuracyLow,
CIDetectorTracking : @(YES),
CIDetectorMaxFeatureCount : @(2),
CIDetectorMinFeatureSize : @(SCCameraFaceFocusMinFaceSize()),
CIDetectorNumberOfAngles : @(3)
};
}
@try {
_detector = [CIDetector detectorOfType:CIDetectorTypeFace context:_context options:_detectorOptions];
} @catch (NSException *exception) {
SCLogCoreCameraError(@"Failed to create CIDetector with exception:%@", exception);
}
}
- (void)_resetDetection
{
SCTraceODPCompatibleStart(2);
_detector = nil;
[self _setupDetectionIfNeeded];
}
- (SCQueuePerformer *)detectionPerformer
{
return _processPerformer;
}
- (void)startDetection
{
SCTraceODPCompatibleStart(2);
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
[self _setupDetectionIfNeeded];
_isDetecting = YES;
_hasDetectedFaces = NO;
_numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
}
- (void)stopDetection
{
SCTraceODPCompatibleStart(2);
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
_isDetecting = NO;
}
- (NSDictionary<NSNumber *, NSValue *> *)_detectFaceFeaturesInImage:(CIImage *)image
withOrientation:(CGImagePropertyOrientation)orientation
{
SCTraceODPCompatibleStart(2);
NSDictionary *opts =
@{ CIDetectorImageOrientation : @(orientation),
CIDetectorEyeBlink : @(NO),
CIDetectorSmile : @(NO) };
NSArray<CIFeature *> *features = [_detector featuresInImage:image options:opts];
return [_parser parseFaceBoundsByFaceIDFromCIFeatures:features
withImageSize:image.extent.size
imageOrientation:orientation];
}
#pragma mark - SCManagedVideoDataSourceListener
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(_isDetecting);
// Reset detection if the device position changes. Resetting detection should execute in _processPerformer, so we
// just set a flag here, and then do it later in the perform block.
BOOL shouldForceResetDetection = NO;
if (devicePosition != _devicePosition) {
_devicePosition = devicePosition;
shouldForceResetDetection = YES;
_numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
}
_numberOfSequentialOutputSampleBuffer++;
SC_GUARD_ELSE_RETURN(_numberOfSequentialOutputSampleBuffer % _detectionFrequency == 0);
@weakify(self);
CFRetain(sampleBuffer);
[_processPerformer perform:^{
SCTraceStart();
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
if (shouldForceResetDetection) {
// Resetting detection usually costs no more than 1ms.
[self _resetDetection];
}
CGImagePropertyOrientation orientation =
(devicePosition == SCManagedCaptureDevicePositionBack ? kCGImagePropertyOrientationRight
: kCGImagePropertyOrientationLeftMirrored);
CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[self _detectFaceFeaturesInImage:image withOrientation:orientation];
// Calculate the latency for face detection, if it is too long, discard the face detection results.
NSTimeInterval latency =
CACurrentMediaTime() - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
CFRelease(sampleBuffer);
if (latency >= kSCCaptureCoreImageFaceDetectorMaxAllowedLatency) {
faceBoundsByFaceID = nil;
}
// Only announce face detection result if faceBoundsByFaceID is not empty, or faceBoundsByFaceID was not empty
// last time.
if (faceBoundsByFaceID.count > 0 || self->_hasDetectedFaces) {
self->_hasDetectedFaces = faceBoundsByFaceID.count > 0;
[self->_callbackPerformer perform:^{
[self->_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didDetectFaceBounds:faceBoundsByFaceID];
}];
}
}];
}
@end

24
ManagedCapturer/SCCaptureDeviceAuthorization.h

@@ -0,0 +1,24 @@
//
// SCCaptureDeviceAuthorization.h
// Snapchat
//
// Created by Xiaomu Wu on 8/19/14.
// Copyright (c) 2014 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface SCCaptureDeviceAuthorization : NSObject
// Methods for checking / requesting authorization to use media capture devices of a given type.
+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType;
+ (BOOL)deniedForMediaType:(NSString *)mediaType;
+ (BOOL)restrictedForMediaType:(NSString *)mediaType;
+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler;
// Convenience methods for media type == AVMediaTypeVideo
+ (BOOL)notDeterminedForVideoCapture;
+ (BOOL)deniedForVideoCapture;
+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler;
@end
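A hedged usage sketch of the convenience methods above; the fallback behavior on denial is an assumption, not part of this upload:

// Hypothetical example: request camera access up front, handling the denied case separately.
if ([SCCaptureDeviceAuthorization notDeterminedForVideoCapture]) {
    [SCCaptureDeviceAuthorization requestAccessForVideoCaptureWithCompletionHandler:^(BOOL granted) {
        NSLog(@"camera access granted: %d", granted);
    }];
} else if ([SCCaptureDeviceAuthorization deniedForVideoCapture]) {
    // Direct the user to Settings; capture cannot start without authorization.
}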

71
ManagedCapturer/SCCaptureDeviceAuthorization.m

@@ -0,0 +1,71 @@
//
// SCCaptureDeviceAuthorization.m
// Snapchat
//
// Created by Xiaomu Wu on 8/19/14.
// Copyright (c) 2014 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureDeviceAuthorization.h"
#import <BlizzardSchema/SCAEvents.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCLogger.h>
@import AVFoundation;
@implementation SCCaptureDeviceAuthorization
#pragma mark - Public
+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType
{
return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusNotDetermined;
}
+ (BOOL)deniedForMediaType:(NSString *)mediaType
{
return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusDenied;
}
+ (BOOL)restrictedForMediaType:(NSString *)mediaType
{
return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusRestricted;
}
+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler
{
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler];
}
#pragma mark - Convenience methods for AVMediaTypeVideo
+ (BOOL)notDeterminedForVideoCapture
{
return [self notDeterminedForMediaType:AVMediaTypeVideo];
}
+ (BOOL)deniedForVideoCapture
{
return [self deniedForMediaType:AVMediaTypeVideo];
}
+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler
{
BOOL firstTimeAsking =
[AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined;
[self requestAccessForMediaType:AVMediaTypeVideo
completionHandler:^(BOOL granted) {
if (firstTimeAsking) {
SCAPermissionPromptResponse *responseEvent = [[SCAPermissionPromptResponse alloc] init];
[responseEvent setPermissionPromptType:SCAPermissionPromptType_OS_CAMERA];
[responseEvent setAccepted:granted];
[[SCLogger sharedInstance] logUserTrackedEvent:responseEvent];
}
if (handler) {
handler(granted);
}
}];
}
@end

31
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h

@@ -0,0 +1,31 @@
//
// SCCaptureDeviceAuthorizationChecker.h
// Snapchat
//
// Created by Sun Lei on 15/03/2018.
//
@class SCQueuePerformer;
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
/*
SCCaptureDeviceAuthorizationChecker speeds up checking AVMediaTypeVideo authorization by caching the authorization
value. 'preloadVideoCaptureAuthorization' is called very early after the app launches to populate the cached value;
'authorizedForVideoCapture' can then be called to read the value synchronously.
*/
@interface SCCaptureDeviceAuthorizationChecker : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer NS_DESIGNATED_INITIALIZER;
- (BOOL)authorizedForVideoCapture;
- (void)preloadVideoCaptureAuthorization;
@end
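A minimal usage sketch, assuming a dedicated SCQueuePerformer; the queue label, QoS, and context values are illustrative assumptions, not part of this upload:

// Hypothetical example: preload the cached authorization at app launch, read it synchronously later.
SCQueuePerformer *performer = [[SCQueuePerformer alloc] initWithLabel:"com.snapchat.example-auth-checker"
                                                     qualityOfService:QOS_CLASS_UTILITY
                                                            queueType:DISPATCH_QUEUE_SERIAL
                                                              context:SCQueuePerformerContextCamera];
SCCaptureDeviceAuthorizationChecker *checker =
    [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:performer];
[checker preloadVideoCaptureAuthorization]; // called early, e.g. during app start
// ... later, on the capture start path:
BOOL authorized = [checker authorizedForVideoCapture];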

71
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m

@@ -0,0 +1,71 @@
//
// SCCaptureDeviceAuthorizationChecker.m
// Snapchat
//
// Created by Sun Lei on 15/03/2018.
//
#import "SCCaptureDeviceAuthorizationChecker.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@import AVFoundation;
@interface SCCaptureDeviceAuthorizationChecker () {
SCQueuePerformer *_performer;
BOOL _videoCaptureAuthorizationCachedValue;
}
@end
@implementation SCCaptureDeviceAuthorizationChecker
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
{
SCTraceODPCompatibleStart(2);
self = [super init];
if (self) {
_performer = performer;
_videoCaptureAuthorizationCachedValue = NO;
}
return self;
}
- (void)preloadVideoCaptureAuthorization
{
SCTraceODPCompatibleStart(2);
[_performer perform:^{
SCTraceODPCompatibleStart(2);
_videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];
}];
}
- (BOOL)authorizedForVideoCapture
{
SCTraceODPCompatibleStart(2);
// Cache authorizedForVideoCapture on low-end devices once it is YES.
// [AVCaptureDevice authorizationStatusForMediaType:] is expensive on low-end devices like the iPhone 4.
if (_videoCaptureAuthorizationCachedValue) {
// If the user authorizes and then unauthorizes, iOS would SIGKILL the app.
// When the user opens the app, a pop-up tells the user to allow camera access in settings.
// So 'return YES' makes sense here.
return YES;
} else {
@weakify(self);
[_performer performAndWait:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
if (!_videoCaptureAuthorizationCachedValue) {
_videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];
}
}];
return _videoCaptureAuthorizationCachedValue;
}
}
- (BOOL)authorizedForMediaType:(NSString *)mediaType
{
return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusAuthorized;
}
@end

31
ManagedCapturer/SCCaptureDeviceResolver.h

@@ -0,0 +1,31 @@
//
// SCCaptureDeviceResolver.h
// Snapchat
//
// Created by Lin Jia on 11/8/17.
//
//
#import <AVFoundation/AVFoundation.h>
/*
See https://jira.sc-corp.net/browse/CCAM-5843
Retrieving an AVCaptureDevice is a flaky operation, so this capture device resolver exists to make our code more
robust. The resolver does its best to find the requested camera and is only used by SCManagedCaptureDevice.
All APIs are thread safe.
*/
@interface SCCaptureDeviceResolver : NSObject
+ (instancetype)sharedInstance;
- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position;
- (AVCaptureDevice *)findDualCamera;
@end
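A short usage sketch of the resolver; the follow-up wrapping step is an assumption, not part of this upload:

// Hypothetical example: resolve the back camera through the shared resolver.
AVCaptureDevice *backCamera =
    [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
if (backCamera) {
    // Wrap it in SCManagedCaptureDevice / AVCaptureDeviceInput as needed.
}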

147
ManagedCapturer/SCCaptureDeviceResolver.m

@@ -0,0 +1,147 @@
//
// SCCaptureDeviceResolver.m
// Snapchat
//
// Created by Lin Jia on 11/8/17.
//
//
#import "SCCaptureDeviceResolver.h"
#import "SCCameraTweaks.h"
#import <SCBase/SCAvailability.h>
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureDeviceResolver () {
AVCaptureDeviceDiscoverySession *_discoverySession;
}
@end
@implementation SCCaptureDeviceResolver
+ (instancetype)sharedInstance
{
static SCCaptureDeviceResolver *resolver;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
resolver = [[SCCaptureDeviceResolver alloc] init];
});
return resolver;
}
- (instancetype)init
{
self = [super init];
if (self) {
NSMutableArray *deviceTypes = [[NSMutableArray alloc] init];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
if (SC_AT_LEAST_IOS_10_2) {
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
}
// TODO: we should KVO _discoverySession.devices.
_discoverySession =
[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
}
return self;
}
- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position
{
SCAssert(position == AVCaptureDevicePositionFront || position == AVCaptureDevicePositionBack, @"");
AVCaptureDevice *captureDevice;
if (position == AVCaptureDevicePositionFront) {
captureDevice = [self _pickBestFrontCamera:[_discoverySession.devices copy]];
} else if (position == AVCaptureDevicePositionBack) {
captureDevice = [self _pickBestBackCamera:[_discoverySession.devices copy]];
}
if (captureDevice) {
return captureDevice;
}
if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera
mediaType:AVMediaTypeVideo
position:position];
if (captureDevice) {
return captureDevice;
}
}
// If we are still executing here, the discovery session failed to find a device, so keep searching.
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
mediaType:AVMediaTypeVideo
position:position];
if (captureDevice) {
return captureDevice;
}
#if !TARGET_IPHONE_SIMULATOR
// We do not return nil at the beginning of this function on the simulator, because simulators running different iOS
// versions can still verify that our camera device API access is correct.
SCAssertFail(@"No camera is found.");
#endif
return nil;
}
- (AVCaptureDevice *)_pickBestFrontCamera:(NSArray<AVCaptureDevice *> *)devices
{
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)_pickBestBackCamera:(NSArray<AVCaptureDevice *> *)devices
{
// Look for dual camera first if needed. If dual camera not found, continue to look for wide angle camera.
if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {
return device;
}
}
}
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)findDualCamera
{
if (SC_AT_LEAST_IOS_10_2) {
for (AVCaptureDevice *device in [_discoverySession.devices copy]) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {
return device;
}
}
}
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionBack];
if (captureDevice) {
return captureDevice;
}
#if !TARGET_IPHONE_SIMULATOR
// We do not return nil at the beginning of this function on the simulator, because simulators running different iOS
// versions can still verify that our camera device API access is correct.
SCAssertFail(@"No camera is found.");
#endif
return nil;
}
@end

43
ManagedCapturer/SCCaptureFaceDetectionParser.h

@@ -0,0 +1,43 @@
//
// SCCaptureFaceDetectionParser.h
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class offers methods to parse face bounds from raw data, e.g., AVMetadataObject, CIFeature.
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
@interface SCCaptureFaceDetectionParser : NSObject
SC_INIT_AND_NEW_UNAVAILABLE;
- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea;
/**
Parse face bounds from AVMetadataObject.
@param metadataObjects An array of AVMetadataObject.
@return A dictionary mapping faceID (NSNumber) to faceBounds (CGRect wrapped in an NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects;
/**
Parse face bounds from CIFeature.
@param features An array of CIFeature.
@param imageSize Size of the image the features are detected from.
@param imageOrientation Orientation of the image.
@return A dictionary mapping faceID (NSNumber) to faceBounds (CGRect wrapped in an NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features
withImageSize:(CGSize)imageSize
imageOrientation:
(CGImagePropertyOrientation)imageOrientation;
@end
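A hypothetical usage sketch of the metadata-object path; the metadataObjects array is assumed to come from an AVCaptureMetadataOutput delegate callback, and the area threshold is an illustrative value, not part of this upload:

// Hypothetical example: filter AVMetadataFaceObjects down to face bounds keyed by faceID.
SCCaptureFaceDetectionParser *parser =
    [[SCCaptureFaceDetectionParser alloc] initWithFaceBoundsAreaThreshold:0.01]; // threshold is an assumed value
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
    [parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
[faceBoundsByFaceID enumerateKeysAndObjectsUsingBlock:^(NSNumber *faceID, NSValue *bounds, BOOL *stop) {
    NSLog(@"face %@ bounds %@", faceID, NSStringFromCGRect(bounds.CGRectValue));
}];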

94
ManagedCapturer/SCCaptureFaceDetectionParser.m

@@ -0,0 +1,94 @@
//
// SCCaptureFaceDetectionParser.m
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureFaceDetectionParser.h"
#import <SCFoundation/NSArray+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCCaptureFaceDetectionParser {
CGFloat _minimumArea;
}
- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea
{
self = [super init];
if (self) {
_minimumArea = minimumArea;
}
return self;
}
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects
{
SCTraceODPCompatibleStart(2);
NSMutableArray *faceObjects = [NSMutableArray array];
[metadataObjects
enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {
[faceObjects addObject:obj];
}
}];
SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceObjects.count];
for (AVMetadataFaceObject *faceObject in faceObjects) {
CGRect bounds = faceObject.bounds;
if (CGRectGetWidth(bounds) * CGRectGetHeight(bounds) >= _minimumArea) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:bounds] forKey:@(faceObject.faceID)];
}
}
return faceBoundsByFaceID;
}
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features
withImageSize:(CGSize)imageSize
imageOrientation:
(CGImagePropertyOrientation)imageOrientation
{
SCTraceODPCompatibleStart(2);
NSArray<CIFaceFeature *> *faceFeatures = [features filteredArrayUsingBlock:^BOOL(id _Nonnull evaluatedObject) {
return [evaluatedObject isKindOfClass:[CIFaceFeature class]];
}];
SC_GUARD_ELSE_RETURN_VALUE(faceFeatures.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceFeatures.count];
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
SCLogGeneralInfo(@"Face feature count:%d", faceFeatures.count);
for (CIFaceFeature *faceFeature in faceFeatures) {
SCLogGeneralInfo(@"Face feature: hasTrackingID:%d, bounds:%@", faceFeature.hasTrackingID,
NSStringFromCGRect(faceFeature.bounds));
if (faceFeature.hasTrackingID) {
CGRect transferredBounds;
// Somehow the detected bounds for the back camera are mirrored.
if (imageOrientation == kCGImagePropertyOrientationRight) {
transferredBounds = CGRectMake(
CGRectGetMinX(faceFeature.bounds) / width, 1 - CGRectGetMaxY(faceFeature.bounds) / height,
CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);
} else {
transferredBounds = CGRectMake(
CGRectGetMinX(faceFeature.bounds) / width, CGRectGetMinY(faceFeature.bounds) / height,
CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);
}
if (CGRectGetWidth(transferredBounds) * CGRectGetHeight(transferredBounds) >= _minimumArea) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:transferredBounds]
forKey:@(faceFeature.trackingID)];
}
}
}
return faceBoundsByFaceID;
}
@end

31
ManagedCapturer/SCCaptureFaceDetector.h

@@ -0,0 +1,31 @@
//
// SCCaptureFaceDetector.h
// Snapchat
//
// Created by Jiyang Zhu on 3/27/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This protocol declares properties and methods that are used for face detectors.
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@class SCQueuePerformer;
@class SCCaptureFaceDetectorTrigger;
@class SCCaptureFaceDetectionParser;
@protocol SCCaptureFaceDetector <NSObject>
@property (nonatomic, strong, readonly) SCCaptureFaceDetectorTrigger *trigger;
@property (nonatomic, strong, readonly) SCCaptureFaceDetectionParser *parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
- (SCQueuePerformer *)detectionPerformer;
- (void)startDetection;
- (void)stopDetection;
@end

22
ManagedCapturer/SCCaptureFaceDetectorTrigger.h

@@ -0,0 +1,22 @@
//
// SCCaptureFaceDetectorTrigger.h
// Snapchat
//
// Created by Jiyang Zhu on 3/22/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to control when SCCaptureFaceDetector should start and stop.
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@protocol SCCaptureFaceDetector;
@interface SCCaptureFaceDetectorTrigger : NSObject
SC_INIT_AND_NEW_UNAVAILABLE;
- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector;
@end

97
ManagedCapturer/SCCaptureFaceDetectorTrigger.m

@@ -0,0 +1,97 @@
//
// SCCaptureFaceDetectorTrigger.m
// Snapchat
//
// Created by Jiyang Zhu on 3/22/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureFaceDetector.h"
#import <SCFoundation/SCAppLifecycle.h>
#import <SCFoundation/SCIdleMonitor.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTaskManager.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCCaptureFaceDetectorTrigger () {
id<SCCaptureFaceDetector> __weak _detector;
}
@end
@implementation SCCaptureFaceDetectorTrigger
- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector
{
self = [super init];
if (self) {
_detector = detector;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_applicationDidBecomeActive)
name:kSCPostponedUIApplicationDidBecomeActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_applicationWillResignActive)
name:UIApplicationWillResignActiveNotification
object:nil];
}
return self;
}
#pragma mark - Internal Methods
- (void)_applicationWillResignActive
{
SCTraceODPCompatibleStart(2);
[self _stopDetection];
}
- (void)_applicationDidBecomeActive
{
SCTraceODPCompatibleStart(2);
[self _waitUntilAppStartCompleteToStartDetection];
}
- (void)_waitUntilAppStartCompleteToStartDetection
{
SCTraceODPCompatibleStart(2);
@weakify(self);
if (SCExperimentWithWaitUntilIdleReplacement()) {
[[SCTaskManager sharedManager] addTaskToRunWhenAppIdle:"SCCaptureFaceDetectorTrigger.startDetection"
performer:[_detector detectionPerformer]
block:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
[self _startDetection];
}];
} else {
[[SCIdleMonitor sharedInstance] waitUntilIdleForTag:"SCCaptureFaceDetectorTrigger.startDetection"
callbackQueue:[_detector detectionPerformer].queue
block:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
[self _startDetection];
}];
}
}
- (void)_startDetection
{
SCTraceODPCompatibleStart(2);
[[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{
[_detector startDetection];
}];
}
- (void)_stopDetection
{
SCTraceODPCompatibleStart(2);
[[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{
[_detector stopDetection];
}];
}
@end

23
ManagedCapturer/SCCaptureMetadataObjectParser.h

@@ -0,0 +1,23 @@
//
// SCCaptureMetadataObjectParser.h
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class offers methods to parse AVMetadataObject.
#import <AVFoundation/AVFoundation.h>
@interface SCCaptureMetadataObjectParser : NSObject
/**
Parse face bounds from AVMetadataObject.
@param metadataObjects An array of AVMetadataObject.
@return A dictionary mapping faceID (NSNumber) to faceBounds (CGRect wrapped in an NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects;
@end

38
ManagedCapturer/SCCaptureMetadataObjectParser.m

@@ -0,0 +1,38 @@
//
// SCCaptureMetadataObjectParser.m
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureMetadataObjectParser.h"
#import <SCBase/SCMacros.h>
@import UIKit;
@implementation SCCaptureMetadataObjectParser
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects
{
NSMutableArray *faceObjects = [NSMutableArray array];
[metadataObjects
enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {
[faceObjects addObject:obj];
}
}];
SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceObjects.count];
for (AVMetadataFaceObject *faceObject in faceObjects) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:faceObject.bounds] forKey:@(faceObject.faceID)];
}
return faceBoundsByFaceID;
}
@end

19
ManagedCapturer/SCCaptureMetadataOutputDetector.h

@ -0,0 +1,19 @@
//
// SCCaptureMetadataOutputDetector.h
// Snapchat
//
// Created by Jiyang Zhu on 12/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
// This class is intended to detect faces in the camera. It receives AVMetadataFaceObjects, and announces the bounds
// and faceIDs.
#import "SCCaptureFaceDetector.h"
#import <SCBase/SCMacros.h>
@interface SCCaptureMetadataOutputDetector : NSObject <SCCaptureFaceDetector>
SC_INIT_AND_NEW_UNAVAILABLE;
@end

175
ManagedCapturer/SCCaptureMetadataOutputDetector.m

@ -0,0 +1,175 @@
//
// SCCaptureMetadataOutputDetector.m
// Snapchat
//
// Created by Jiyang Zhu on 12/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureMetadataOutputDetector.h"
#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>
#define SCLogCaptureMetaDetectorInfo(fmt, ...) \
SCLogCoreCameraInfo(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorWarning(fmt, ...) \
SCLogCoreCameraWarning(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorError(fmt, ...) \
SCLogCoreCameraError(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
static char *const kSCCaptureMetadataOutputDetectorProcessQueue =
"com.snapchat.capture-metadata-output-detector-process";
static const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces.
@interface SCCaptureMetadataOutputDetector () <AVCaptureMetadataOutputObjectsDelegate>
@end
@implementation SCCaptureMetadataOutputDetector {
BOOL _isDetecting;
AVCaptureMetadataOutput *_metadataOutput;
SCCaptureResource *_captureResource;
SCCaptureFaceDetectionParser *_parser;
NSInteger _numberOfSequentialFramesWithFaces;
NSUInteger _detectionFrequency;
SCQueuePerformer *_callbackPerformer;
SCQueuePerformer *_metadataProcessPerformer;
SCCaptureFaceDetectorTrigger *_trigger;
}
@synthesize trigger = _trigger;
@synthesize parser = _parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
SCTraceODPCompatibleStart(2);
self = [super init];
if (self) {
SCAssert(captureResource, @"SCCaptureResource should not be nil");
SCAssert(captureResource.managedSession.avSession, @"AVCaptureSession should not be nil");
SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
_metadataOutput = [AVCaptureMetadataOutput new];
_callbackPerformer = captureResource.queuePerformer;
_captureResource = captureResource;
_detectionFrequency = SCExperimentWithFaceDetectionFrequency();
_parser = [[SCCaptureFaceDetectionParser alloc]
initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
_metadataProcessPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue
qualityOfService:QOS_CLASS_DEFAULT
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
if ([self _initDetection]) {
_trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
}
}
return self;
}
- (AVCaptureSession *)_captureSession
{
// _captureResource.avSession may change, so we don't retain any specific AVCaptureSession.
return _captureResource.managedSession.avSession;
}
- (BOOL)_initDetection
{
BOOL success = NO;
if ([[self _captureSession] canAddOutput:_metadataOutput]) {
[[self _captureSession] addOutput:_metadataOutput];
if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
_numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
_metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];
success = YES;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
} else {
[[self _captureSession] removeOutput:_metadataOutput];
success = NO;
SCLogCaptureMetaDetectorError(@"AVMetadataObjectTypeFace is not available for "
@"AVMetadataOutput[%@]",
_metadataOutput);
}
} else {
success = NO;
SCLogCaptureMetaDetectorError(@"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output",
[self _captureSession], _metadataOutput);
}
return success;
}
- (void)startDetection
{
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
SC_GUARD_ELSE_RETURN(!_isDetecting);
[_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
[_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue];
_isDetecting = YES;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
}];
}
- (void)stopDetection
{
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
SC_GUARD_ELSE_RETURN(_isDetecting);
[_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
[_metadataOutput setMetadataObjectsDelegate:nil queue:NULL];
_isDetecting = NO;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully disabled.");
}];
}
- (SCQueuePerformer *)detectionPerformer
{
return _captureResource.queuePerformer;
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)output
didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
fromConnection:(AVCaptureConnection *)connection
{
SCTraceODPCompatibleStart(2);
BOOL shouldNotify = NO;
if (metadataObjects.count == 0 &&
_numberOfSequentialFramesWithFaces !=
kDefaultNumberOfSequentialFramesWithFaces) { // There were faces detected before, but there is no face right
// now, so send out the notification.
_numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
shouldNotify = YES;
} else if (metadataObjects.count > 0) {
_numberOfSequentialFramesWithFaces++;
shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0);
}
SC_GUARD_ELSE_RETURN(shouldNotify);
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
[_callbackPerformer perform:^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didDetectFaceBounds:faceBoundsByFaceID];
}];
}
@end

225
ManagedCapturer/SCCapturer.h

@ -0,0 +1,225 @@
//
// SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 4/20/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCCaptureCommon.h"
#import "SCSnapCreationTriggers.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#define SCCapturerContext [NSString sc_stringWithFormat:@"%s/%d", __FUNCTION__, __LINE__]
@class SCBlackCameraDetector;
@protocol SCManagedCapturerListener
, SCManagedCapturerLensAPI, SCDeviceMotionProvider, SCFileInputDecider, SCManagedCapturerARImageCaptureProvider,
SCManagedCapturerGLViewManagerAPI, SCManagedCapturerLensAPIProvider, SCManagedCapturerLSAComponentTrackerAPI,
SCManagedCapturePreviewLayerControllerDelegate;
@protocol SCCapturer <NSObject>
@property (nonatomic, readonly) SCBlackCameraDetector *blackCameraDetector;
/**
* Returns id<SCLensProcessingCore> for the current capturer.
*/
- (id<SCManagedCapturerLensAPI>)lensProcessingCore;
- (CMTime)firstWrittenAudioBufferDelay;
- (BOOL)audioQueueStarted;
- (BOOL)isLensApplied;
- (BOOL)isVideoMirrored;
- (SCVideoCaptureSessionInfo)activeSession;
#pragma mark - Outside resources
- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
managedCapturerPreviewLayerControllerDelegate:
(id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate;
#pragma mark - Setup, Start & Stop
// setupWithDevicePositionAsynchronously is called from the main thread, executes its work off the main thread, and runs exactly once
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
/**
 * Important: Remember to call stopRunningAsynchronously to stop the capture session; dismissing the view is not enough.
 * @param context identifies the call site; passing in the class name of the call site is generally suggested.
 * It is currently used for debugging purposes only, i.e. the capture session works without it. (See the usage
 * sketch after this protocol.)
*/
- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
after:(NSTimeInterval)delay
context:(NSString *)context;
- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
context:(NSString *)context;
#pragma mark - Recording / Capture
- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
context:(NSString *)context;
/**
* Unlike captureStillImageAsynchronouslyWithAspectRatio, this captures a single frame from the ongoing video
* stream. This should be faster but lower quality (and smaller size), and does not play the shutter sound.
*/
- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
audioConfiguration:(SCAudioConfiguration *)configuration;
- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
audioConfiguration:(SCAudioConfiguration *)configuration
maxDuration:(NSTimeInterval)maxDuration
fileURL:(NSURL *)fileURL
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context;
- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context;
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;
- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
context:(NSString *)context;
// addTimedTask: schedules a task to run; it is a thread-safe API. The task runs on the main thread, so it is not
// recommended to add a large number of tasks that all share the same target time.
- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context;
// clearTimedTasksWithContext: cancels the scheduled tasks; it is a thread-safe API.
- (void)clearTimedTasksWithContext:(NSString *)context;
#pragma mark - Utilities
- (void)convertViewCoordinates:(CGPoint)viewCoordinates
completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)detectLensCategoryOnNextFrame:(CGPoint)point
lenses:(NSArray<SCLens *> *)lenses
completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
context:(NSString *)context;
#pragma mark - Configurations
- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setFlashActive:(BOOL)flashActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesActive:(BOOL)lensesActive
filterFactory:(SCLookseryFilterFactory *)filterFactory
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesInTalkActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setTorchActiveAsynchronously:(BOOL)torchActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setNightModeActiveAsynchronously:(BOOL)active
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)lockZoomWithContext:(NSString *)context;
- (void)unlockZoomWithContext:(NSString *)context;
- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context;
- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
context:(NSString *)context;
- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
fromUser:(BOOL)fromUser
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
// These methods need to be called explicitly from SCAppDelegate so that the capturer receives the latest app and media-services state.
- (void)applicationDidEnterBackground;
- (void)applicationWillEnterForeground;
- (void)applicationDidBecomeActive;
- (void)applicationWillResignActive;
- (void)mediaServicesWereReset;
- (void)mediaServicesWereLost;
#pragma mark - Add / Remove Listener
- (void)addListener:(id<SCManagedCapturerListener>)listener;
- (void)removeListener:(id<SCManagedCapturerListener>)listener;
- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;
- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;
- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (NSString *)debugInfo;
- (id<SCManagedVideoDataSource>)currentVideoDataSource;
- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback;
// Needs to be visible so that classes like SCCaptureSessionFixer can manage the capture session
- (void)recreateAVCaptureSession;
#pragma mark - Snap Creation triggers
- (SCSnapCreationTriggers *)snapCreationTriggers;
@optional
- (BOOL)authorizedForVideoCapture;
- (void)preloadVideoCaptureAuthorization;
@end
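// Minimal usage sketch for the start / stop contract documented above. Illustrative only: it assumes
// [SCManagedCapturer sharedInstance] conforms to SCCapturer; the completion handler types are declared in
// SCCaptureCommon.h, and a nil handler is passed here to avoid guessing their signatures.
id<SCCapturer> capturer = (id<SCCapturer>)[SCManagedCapturer sharedInstance];
SCCapturerToken *token = [capturer startRunningAsynchronouslyWithCompletionHandler:^{
    // The capture session is now running.
} context:SCCapturerContext];
// ... later, when the camera is no longer needed, hand the same token back:
[capturer stopRunningAsynchronously:token completionHandler:nil context:SCCapturerContext];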

44
ManagedCapturer/SCCapturerBufferedVideoWriter.h

@ -0,0 +1,44 @@
//
// SCCapturerBufferedVideoWriter.h
// Snapchat
//
// Created by Chao Pang on 12/5/17.
//
#import <SCFoundation/SCQueuePerformer.h>
#import <SCManagedVideoCapturerOutputSettings.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@protocol SCCapturerBufferedVideoWriterDelegate <NSObject>
- (void)videoWriterDidFailWritingWithError:(NSError *)error;
@end
@interface SCCapturerBufferedVideoWriter : NSObject
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithPerformer:(id<SCPerforming>)performer
outputURL:(NSURL *)outputURL
delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate
error:(NSError **)error;
- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings;
- (void)startWritingAtSourceTime:(CMTime)sourceTime;
- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock;
- (void)cancelWriting;
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)cleanUp;
@end
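// Rough lifecycle sketch based only on the declarations above; illustrative, not production code. The performer,
// delegate (self), output settings and source times are assumed to come from the caller, the file URL is
// hypothetical, and the finishWriting selector is spelled exactly as declared in this header.
NSError *error = nil;
SCCapturerBufferedVideoWriter *writer =
    [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:performer
                                                    outputURL:[NSURL fileURLWithPath:@"/tmp/recording.mp4"]
                                                     delegate:self
                                                        error:&error];
if (writer && [writer prepareWritingWithOutputSettings:outputSettings]) {
    [writer startWritingAtSourceTime:firstSampleTime];
    // Append buffers on the performer's queue as they arrive:
    //   [writer appendVideoSampleBuffer:videoSampleBuffer];
    //   [writer appendAudioSampleBuffer:audioSampleBuffer];
    [writer finishWritingAtSourceTime:lastSampleTime
                withCompletionHanlder:^{
                    [writer cleanUp];
                }];
}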

430
ManagedCapturer/SCCapturerBufferedVideoWriter.m

@ -0,0 +1,430 @@
//
// SCCapturerBufferedVideoWriter.m
// Snapchat
//
// Created by Chao Pang on 12/5/17.
//
#import "SCCapturerBufferedVideoWriter.h"
#import "SCAudioCaptureSession.h"
#import "SCCaptureCommon.h"
#import "SCManagedCapturerUtils.h"
#import <SCBase/SCMacros.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
@implementation SCCapturerBufferedVideoWriter {
SCQueuePerformer *_performer;
__weak id<SCCapturerBufferedVideoWriterDelegate> _delegate;
FBKVOController *_observeController;
AVAssetWriter *_assetWriter;
AVAssetWriterInput *_audioWriterInput;
AVAssetWriterInput *_videoWriterInput;
AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor;
CVPixelBufferPoolRef _defaultPixelBufferPool;
CVPixelBufferPoolRef _nightPixelBufferPool;
CVPixelBufferPoolRef _lensesPixelBufferPool;
CMBufferQueueRef _videoBufferQueue;
CMBufferQueueRef _audioBufferQueue;
}
- (instancetype)initWithPerformer:(id<SCPerforming>)performer
outputURL:(NSURL *)outputURL
delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate
error:(NSError **)error
{
self = [super init];
if (self) {
_performer = performer;
_delegate = delegate;
_observeController = [[FBKVOController alloc] initWithObserver:self];
CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
&_videoBufferQueue);
CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
&_audioBufferQueue);
_assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];
        if (!_assetWriter) {
self = nil;
return self;
}
}
return self;
}
- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
SCAssert(outputSettings, @"empty output setting");
// Audio
SCTraceSignal(@"Derive audio output setting");
NSDictionary *audioOutputSettings = @{
AVFormatIDKey : @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey : @(1),
AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate),
AVEncoderBitRateKey : @(outputSettings.audioBitRate)
};
_audioWriterInput =
[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// Video
SCTraceSignal(@"Derive video output setting");
size_t outputWidth = outputSettings.width;
size_t outputHeight = outputSettings.height;
SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0),
@"invalid output size");
NSDictionary *videoCompressionSettings = @{
AVVideoAverageBitRateKey : @(outputSettings.videoBitRate),
AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval)
};
NSDictionary *videoOutputSettings = @{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : @(outputWidth),
AVVideoHeightKey : @(outputHeight),
AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey : videoCompressionSettings
};
_videoWriterInput =
[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings];
_videoWriterInput.expectsMediaDataInRealTime = YES;
CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0);
_videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2);
_pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:_videoWriterInput
sourcePixelBufferAttributes:@{
            (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
            (NSString *)kCVPixelBufferWidthKey : @(outputWidth),
            (NSString *)kCVPixelBufferHeightKey : @(outputHeight)
}];
SCTraceSignal(@"Setup video writer input");
if ([_assetWriter canAddInput:_videoWriterInput]) {
[_assetWriter addInput:_videoWriterInput];
} else {
return NO;
}
SCTraceSignal(@"Setup audio writer input");
if ([_assetWriter canAddInput:_audioWriterInput]) {
[_assetWriter addInput:_audioWriterInput];
} else {
return NO;
}
return YES;
}
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
SC_GUARD_ELSE_RETURN(sampleBuffer);
if (!CMBufferQueueIsEmpty(_videoBufferQueue)) {
// We need to drain the buffer queue in this case
while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors
CMSampleBufferRef dequeuedSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (dequeuedSampleBuffer == NULL) {
break;
}
[self _appendVideoSampleBuffer:dequeuedSampleBuffer];
CFRelease(dequeuedSampleBuffer);
}
}
// Fast path, just append this sample buffer if ready
if (_videoWriterInput.readyForMoreMediaData) {
[self _appendVideoSampleBuffer:sampleBuffer];
} else {
// It is not ready, queuing the sample buffer
CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer);
}
}
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
SC_GUARD_ELSE_RETURN(sampleBuffer);
if (!CMBufferQueueIsEmpty(_audioBufferQueue)) {
// We need to drain the buffer queue in this case
while (_audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef dequeuedSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (dequeuedSampleBuffer == NULL) {
break;
}
                [_audioWriterInput appendSampleBuffer:dequeuedSampleBuffer];
CFRelease(dequeuedSampleBuffer);
}
}
// fast path, just append this sample buffer if ready
if ((_audioWriterInput.readyForMoreMediaData)) {
[_audioWriterInput appendSampleBuffer:sampleBuffer];
} else {
// it is not ready, queuing the sample buffer
CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer);
}
}
- (void)startWritingAtSourceTime:(CMTime)sourceTime
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
    // Observe status changes on the asset writer: when it errors out, it only changes its status and does not
    // issue any further delegate callbacks, etc.
[_observeController observe:_assetWriter
keyPath:@keypath(_assetWriter, status)
options:NSKeyValueObservingOptionNew
action:@selector(assetWriterStatusChanged:)];
[_assetWriter startWriting];
[_assetWriter startSessionAtSourceTime:sourceTime];
}
- (void)cancelWriting
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
CMBufferQueueReset(_videoBufferQueue);
CMBufferQueueReset(_audioBufferQueue);
[_assetWriter cancelWriting];
}
- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) {
CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (audioSampleBuffer == NULL) {
break;
}
[_audioWriterInput appendSampleBuffer:audioSampleBuffer];
CFRelease(audioSampleBuffer);
}
while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) {
CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (videoSampleBuffer == NULL) {
break;
}
[_videoWriterInput appendSampleBuffer:videoSampleBuffer];
CFRelease(videoSampleBuffer);
}
dispatch_block_t finishWritingBlock = ^() {
[_assetWriter endSessionAtSourceTime:sourceTime];
[_audioWriterInput markAsFinished];
[_videoWriterInput markAsFinished];
[_assetWriter finishWritingWithCompletionHandler:^{
if (completionBlock) {
completionBlock();
}
}];
};
if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) {
finishWritingBlock();
} else {
// We need to drain the samples from the queues before finish writing
__block BOOL isAudioDone = NO;
__block BOOL isVideoDone = NO;
// Audio
[_audioWriterInput
requestMediaDataWhenReadyOnQueue:_performer.queue
usingBlock:^{
if (!CMBufferQueueIsEmpty(_audioBufferQueue) &&
_assetWriter.status == AVAssetWriterStatusWriting) {
CMSampleBufferRef audioSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (audioSampleBuffer) {
[_audioWriterInput appendSampleBuffer:audioSampleBuffer];
CFRelease(audioSampleBuffer);
}
} else if (!isAudioDone) {
isAudioDone = YES;
}
if (isAudioDone && isVideoDone) {
finishWritingBlock();
}
}];
// Video
[_videoWriterInput
requestMediaDataWhenReadyOnQueue:_performer.queue
usingBlock:^{
if (!CMBufferQueueIsEmpty(_videoBufferQueue) &&
_assetWriter.status == AVAssetWriterStatusWriting) {
CMSampleBufferRef videoSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (videoSampleBuffer) {
[_videoWriterInput appendSampleBuffer:videoSampleBuffer];
CFRelease(videoSampleBuffer);
}
} else if (!isVideoDone) {
isVideoDone = YES;
}
if (isAudioDone && isVideoDone) {
finishWritingBlock();
}
}];
}
}
- (void)cleanUp
{
_assetWriter = nil;
_videoWriterInput = nil;
_audioWriterInput = nil;
_pixelBufferAdaptor = nil;
}
- (void)dealloc
{
CFRelease(_videoBufferQueue);
CFRelease(_audioBufferQueue);
CVPixelBufferPoolRelease(_defaultPixelBufferPool);
CVPixelBufferPoolRelease(_nightPixelBufferPool);
CVPixelBufferPoolRelease(_lensesPixelBufferPool);
[_observeController unobserveAll];
}
- (void)assetWriterStatusChanged:(NSDictionary *)change
{
SCTraceStart();
if (_assetWriter.status == AVAssetWriterStatusFailed) {
SCTraceSignal(@"Asset writer status failed %@, error %@", change, _assetWriter.error);
[_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]];
}
}
#pragma mark - Private methods
- (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer
{
SCAssertTrue([SCDeviceName isIphoneX]);
const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer);
const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer);
const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2;
const size_t croppedBufferHeight =
(size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2;
const size_t offsetPointX = inputBufferWidth - croppedBufferWidth;
const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2;
SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) &&
(inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) &&
(offsetPointY % 2 == 0) &&
(inputBufferWidth >= croppedBufferWidth + offsetPointX) &&
(inputBufferHeight >= croppedBufferHeight + offsetPointY),
SCLogGeneralError(@"Invalid cropping configuration"), NULL);
CVPixelBufferRef croppedPixelBuffer = NULL;
CVPixelBufferPoolRef pixelBufferPool =
[self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight)
croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)];
if (pixelBufferPool) {
CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer);
if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) {
SCLogGeneralError(@"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer");
return NULL;
}
} else {
SCAssertFail(@"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, "
@"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@",
@(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight));
return NULL;
}
CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0);
const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer);
for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) {
size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex);
size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex);
uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex);
size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex);
size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex);
uint8_t *croppedPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex);
// Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X).
// However, since UV are packed together in memory, we can use offsetPointX for all planes
size_t offsetPlaneBytesX = offsetPointX;
size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight;
inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX;
size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow);
for (int i = 0; i < croppedPlaneHeight; i++) {
memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow);
inPlaneAdress += inPlaneBytesPerRow;
croppedPlaneAdress += croppedPlaneBytesPerRow;
}
}
CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0);
return croppedPixelBuffer;
}
- (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize
{
if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) {
if (_defaultPixelBufferPool == NULL) {
_defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _defaultPixelBufferPool;
} else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) {
if (_nightPixelBufferPool == NULL) {
_nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _nightPixelBufferPool;
} else {
if (_lensesPixelBufferPool == NULL) {
_lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _lensesPixelBufferPool;
}
}
- (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height
{
NSDictionary *attributes = @{
        (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
        (NSString *)kCVPixelBufferWidthKey : @(width),
        (NSString *)kCVPixelBufferHeightKey : @(height)
};
CVPixelBufferPoolRef pixelBufferPool = NULL;
CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
(__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool);
if (result != kCVReturnSuccess) {
SCLogGeneralError(@"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i", result);
return NULL;
}
return pixelBufferPool;
}
- (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if ([SCDeviceName isIphoneX]) {
CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer];
if (croppedPixelBuffer) {
[_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime];
CVPixelBufferRelease(croppedPixelBuffer);
}
} else {
[_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime];
}
}
@end

20
ManagedCapturer/SCCapturerDefines.h

@ -0,0 +1,20 @@
//
// SCCapturerDefines.h
// Snapchat
//
// Created by Chao Pang on 12/20/17.
//
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, SCCapturerLightingConditionType) {
SCCapturerLightingConditionTypeNormal = 0,
SCCapturerLightingConditionTypeDark,
SCCapturerLightingConditionTypeExtremeDark,
};
typedef struct SampleBufferMetadata {
int isoSpeedRating;
float exposureTime;
float brightness;
} SampleBufferMetadata;

18
ManagedCapturer/SCCapturerToken.h

@ -0,0 +1,18 @@
//
// SCCapturerToken.h
// Snapchat
//
// Created by Xishuo Liu on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface SCCapturerToken : NSObject
- (instancetype)initWithIdentifier:(NSString *)identifier NS_DESIGNATED_INITIALIZER;
- (instancetype)init __attribute__((unavailable("Use initWithIdentifier: instead.")));
- (instancetype) new __attribute__((unavailable("Use initWithIdentifier: instead.")));
@end

30
ManagedCapturer/SCCapturerToken.m

@ -0,0 +1,30 @@
//
// SCCapturerToken.m
// Snapchat
//
// Created by Xishuo Liu on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCCapturerToken.h"
#import <SCFoundation/NSString+SCFormat.h>
@implementation SCCapturerToken {
NSString *_identifier;
}
- (instancetype)initWithIdentifier:(NSString *)identifier
{
if (self = [super init]) {
_identifier = identifier.copy;
}
return self;
}
- (NSString *)debugDescription
{
return [NSString sc_stringWithFormat:@"%@_%@", _identifier, self];
}
@end

20
ManagedCapturer/SCCapturerTokenProvider.h

@ -0,0 +1,20 @@
//
// Created by Aaron Levine on 10/16/17.
//
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCapturerToken;
NS_ASSUME_NONNULL_BEGIN
@interface SCCapturerTokenProvider : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
+ (instancetype)providerWithToken:(SCCapturerToken *)token;
- (nullable SCCapturerToken *)getTokenAndInvalidate;
@end
NS_ASSUME_NONNULL_END
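// Illustrative only: the provider hands its token out exactly once (getTokenAndInvalidate asserts main-thread
// access), so a second call returns nil. The identifier below is hypothetical.
SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:@"SCExampleCaller"];
SCCapturerTokenProvider *provider = [SCCapturerTokenProvider providerWithToken:token];
SCCapturerToken *first = [provider getTokenAndInvalidate];  // returns the token
SCCapturerToken *second = [provider getTokenAndInvalidate]; // returns nil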

42
ManagedCapturer/SCCapturerTokenProvider.m

@ -0,0 +1,42 @@
//
// Created by Aaron Levine on 10/16/17.
//
#import "SCCapturerTokenProvider.h"
#import "SCCapturerToken.h"
#import <SCBase/SCAssignment.h>
#import <SCFoundation/SCAssertWrapper.h>
@implementation SCCapturerTokenProvider {
SCCapturerToken *_Nullable _token;
}
+ (instancetype)providerWithToken:(SCCapturerToken *)token
{
return [[self alloc] initWithToken:token];
}
- (instancetype)initWithToken:(SCCapturerToken *)token
{
self = [super init];
if (self) {
_token = token;
}
return self;
}
- (nullable SCCapturerToken *)getTokenAndInvalidate
{
// ensure serial access by requiring calls be on the main thread
SCAssertMainThread();
let token = _token;
_token = nil;
return token;
}
@end

18
ManagedCapturer/SCExposureState.h

@ -0,0 +1,18 @@
//
// SCExposureState.h
// Snapchat
//
// Created by Derek Peirce on 4/10/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@interface SCExposureState : NSObject
- (instancetype)initWithDevice:(AVCaptureDevice *)device;
- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device;
@end
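// Illustrative only: snapshot the device's current ISO and exposure duration, then restore them later, e.g. after
// a format or session change. The captureDevice here is assumed to be obtained elsewhere.
SCExposureState *exposureState = [[SCExposureState alloc] initWithDevice:captureDevice];
// ... reconfigure the session or switch formats ...
[exposureState applyISOAndExposureDurationToDevice:captureDevice];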

47
ManagedCapturer/SCExposureState.m

@ -0,0 +1,47 @@
//
// SCExposureState.m
// Snapchat
//
// Created by Derek Peirce on 4/10/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCExposureState.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import <SCBase/SCMacros.h>
@import AVFoundation;
@implementation SCExposureState {
float _ISO;
CMTime _exposureDuration;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
{
if (self = [super init]) {
_ISO = device.ISO;
_exposureDuration = device.exposureDuration;
}
return self;
}
- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device
{
if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) {
[device runTask:@"set prior exposure"
withLockedConfiguration:^() {
CMTime exposureDuration =
CMTimeClampToRange(_exposureDuration, CMTimeRangeMake(device.activeFormat.minExposureDuration,
device.activeFormat.maxExposureDuration));
[device setExposureModeCustomWithDuration:exposureDuration
ISO:SC_CLAMP(_ISO, device.activeFormat.minISO,
device.activeFormat.maxISO)
completionHandler:nil];
}];
}
}
@end

19
ManagedCapturer/SCFileAudioCaptureSession.h

@ -0,0 +1,19 @@
//
// SCFileAudioCaptureSession.h
// Snapchat
//
// Created by Xiaomu Wu on 2/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCAudioCaptureSession.h"
#import <Foundation/Foundation.h>
@interface SCFileAudioCaptureSession : NSObject <SCAudioCaptureSession>
// Linear PCM is required.
// To best mimic `SCAudioCaptureSession`, use an audio file recorded from it.
- (void)setFileURL:(NSURL *)fileURL;
@end
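// Illustrative only: feed a Linear PCM file into the session and receive sample buffers through the
// SCAudioCaptureSessionDelegate callback declared in SCAudioCaptureSession.h. The file path is hypothetical, and
// self is assumed to conform to the delegate protocol.
SCFileAudioCaptureSession *fileAudioSession = [[SCFileAudioCaptureSession alloc] init];
fileAudioSession.delegate = self;
[fileAudioSession setFileURL:[NSURL fileURLWithPath:@"/tmp/linear-pcm-sample.caf"]];
[fileAudioSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                completionHandler:NULL];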

243
ManagedCapturer/SCFileAudioCaptureSession.m

@ -0,0 +1,243 @@
//
// SCFileAudioCaptureSession.m
// Snapchat
//
// Created by Xiaomu Wu on 2/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCFileAudioCaptureSession.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCSentinel.h>
@import AudioToolbox;
static float const kAudioBufferDurationInSeconds = 0.2; // same as SCAudioCaptureSession
static char *const kSCFileAudioCaptureSessionQueueLabel = "com.snapchat.file-audio-capture-session";
@implementation SCFileAudioCaptureSession {
SCQueuePerformer *_performer;
SCSentinel *_sentinel;
NSURL *_fileURL;
AudioFileID _audioFile; // audio file
AudioStreamBasicDescription _asbd; // audio format (core audio)
CMAudioFormatDescriptionRef _formatDescription; // audio format (core media)
SInt64 _readCurPacket; // current packet index to read
UInt32 _readNumPackets; // number of packets to read every time
UInt32 _readNumBytes; // number of bytes to read every time
void *_readBuffer; // data buffer to hold read packets
}
@synthesize delegate = _delegate;
#pragma mark - Public
- (instancetype)init
{
self = [super init];
if (self) {
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCFileAudioCaptureSessionQueueLabel
qualityOfService:QOS_CLASS_UNSPECIFIED
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_sentinel = [[SCSentinel alloc] init];
}
return self;
}
- (void)dealloc
{
if (_audioFile) {
AudioFileClose(_audioFile);
}
if (_formatDescription) {
CFRelease(_formatDescription);
}
if (_readBuffer) {
free(_readBuffer);
}
}
- (void)setFileURL:(NSURL *)fileURL
{
[_performer perform:^{
_fileURL = fileURL;
}];
}
#pragma mark - SCAudioCaptureSession
- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate // `sampleRate` ignored
completionHandler:(audio_capture_session_block)completionHandler
{
[_performer perform:^{
BOOL succeeded = [self _setup];
int32_t sentinelValue = [_sentinel value];
if (completionHandler) {
completionHandler(nil);
}
if (succeeded) {
[_performer perform:^{
SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);
[self _read];
}
after:kAudioBufferDurationInSeconds];
}
}];
}
- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
{
[_performer performAndWait:^{
[self _teardown];
if (completionHandler) {
completionHandler();
}
}];
}
#pragma mark - Private
- (BOOL)_setup
{
SCAssert([_performer isCurrentPerformer], @"");
[_sentinel increment];
OSStatus status = noErr;
status = AudioFileOpenURL((__bridge CFURLRef)_fileURL, kAudioFileReadPermission, 0, &_audioFile);
if (noErr != status) {
SCLogGeneralError(@"Cannot open file at URL %@, error code %d", _fileURL, (int)status);
return NO;
}
_asbd = (AudioStreamBasicDescription){0};
UInt32 asbdSize = sizeof(_asbd);
status = AudioFileGetProperty(_audioFile, kAudioFilePropertyDataFormat, &asbdSize, &_asbd);
if (noErr != status) {
SCLogGeneralError(@"Cannot get audio data format, error code %d", (int)status);
AudioFileClose(_audioFile);
_audioFile = NULL;
return NO;
}
if (kAudioFormatLinearPCM != _asbd.mFormatID) {
SCLogGeneralError(@"Linear PCM is required");
AudioFileClose(_audioFile);
_audioFile = NULL;
_asbd = (AudioStreamBasicDescription){0};
return NO;
}
UInt32 aclSize = 0;
AudioChannelLayout *acl = NULL;
status = AudioFileGetPropertyInfo(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, NULL);
if (noErr == status) {
acl = malloc(aclSize);
status = AudioFileGetProperty(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, acl);
if (noErr != status) {
aclSize = 0;
free(acl);
acl = NULL;
}
}
status = CMAudioFormatDescriptionCreate(NULL, &_asbd, aclSize, acl, 0, NULL, NULL, &_formatDescription);
if (acl) {
free(acl);
acl = NULL;
}
if (noErr != status) {
SCLogGeneralError(@"Cannot create format description, error code %d", (int)status);
AudioFileClose(_audioFile);
_audioFile = NULL;
_asbd = (AudioStreamBasicDescription){0};
return NO;
}
_readCurPacket = 0;
_readNumPackets = ceil(_asbd.mSampleRate * kAudioBufferDurationInSeconds);
_readNumBytes = _asbd.mBytesPerPacket * _readNumPackets;
_readBuffer = malloc(_readNumBytes);
return YES;
}
- (void)_read
{
SCAssert([_performer isCurrentPerformer], @"");
OSStatus status = noErr;
UInt32 numBytes = _readNumBytes;
UInt32 numPackets = _readNumPackets;
status = AudioFileReadPacketData(_audioFile, NO, &numBytes, NULL, _readCurPacket, &numPackets, _readBuffer);
if (noErr != status) {
SCLogGeneralError(@"Cannot read audio data, error code %d", (int)status);
return;
}
if (0 == numPackets) {
return;
}
CMTime PTS = CMTimeMakeWithSeconds(_readCurPacket / _asbd.mSampleRate, 600);
_readCurPacket += numPackets;
CMBlockBufferRef dataBuffer = NULL;
status = CMBlockBufferCreateWithMemoryBlock(NULL, NULL, numBytes, NULL, NULL, 0, numBytes, 0, &dataBuffer);
if (kCMBlockBufferNoErr == status) {
if (dataBuffer) {
CMBlockBufferReplaceDataBytes(_readBuffer, dataBuffer, 0, numBytes);
CMSampleBufferRef sampleBuffer = NULL;
CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _formatDescription,
numPackets, PTS, NULL, &sampleBuffer);
if (sampleBuffer) {
[_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
CFRelease(dataBuffer);
}
} else {
SCLogGeneralError(@"Cannot create data buffer, error code %d", (int)status);
}
int32_t sentinelValue = [_sentinel value];
[_performer perform:^{
SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);
[self _read];
}
after:kAudioBufferDurationInSeconds];
}
- (void)_teardown
{
SCAssert([_performer isCurrentPerformer], @"");
[_sentinel increment];
if (_audioFile) {
AudioFileClose(_audioFile);
_audioFile = NULL;
}
_asbd = (AudioStreamBasicDescription){0};
if (_formatDescription) {
CFRelease(_formatDescription);
_formatDescription = NULL;
}
_readCurPacket = 0;
_readNumPackets = 0;
_readNumBytes = 0;
if (_readBuffer) {
free(_readBuffer);
_readBuffer = NULL;
}
}
@end

20
ManagedCapturer/SCManagedAudioStreamer.h

@ -0,0 +1,20 @@
//
// SCManagedAudioStreamer.h
// Snapchat
//
// Created by Ricardo Sánchez-Sáez on 7/28/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import <SCCameraFoundation/SCManagedAudioDataSource.h>
#import <Foundation/Foundation.h>
@interface SCManagedAudioStreamer : NSObject <SCManagedAudioDataSource>
+ (instancetype)sharedInstance;
+ (instancetype) new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
@end
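// Illustrative only: the streamer is a process-wide singleton that conforms to SCManagedAudioDataSource. The
// listener and the SCAudioConfiguration instance below are assumed to exist at the call site.
SCManagedAudioStreamer *streamer = [SCManagedAudioStreamer sharedInstance];
[streamer addListener:audioDataSourceListener]; // id<SCManagedAudioDataSourceListener>
[streamer startStreamingWithAudioConfiguration:audioConfiguration];
// ... later:
[streamer stopStreaming];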

115
ManagedCapturer/SCManagedAudioStreamer.m

@ -0,0 +1,115 @@
//
// SCManagedAudioStreamer.m
// Snapchat
//
// Created by Ricardo Sánchez-Sáez on 7/28/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import "SCManagedAudioStreamer.h"
#import "SCAudioCaptureSession.h"
#import <SCAudio/SCAudioSession.h>
#import <SCCameraFoundation/SCManagedAudioDataSourceListenerAnnouncer.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCAudioScope/SCAudioScope.h>
#import <SCAudioScope/SCAudioSessionExperimentAdapter.h>
static char *const kSCManagedAudioStreamerQueueLabel = "com.snapchat.audioStreamerQueue";
@interface SCManagedAudioStreamer () <SCAudioCaptureSessionDelegate>
@end
@implementation SCManagedAudioStreamer {
SCAudioCaptureSession *_captureSession;
SCAudioConfigurationToken *_audioConfiguration;
SCManagedAudioDataSourceListenerAnnouncer *_announcer;
SCScopedAccess<SCMutableAudioSession *> *_scopedMutableAudioSession;
}
@synthesize performer = _performer;
+ (instancetype)sharedInstance
{
static dispatch_once_t onceToken;
static SCManagedAudioStreamer *managedAudioStreamer;
dispatch_once(&onceToken, ^{
managedAudioStreamer = [[SCManagedAudioStreamer alloc] initSharedInstance];
});
return managedAudioStreamer;
}
- (instancetype)initSharedInstance
{
SCTraceStart();
self = [super init];
if (self) {
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedAudioStreamerQueueLabel
qualityOfService:QOS_CLASS_USER_INTERACTIVE
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_announcer = [[SCManagedAudioDataSourceListenerAnnouncer alloc] init];
_captureSession = [[SCAudioCaptureSession alloc] init];
_captureSession.delegate = self;
}
return self;
}
- (BOOL)isStreaming
{
return _audioConfiguration != nil;
}
- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
SCTraceStart();
[_performer perform:^{
if (!self.isStreaming) {
// Begin audio recording asynchronously. First we need to have the proper audio session category.
_audioConfiguration = [SCAudioSessionExperimentAdapter
configureWith:configuration
performer:_performer
completion:^(NSError *error) {
[_captureSession
beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
completionHandler:NULL];
}];
}
}];
}
- (void)stopStreaming
{
[_performer perform:^{
if (self.isStreaming) {
[_captureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];
[SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
_audioConfiguration = nil;
}
}];
}
- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener
{
SCTraceStart();
[_announcer addListener:listener];
}
- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener
{
SCTraceStart();
[_announcer removeListener:listener];
}
- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
[_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];
}
@end

71
ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h

@ -0,0 +1,71 @@
//
// SCManagedCaptureDevice+SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 5/9/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDevice (SCManagedCapturer)
@property (nonatomic, strong, readonly) AVCaptureDevice *device;
@property (nonatomic, strong, readonly) AVCaptureDeviceInput *deviceInput;
@property (nonatomic, copy, readonly) NSError *error;
@property (nonatomic, assign, readonly) BOOL isConnected;
@property (nonatomic, strong, readonly) AVCaptureDeviceFormat *activeFormat;
// Setup and hook up with device
- (BOOL)setDeviceAsInput:(AVCaptureSession *)session;
- (void)removeDeviceAsInput:(AVCaptureSession *)session;
- (void)resetDeviceAsInput;
// Configurations
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;
@property (nonatomic, assign) float zoomFactor;
@property (nonatomic, assign, readonly) BOOL liveVideoStreamingActive;
@property (nonatomic, assign, readonly) BOOL isNightModeActive;
@property (nonatomic, assign, readonly) BOOL isFlashSupported;
@property (nonatomic, assign, readonly) BOOL isTorchSupported;
- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session;
- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session;
- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session;
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;
- (void)continuousAutofocus;
- (void)setRecording:(BOOL)recording;
- (void)updateActiveFormatWithSession:(AVCaptureSession *)session;
// Utilities
- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates
viewSize:(CGSize)viewSize
videoGravity:(NSString *)videoGravity;
@end
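// Illustrative only: a simplified sketch of wiring a managed device into an AVCaptureSession with the category
// above; it omits the capturer's own queueing, preset handling and error recovery.
SCManagedCaptureDevice *managedDevice = [SCManagedCaptureDevice back];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([managedDevice setDeviceAsInput:session]) {
    managedDevice.zoomFactor = 1.0;
    [managedDevice setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
    [managedDevice continuousAutofocus];
}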

17
ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h

@ -0,0 +1,17 @@
//
// SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h
// Snapchat
//
// Created by Kam Sheffield on 10/29/15.
// Copyright © 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDevice (SCManagedDeviceCapacityAnalyzer)
@property (nonatomic, strong, readonly) AVCaptureDevice *device;
@end

60
ManagedCapturer/SCManagedCaptureDevice.h

@ -0,0 +1,60 @@
//
// SCManagedCaptureDevice.h
// Snapchat
//
// Created by Liu Liu on 4/22/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>
#import <SCCameraFoundation/SCManagedCaptureDeviceProtocol.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
extern CGFloat const kSCMaxVideoZoomFactor;
extern CGFloat const kSCMinVideoZoomFactor;
@class SCManagedCaptureDevice;
@protocol SCManagedCaptureDeviceDelegate <NSObject>
@optional
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure;
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint;
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint;
@end
@interface SCManagedCaptureDevice : NSObject <SCManagedCaptureDeviceProtocol>
@property (nonatomic, weak) id<SCManagedCaptureDeviceDelegate> delegate;
// These class methods are thread safe.
+ (instancetype)front;
+ (instancetype)back;
+ (instancetype)dualCamera;
+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position;
+ (BOOL)is1080pSupported;
+ (BOOL)isMixCaptureSupported;
+ (BOOL)isNightModeSupported;
+ (BOOL)isEnhancedNightModeSupported;
+ (CGSize)defaultActiveFormatResolution;
+ (CGSize)nightModeActiveFormatResolution;
- (BOOL)softwareZoom;
- (SCManagedCaptureDevicePosition)position;
- (BOOL)isAvailable;
@end
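// Illustrative only: the position-based accessors return shared instances and can be nil when the underlying
// AVCaptureDevice cannot be resolved, so callers are expected to check availability first.
SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:SCManagedCaptureDevicePositionBack];
if (device != nil && [device isAvailable]) {
    BOOL usesSoftwareZoom = [device softwareZoom];
    (void)usesSoftwareZoom;
}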

821
ManagedCapturer/SCManagedCaptureDevice.m

@ -0,0 +1,821 @@
//
// SCManagedCaptureDevice.m
// Snapchat
//
// Created by Liu Liu on 4/22/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCCaptureCommon.h"
#import "SCCaptureDeviceResolver.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCaptureDeviceAutoExposureHandler.h"
#import "SCManagedCaptureDeviceAutoFocusHandler.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h"
#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h"
#import "SCManagedCaptureDeviceFocusHandler.h"
#import "SCManagedCapturer.h"
#import "SCManagedDeviceCapacityAnalyzer.h"
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
static int32_t const kSCManagedCaptureDeviceMaximumHighFrameRate = 30;
static int32_t const kSCManagedCaptureDeviceMaximumLowFrameRate = 24;
static float const kSCManagedCaptureDevicecSoftwareMaxZoomFactor = 8;
CGFloat const kSCMaxVideoZoomFactor = 100; // the max videoZoomFactor acceptable
CGFloat const kSCMinVideoZoomFactor = 1;
static NSDictionary *SCBestHRSIFormatsForHeights(NSArray *desiredHeights, NSArray *formats, BOOL shouldSupportDepth)
{
NSMutableDictionary *bestHRSIHeights = [NSMutableDictionary dictionary];
for (NSNumber *height in desiredHeights) {
bestHRSIHeights[height] = @0;
}
NSMutableDictionary *bestHRSIFormats = [NSMutableDictionary dictionary];
for (AVCaptureDeviceFormat *format in formats) {
        if (@available(iOS 11.0, *)) {
if (shouldSupportDepth && format.supportedDepthDataFormats.count == 0) {
continue;
}
}
if (CMFormatDescriptionGetMediaSubType(format.formatDescription) !=
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
continue;
}
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSNumber *height = @(dimensions.height);
NSNumber *bestHRSI = bestHRSIHeights[height];
if (bestHRSI) {
CMVideoDimensions hrsi = format.highResolutionStillImageDimensions;
            // If HRSI is enabled, we are only interested in the formats with the best high-resolution still image dimensions.
if (hrsi.height > [bestHRSI intValue]) {
bestHRSIHeights[height] = @(hrsi.height);
bestHRSIFormats[height] = format;
}
}
}
return [bestHRSIFormats copy];
}
static inline float SCDegreesToRadians(float theta)
{
return theta * (float)M_PI / 180.f;
}
static inline float SCRadiansToDegrees(float theta)
{
return theta * 180.f / (float)M_PI;
}
@implementation SCManagedCaptureDevice {
AVCaptureDevice *_device;
AVCaptureDeviceInput *_deviceInput;
AVCaptureDeviceFormat *_defaultFormat;
AVCaptureDeviceFormat *_nightFormat;
AVCaptureDeviceFormat *_liveVideoStreamingFormat;
SCManagedCaptureDevicePosition _devicePosition;
// Configurations on the device, shortcut to avoid re-configurations
id<SCManagedCaptureDeviceExposureHandler> _exposureHandler;
id<SCManagedCaptureDeviceFocusHandler> _focusHandler;
FBKVOController *_observeController;
// For the private category methods
NSError *_error;
BOOL _softwareZoom;
BOOL _isConnected;
BOOL _flashActive;
BOOL _torchActive;
BOOL _liveVideoStreamingActive;
float _zoomFactor;
BOOL _isNightModeActive;
BOOL _captureDepthData;
}
@synthesize fieldOfView = _fieldOfView;
+ (instancetype)front
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *front;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
/* You can use the tweak below to intentionally kill camera in debug.
if (SCIsDebugBuild() && SCCameraTweaksKillFrontCamera()) {
return nil;
}
*/
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!front) {
AVCaptureDevice *device =
[[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
if (device) {
front = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionFront];
}
}
dispatch_semaphore_signal(semaphore);
return front;
}
+ (instancetype)back
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *back;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
/* You can use the tweak below to intentionally kill camera in debug.
if (SCIsDebugBuild() && SCCameraTweaksKillBackCamera()) {
return nil;
}
*/
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!back) {
AVCaptureDevice *device =
[[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
if (device) {
back = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionBack];
}
}
dispatch_semaphore_signal(semaphore);
return back;
}
+ (SCManagedCaptureDevice *)dualCamera
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *dualCamera;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!dualCamera) {
AVCaptureDevice *device = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
if (device) {
dualCamera = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionBackDualCamera];
}
}
dispatch_semaphore_signal(semaphore);
return dualCamera;
}
+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position
{
switch (position) {
case SCManagedCaptureDevicePositionFront:
return [self front];
case SCManagedCaptureDevicePositionBack:
return [self back];
case SCManagedCaptureDevicePositionBackDualCamera:
return [self dualCamera];
}
}
+ (BOOL)is1080pSupported
{
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
}
+ (BOOL)isMixCaptureSupported
{
return !![self front] && !![self back];
}
+ (BOOL)isNightModeSupported
{
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6orNewer];
}
+ (BOOL)isEnhancedNightModeSupported
{
if (SC_AT_LEAST_IOS_11) {
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
}
return NO;
}
+ (CGSize)defaultActiveFormatResolution
{
if ([SCDeviceName isIphoneX]) {
return CGSizeMake(kSCManagedCapturerVideoActiveFormatWidth1080p,
kSCManagedCapturerVideoActiveFormatHeight1080p);
}
return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
kSCManagedCapturerDefaultVideoActiveFormatHeight);
}
+ (CGSize)nightModeActiveFormatResolution
{
if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {
return CGSizeMake(kSCManagedCapturerNightVideoHighResActiveFormatWidth,
kSCManagedCapturerNightVideoHighResActiveFormatHeight);
}
return CGSizeMake(kSCManagedCapturerNightVideoDefaultResActiveFormatWidth,
kSCManagedCapturerNightVideoDefaultResActiveFormatHeight);
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
SCTraceStart();
self = [super init];
if (self) {
_device = device;
_devicePosition = devicePosition;
if (SCCameraTweaksEnableFaceDetectionFocus(devicePosition)) {
_exposureHandler = [[SCManagedCaptureDeviceFaceDetectionAutoExposureHandler alloc]
initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)
managedCapturer:[SCManagedCapturer sharedInstance]];
_focusHandler = [[SCManagedCaptureDeviceFaceDetectionAutoFocusHandler alloc]
initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)
managedCapturer:[SCManagedCapturer sharedInstance]];
} else {
_exposureHandler = [[SCManagedCaptureDeviceAutoExposureHandler alloc] initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)];
_focusHandler = [[SCManagedCaptureDeviceAutoFocusHandler alloc] initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)];
}
_observeController = [[FBKVOController alloc] initWithObserver:self];
[self _setAsExposureListenerForDevice:device];
if (SCCameraTweaksEnableExposurePointObservation()) {
[self _observeExposurePointForDevice:device];
}
if (SCCameraTweaksEnableFocusPointObservation()) {
[self _observeFocusPointForDevice:device];
}
_zoomFactor = 1.0;
[self _findSupportedFormats];
}
return self;
}
- (SCManagedCaptureDevicePosition)position
{
return _devicePosition;
}
#pragma mark - Setup and hook up with device
- (BOOL)setDeviceAsInput:(AVCaptureSession *)session
{
SCTraceStart();
AVCaptureDeviceInput *deviceInput = [self deviceInput];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSString *previousSessionPreset = session.sessionPreset;
session.sessionPreset = AVCaptureSessionPresetInputPriority;
// With the input-priority preset, the session should now accept the input
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
session.sessionPreset = previousSessionPreset;
return NO;
}
}
[self _enableSubjectAreaChangeMonitoring];
[self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
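// If the active format cannot zoom in hardware (videoMaxZoomFactor is effectively 1), fall back to software
// zoom, which scales the video preview layer instead of the device (see SCManagedCaptureDeviceDefaultZoomHandler).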
if (_device.activeFormat.videoMaxZoomFactor < 1 + 1e-5) {
_softwareZoom = YES;
} else {
_softwareZoom = NO;
if (_device.videoZoomFactor != _zoomFactor) {
// Reset the zoom factor
[self setZoomFactor:_zoomFactor];
}
}
[_exposureHandler setVisible:YES];
[_focusHandler setVisible:YES];
_isConnected = YES;
return YES;
}
- (void)removeDeviceAsInput:(AVCaptureSession *)session
{
SCTraceStart();
if (_isConnected) {
[session removeInput:_deviceInput];
[_exposureHandler setVisible:NO];
[_focusHandler setVisible:NO];
_isConnected = NO;
}
}
- (void)resetDeviceAsInput
{
_deviceInput = nil;
AVCaptureDevice *deviceFound;
switch (_devicePosition) {
case SCManagedCaptureDevicePositionFront:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
break;
case SCManagedCaptureDevicePositionBack:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
break;
case SCManagedCaptureDevicePositionBackDualCamera:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
break;
}
if (deviceFound) {
_device = deviceFound;
}
}
#pragma mark - Configurations
- (void)_findSupportedFormats
{
NSInteger defaultHeight = [SCManagedCaptureDevice defaultActiveFormatResolution].height;
NSInteger nightHeight = [SCManagedCaptureDevice nightModeActiveFormatResolution].height;
NSInteger liveVideoStreamingHeight = kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;
NSArray *heights = @[ @(nightHeight), @(defaultHeight), @(liveVideoStreamingHeight) ];
BOOL formatsShouldSupportDepth = _devicePosition == SCManagedCaptureDevicePositionBackDualCamera;
NSDictionary *formats = SCBestHRSIFormatsForHeights(heights, _device.formats, formatsShouldSupportDepth);
_nightFormat = formats[@(nightHeight)];
_defaultFormat = formats[@(defaultHeight)];
_liveVideoStreamingFormat = formats[@(liveVideoStreamingHeight)];
}
- (AVCaptureDeviceFormat *)_bestSupportedFormat
{
if (_isNightModeActive) {
return _nightFormat;
}
if (_liveVideoStreamingActive) {
return _liveVideoStreamingFormat;
}
return _defaultFormat;
}
- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session
{
SCTraceStart();
if (![SCManagedCaptureDevice isNightModeSupported]) {
return;
}
if (_isNightModeActive == nightModeActive) {
return;
}
_isNightModeActive = nightModeActive;
[self updateActiveFormatWithSession:session];
}
- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session
{
SCTraceStart();
if (_liveVideoStreamingActive == liveVideoStreaming) {
return;
}
_liveVideoStreamingActive = liveVideoStreaming;
[self updateActiveFormatWithSession:session];
}
- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session
{
SCTraceStart();
_captureDepthData = captureDepthData;
[self _findSupportedFormats];
[self updateActiveFormatWithSession:session];
}
- (void)updateActiveFormatWithSession:(AVCaptureSession *)session
{
[self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
if (_device.videoZoomFactor != _zoomFactor) {
[self setZoomFactor:_zoomFactor];
}
}
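// AVCaptureSessionPresetInputPriority tells the session not to dictate the capture format, so the activeFormat
// we set below takes effect; if input priority cannot be used, we fall back to the supplied session preset.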
- (void)_updateActiveFormatWithSession:(AVCaptureSession *)session fallbackPreset:(NSString *)fallbackPreset
{
AVCaptureDeviceFormat *nextFormat = [self _bestSupportedFormat];
if (nextFormat && [session canSetSessionPreset:AVCaptureSessionPresetInputPriority]) {
session.sessionPreset = AVCaptureSessionPresetInputPriority;
if (nextFormat == _device.activeFormat) {
// Need to reconfigure the frame rate even though the active format is unchanged
[_device runTask:@"update frame rate"
withLockedConfiguration:^() {
[self _updateDeviceFrameRate];
}];
} else {
[_device runTask:@"update active format"
withLockedConfiguration:^() {
_device.activeFormat = nextFormat;
[self _updateDeviceFrameRate];
}];
}
} else {
session.sessionPreset = fallbackPreset;
}
[self _updateFieldOfView];
}
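// Setting activeVideoMinFrameDuration equal to activeVideoMaxFrameDuration locks the device to exactly that
// frame rate, e.g. CMTimeMake(1, 30) pins capture at 30 fps.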
- (void)_updateDeviceFrameRate
{
int32_t deviceFrameRate;
if (_liveVideoStreamingActive) {
deviceFrameRate = kSCManagedCaptureDeviceMaximumLowFrameRate;
} else {
deviceFrameRate = kSCManagedCaptureDeviceMaximumHighFrameRate;
}
CMTime frameDuration = CMTimeMake(1, deviceFrameRate);
if (@available(iOS 11.0, *)) {
if (_captureDepthData) {
// Sync the video frame rate to the max depth frame rate (24 fps)
if (_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject) {
frameDuration =
_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject.minFrameDuration;
}
}
}
_device.activeVideoMaxFrameDuration = frameDuration;
_device.activeVideoMinFrameDuration = frameDuration;
if (_device.lowLightBoostSupported) {
_device.automaticallyEnablesLowLightBoostWhenAvailable = YES;
}
}
- (void)setZoomFactor:(float)zoomFactor
{
SCTraceStart();
if (_softwareZoom) {
// Just remember the software zoom scale
if (zoomFactor <= kSCManagedCaptureDevicecSoftwareMaxZoomFactor && zoomFactor >= 1) {
_zoomFactor = zoomFactor;
}
} else {
[_device runTask:@"set zoom factor"
withLockedConfiguration:^() {
if (zoomFactor <= _device.activeFormat.videoMaxZoomFactor && zoomFactor >= 1) {
_zoomFactor = zoomFactor;
if (_device.videoZoomFactor != _zoomFactor) {
_device.videoZoomFactor = _zoomFactor;
}
}
}];
}
[self _updateFieldOfView];
}
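// The zoomed field of view follows fov' = 2 * atan(tan(fov / 2) / zoomFactor); for example, a 60° field of
// view at 2x zoom narrows to roughly 2 * atan(tan(30°) / 2) ≈ 32.2°.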
- (void)_updateFieldOfView
{
float fieldOfView = _device.activeFormat.videoFieldOfView;
if (_zoomFactor > 1.f) {
// Adjust the field of view to take the zoom factor into account.
// Note: this assumes the zoom factor linearly affects the focal length.
fieldOfView = 2.f * SCRadiansToDegrees(atanf(tanf(SCDegreesToRadians(0.5f * fieldOfView)) / _zoomFactor));
}
self.fieldOfView = fieldOfView;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
[_exposureHandler setExposurePointOfInterest:pointOfInterest fromUser:fromUser];
}
// Called when the user taps a point on screen to re-adjust the camera focus onto that spot.
// This re-adjustment is always necessary, regardless of scenario (recording video, taking a photo, etc.),
// therefore we don't have to check _focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceStart();
[_focusHandler setAutofocusPointOfInterest:pointOfInterest];
}
- (void)continuousAutofocus
{
SCTraceStart();
[_focusHandler continuousAutofocus];
}
- (void)setRecording:(BOOL)recording
{
if (SCCameraTweaksSmoothAutoFocusWhileRecording() && [_device isSmoothAutoFocusSupported]) {
[self _setSmoothFocus:recording];
} else {
[self _setFocusLock:recording];
}
[_exposureHandler setStableExposure:recording];
}
- (void)_setFocusLock:(BOOL)focusLock
{
SCTraceStart();
[_focusHandler setFocusLock:focusLock];
}
- (void)_setSmoothFocus:(BOOL)smoothFocus
{
SCTraceStart();
[_focusHandler setSmoothFocus:smoothFocus];
}
- (void)setFlashActive:(BOOL)flashActive
{
SCTraceStart();
if (_flashActive != flashActive) {
if ([_device hasFlash]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOn]) {
[_device runTask:@"set flash active"
withLockedConfiguration:^() {
_device.flashMode = AVCaptureFlashModeOn;
}];
} else if (!flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOff]) {
[_device runTask:@"set flash off"
withLockedConfiguration:^() {
_device.flashMode = AVCaptureFlashModeOff;
}];
}
#pragma clang diagnostic pop
_flashActive = flashActive;
} else {
_flashActive = NO;
}
}
}
- (void)setTorchActive:(BOOL)torchActive
{
SCTraceStart();
if (_torchActive != torchActive) {
if ([_device hasTorch]) {
if (torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOn]) {
[_device runTask:@"set torch active"
withLockedConfiguration:^() {
[_device setTorchMode:AVCaptureTorchModeOn];
}];
} else if (!torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOff]) {
[_device runTask:@"set torch off"
withLockedConfiguration:^() {
_device.torchMode = AVCaptureTorchModeOff;
}];
}
_torchActive = torchActive;
} else {
_torchActive = NO;
}
}
}
#pragma mark - Utilities
- (BOOL)isFlashSupported
{
return _device.hasFlash;
}
- (BOOL)isTorchSupported
{
return _device.hasTorch;
}
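// AVCaptureDevice points of interest are expressed in the unrotated (landscape) sensor coordinate space, where
// (0, 0) is the top-left and (1, 1) the bottom-right of the sensor; that is why the conversions below swap
// x and y and reverse x relative to the portrait view coordinates.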
- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates
viewSize:(CGSize)viewSize
videoGravity:(NSString *)videoGravity
{
SCTraceStart();
CGPoint pointOfInterest = CGPointMake(.5f, .5f);
CGRect cleanAperture;
AVCaptureDeviceInput *deviceInput = [self deviceInput];
NSArray *ports = [deviceInput.ports copy];
if ([videoGravity isEqualToString:AVLayerVideoGravityResize]) {
// Scale, switch x and y, and reverse x
return CGPointMake(viewCoordinates.y / viewSize.height, 1.f - (viewCoordinates.x / viewSize.width));
}
for (AVCaptureInputPort *port in ports) {
if ([port mediaType] == AVMediaTypeVideo && port.formatDescription) {
cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);
CGSize apertureSize = cleanAperture.size;
CGPoint point = viewCoordinates;
CGFloat apertureRatio = apertureSize.height / apertureSize.width;
CGFloat viewRatio = viewSize.width / viewSize.height;
CGFloat xc = .5f;
CGFloat yc = .5f;
if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
if (viewRatio > apertureRatio) {
CGFloat y2 = viewSize.height;
CGFloat x2 = viewSize.height * apertureRatio;
CGFloat x1 = viewSize.width;
CGFloat blackBar = (x1 - x2) / 2;
// If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the
// default value returned (.5,.5)
if (point.x >= blackBar && point.x <= blackBar + x2) {
// Scale (accounting for the letterboxing on the left and right of the video preview),
// switch x and y, and reverse x
xc = point.y / y2;
yc = 1.f - ((point.x - blackBar) / x2);
}
} else {
CGFloat y2 = viewSize.width / apertureRatio;
CGFloat y1 = viewSize.height;
CGFloat x2 = viewSize.width;
CGFloat blackBar = (y1 - y2) / 2;
// If point is inside letterboxed area, do coordinate conversion. Otherwise, don't change the
// default value returned (.5,.5)
if (point.y >= blackBar && point.y <= blackBar + y2) {
// Scale (accounting for the letterboxing on the top and bottom of the video preview),
// switch x and y, and reverse x
xc = ((point.y - blackBar) / y2);
yc = 1.f - (point.x / x2);
}
}
} else if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
// Scale, switch x and y, and reverse x
if (viewRatio > apertureRatio) {
CGFloat y2 = apertureSize.width * (viewSize.width / apertureSize.height);
xc = (point.y + ((y2 - viewSize.height) / 2.f)) / y2; // Account for cropped height
yc = (viewSize.width - point.x) / viewSize.width;
} else {
CGFloat x2 = apertureSize.height * (viewSize.height / apertureSize.width);
yc = 1.f - ((point.x + ((x2 - viewSize.width) / 2)) / x2); // Account for cropped width
xc = point.y / viewSize.height;
}
}
pointOfInterest = CGPointMake(xc, yc);
break;
}
}
return pointOfInterest;
}
#pragma mark - SCManagedCapturer friendly methods
- (AVCaptureDevice *)device
{
return _device;
}
- (AVCaptureDeviceInput *)deviceInput
{
SCTraceStart();
if (!_deviceInput) {
NSError *error = nil;
_deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_device error:&error];
if (!_deviceInput) {
_error = [error copy];
}
}
return _deviceInput;
}
- (NSError *)error
{
return _error;
}
- (BOOL)softwareZoom
{
return _softwareZoom;
}
- (BOOL)isConnected
{
return _isConnected;
}
- (BOOL)flashActive
{
return _flashActive;
}
- (BOOL)torchActive
{
return _torchActive;
}
- (float)zoomFactor
{
return _zoomFactor;
}
- (BOOL)isNightModeActive
{
return _isNightModeActive;
}
- (BOOL)liveVideoStreamingActive
{
return _liveVideoStreamingActive;
}
- (BOOL)isAvailable
{
return [_device isConnected];
}
#pragma mark - Private methods
- (void)_enableSubjectAreaChangeMonitoring
{
SCTraceStart();
[_device runTask:@"enable SubjectAreaChangeMonitoring"
withLockedConfiguration:^() {
_device.subjectAreaChangeMonitoringEnabled = YES;
}];
}
- (AVCaptureDeviceFormat *)activeFormat
{
return _device.activeFormat;
}
#pragma mark - Observe -adjustingExposure
- (void)_setAsExposureListenerForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set exposure adjustment KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, adjustingExposure)
options:NSKeyValueObservingOptionNew
action:@selector(_adjustingExposureChanged:)];
}
- (void)_adjustingExposureChanged:(NSDictionary *)change
{
SCTraceStart();
BOOL adjustingExposure = [change[NSKeyValueChangeNewKey] boolValue];
SCLogCoreCameraInfo(@"KVO exposure changed to %d", adjustingExposure);
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeAdjustingExposure:)]) {
[self.delegate managedCaptureDevice:self didChangeAdjustingExposure:adjustingExposure];
}
}
#pragma mark - Observe -exposurePointOfInterest
- (void)_observeExposurePointForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set exposure point KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, exposurePointOfInterest)
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
action:@selector(_exposurePointOfInterestChanged:)];
}
- (void)_exposurePointOfInterestChanged:(NSDictionary *)change
{
SCTraceStart();
CGPoint exposurePoint = [change[NSKeyValueChangeNewKey] CGPointValue];
SCLogCoreCameraInfo(@"KVO exposure point changed to %@", NSStringFromCGPoint(exposurePoint));
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeExposurePoint:)]) {
[self.delegate managedCaptureDevice:self didChangeExposurePoint:exposurePoint];
}
}
#pragma mark - Observe -focusPointOfInterest
- (void)_observeFocusPointForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set focus point KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, focusPointOfInterest)
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
action:@selector(_focusPointOfInterestChanged:)];
}
- (void)_focusPointOfInterestChanged:(NSDictionary *)change
{
SCTraceStart();
CGPoint focusPoint = [change[NSKeyValueChangeNewKey] CGPointValue];
SCLogCoreCameraInfo(@"KVO focus point changed to %@", NSStringFromCGPoint(focusPoint));
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeFocusPoint:)]) {
[self.delegate managedCaptureDevice:self didChangeFocusPoint:focusPoint];
}
}
- (void)dealloc
{
[_observeController unobserveAll];
}
@end

17
ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h

@ -0,0 +1,17 @@
//
// SCManagedCaptureDeviceAutoExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;
@end

63
ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m

@ -0,0 +1,63 @@
//
// SCManagedCaptureDeviceAutoExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceAutoExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceAutoExposureHandler {
CGPoint _exposurePointOfInterest;
AVCaptureDevice *_device;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
if (!CGPointEqualToPoint(pointOfInterest, _exposurePointOfInterest)) {
if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&
[_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode.
// Note that the order matters.
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
}
- (void)setVisible:(BOOL)visible
{
}
@end

18
ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h

@ -0,0 +1,18 @@
//
// SCManagedCaptureDeviceAutoFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to adjust focus-related parameters of the camera, including focus mode and focus point.
#import "SCManagedCaptureDeviceFocusHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;
@end

131
ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m

@ -0,0 +1,131 @@
//
// SCManagedCaptureDeviceAutoFocusHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceAutoFocusHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@import CoreGraphics;
@interface SCManagedCaptureDeviceAutoFocusHandler ()
@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL isFocusLock;
@end
@implementation SCManagedCaptureDeviceAutoFocusHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest
{
if (self = [super init]) {
_device = device;
_focusPointOfInterest = pointOfInterest;
_isContinuousAutofocus = YES;
_isFocusLock = NO;
}
return self;
}
- (CGPoint)getFocusPointOfInterest
{
return self.focusPointOfInterest;
}
// Called when the user taps a point on screen to re-adjust the camera focus onto that spot.
// This re-adjustment is always necessary, regardless of scenario (recording video, taking a photo, etc.),
// therefore we don't have to check self.isFocusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) || self.isContinuousAutofocus);
// Do the setup immediately; we intentionally do not check the focus lock here (see the comment above).
if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set autofocus"
withLockedConfiguration:^() {
// Set the focus point before changing the focus mode.
// Note that the order matters.
self.device.focusPointOfInterest = pointOfInterest;
self.device.focusMode = AVCaptureFocusModeAutoFocus;
}];
}
self.focusPointOfInterest = pointOfInterest;
self.isContinuousAutofocus = NO;
}
- (void)continuousAutofocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
if (!self.isFocusLock) {
// Do the setup immediately if the focus lock is off.
if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set continuous autofocus"
withLockedConfiguration:^() {
// Set the focus point before changing the focus mode.
// Note that the order matters.
self.device.focusPointOfInterest = CGPointMake(0.5, 0.5);
self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}];
}
}
self.focusPointOfInterest = CGPointMake(0.5, 0.5);
self.isContinuousAutofocus = YES;
}
- (void)setFocusLock:(BOOL)focusLock
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isFocusLock != focusLock);
// This is the old lock behavior; we only do focus lock on the back camera.
if (focusLock) {
if ([self.device isFocusModeSupported:AVCaptureFocusModeLocked]) {
[self.device runTask:@"set focus lock on"
withLockedConfiguration:^() {
self.device.focusMode = AVCaptureFocusModeLocked;
}];
}
} else {
// Restore to previous autofocus configurations
if ([self.device isFocusModeSupported:(self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus)] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set focus lock on"
withLockedConfiguration:^() {
self.device.focusPointOfInterest = self.focusPointOfInterest;
self.device.focusMode = self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus;
}];
}
}
self.isFocusLock = focusLock;
}
- (void)setSmoothFocus:(BOOL)smoothFocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
[self.device runTask:@"set smooth autofocus"
withLockedConfiguration:^() {
[self.device setSmoothAutoFocusEnabled:smoothFocus];
}];
}
- (void)setVisible:(BOOL)visible
{
}
@end

25
ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h

@ -0,0 +1,25 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler.h
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <SCBase/SCMacros.h>
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@class SCManagedCaptureDevice;
@class SCCaptureResource;
@interface SCManagedCaptureDeviceDefaultZoomHandler : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately;
- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device;
@end

93
ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m

@ -0,0 +1,93 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler.m
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerStateBuilder.h"
#import "SCMetalUtils.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCManagedCaptureDeviceDefaultZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
_captureResource = captureResource;
}
return self;
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
[self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];
}
- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device
{
SCTraceODPCompatibleStart(2);
SCAssert([_captureResource.queuePerformer isCurrentPerformer] ||
[[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
@"");
SCAssert(device.softwareZoom, @"Only do software zoom for software zoom device");
SC_GUARD_ELSE_RETURN(!SCDeviceSupportsMetal());
float zoomFactor = device.zoomFactor;
SCLogCapturerInfo(@"Adjusting software zoom factor to: %f", zoomFactor);
AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
[[SCQueuePerformer mainQueuePerformer] perform:^{
[CATransaction begin];
[CATransaction setDisableActions:YES];
// We end up needing to change its superlayer's transform to get the zoom effect
videoPreviewLayer.superlayer.affineTransform = CGAffineTransformMakeScale(zoomFactor, zoomFactor);
[CATransaction commit];
}];
}
- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device
{
SCTraceODPCompatibleStart(2);
[_captureResource.queuePerformer perform:^{
SCTraceStart();
if (device) {
SCLogCapturerInfo(@"Set zoom factor: %f -> %f", _captureResource.state.zoomFactor, zoomFactor);
[device setZoomFactor:zoomFactor];
BOOL zoomFactorChanged = NO;
// If the device is our current device, send the notification, update the
// state.
if (device.isConnected && device == _captureResource.device) {
if (device.softwareZoom) {
[self softwareZoomWithDevice:device];
}
_captureResource.state = [[[SCManagedCapturerStateBuilder
withManagedCapturerState:_captureResource.state] setZoomFactor:zoomFactor] build];
zoomFactorChanged = YES;
}
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
if (zoomFactorChanged) {
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeZoomFactor:state];
}
});
}
}];
}
@end

17
ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h

@ -0,0 +1,17 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler_Private.h
// Snapchat
//
// Created by Joe Qiao on 04/01/2018.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceDefaultZoomHandler ()
@property (nonatomic, weak) SCCaptureResource *captureResource;
@property (nonatomic, weak) SCManagedCaptureDevice *currentDevice;
- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device;
@end

22
ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h

@ -0,0 +1,22 @@
//
// SCManagedCaptureDeviceExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@protocol SCManagedCaptureDeviceExposureHandler <NSObject>
- (CGPoint)getExposurePointOfInterest;
- (void)setStableExposure:(BOOL)stableExposure;
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;
- (void)setVisible:(BOOL)visible;
@end

28
ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/6/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to
// 1. adjust exposure-related parameters of the camera, including exposure mode and exposure point.
// 2. receive detected face bounds, and set the exposure point to a preferred face if needed.
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
@protocol SCCapturer;
@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer;
@end

121
ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m

@ -0,0 +1,121 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/6/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@import AVFoundation;
@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler () <SCManagedCapturerListener>
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, weak) id<SCCapturer> managedCapturer;
@property (nonatomic, assign) CGPoint exposurePointOfInterest;
@property (nonatomic, assign) BOOL isVisible;
@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;
@end
@implementation SCManagedCaptureDeviceFaceDetectionAutoExposureHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer
{
if (self = [super init]) {
SCAssert(device, @"AVCaptureDevice should not be nil.");
SCAssert(managedCapturer, @"id<SCCapturer> should not be nil.");
_device = device;
_exposurePointOfInterest = pointOfInterest;
SCManagedCaptureDevicePosition position =
(device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront
: SCManagedCaptureDevicePositionBack);
_resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
initWithDefaultPointOfInterest:pointOfInterest
shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];
_managedCapturer = managedCapturer;
}
return self;
}
- (void)dealloc
{
[_managedCapturer removeListener:self];
}
- (CGPoint)getExposurePointOfInterest
{
return self.exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceODPCompatibleStart(2);
pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:fromUser];
[self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];
}
- (void)_actuallySetExposurePointOfInterestIfNeeded:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.exposurePointOfInterest));
if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&
[self.device isExposurePointOfInterestSupported]) {
[self.device runTask:@"set exposure"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode.
// Note that the order matters.
self.device.exposurePointOfInterest = pointOfInterest;
self.device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
self.exposurePointOfInterest = pointOfInterest;
}
- (void)setStableExposure:(BOOL)stableExposure
{
}
- (void)setVisible:(BOOL)visible
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(_isVisible != visible);
_isVisible = visible;
if (visible) {
[self.managedCapturer addListener:self];
} else {
[self.managedCapturer removeListener:self];
[self.resource reset];
}
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isVisible);
CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
[self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];
}
@end

28
ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to
// 1. adjust focus-related parameters of the camera, including focus mode and focus point.
// 2. receive detected face bounds, and focus on a preferred face if needed.
#import "SCManagedCaptureDeviceFocusHandler.h"
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
@protocol SCCapturer;
@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer;
@end

153
ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m

@ -0,0 +1,153 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler () <SCManagedCapturerListener>
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, weak) id<SCCapturer> managedCapturer;
@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, assign) BOOL isVisible;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL focusLock;
@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;
@end
@implementation SCManagedCaptureDeviceFaceDetectionAutoFocusHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer
{
if (self = [super init]) {
SCAssert(device, @"AVCaptureDevice should not be nil.");
SCAssert(managedCapturer, @"id<SCCapturer> should not be nil.");
_device = device;
_focusPointOfInterest = pointOfInterest;
SCManagedCaptureDevicePosition position =
(device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront
: SCManagedCaptureDevicePositionBack);
_resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
initWithDefaultPointOfInterest:pointOfInterest
shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];
_managedCapturer = managedCapturer;
}
return self;
}
- (CGPoint)getFocusPointOfInterest
{
return self.focusPointOfInterest;
}
// Called when the user taps a point on screen to re-adjust the camera focus onto that spot.
// This re-adjustment is always necessary, regardless of scenario (recording video, taking a photo, etc.),
// therefore we don't have to check self.focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:YES];
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) ||
self.isContinuousAutofocus);
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:AVCaptureFocusModeAutoFocus
taskName:@"set autofocus"];
}
- (void)continuousAutofocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
CGPoint pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:AVCaptureFocusModeContinuousAutoFocus
taskName:@"set continuous autofocus"];
}
- (void)setFocusLock:(BOOL)focusLock
{
// Disabled focus lock for face detection and focus handler.
}
- (void)setSmoothFocus:(BOOL)smoothFocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
[self.device runTask:@"set smooth autofocus"
withLockedConfiguration:^() {
[self.device setSmoothAutoFocusEnabled:smoothFocus];
}];
}
- (void)setVisible:(BOOL)visible
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(_isVisible != visible);
self.isVisible = visible;
if (visible) {
[[SCManagedCapturer sharedInstance] addListener:self];
} else {
[[SCManagedCapturer sharedInstance] removeListener:self];
[self.resource reset];
}
}
- (void)_actuallySetFocusPointOfInterestIfNeeded:(CGPoint)pointOfInterest
withFocusMode:(AVCaptureFocusMode)focusMode
taskName:(NSString *)taskName
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) &&
[self.device isFocusModeSupported:focusMode] && [self.device isFocusPointOfInterestSupported]);
[self.device runTask:taskName
withLockedConfiguration:^() {
// Set the focus point before changing the focus mode.
// Note that the order matters.
self.device.focusPointOfInterest = pointOfInterest;
self.device.focusMode = focusMode;
}];
self.focusPointOfInterest = pointOfInterest;
self.isContinuousAutofocus = (focusMode == AVCaptureFocusModeContinuousAutoFocus);
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isVisible);
CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
// If pointOfInterest equals CGPointMake(0.5, 0.5), no valid face was found, so we reset to
// AVCaptureFocusModeContinuousAutoFocus. Otherwise, focus on the point and set the mode to
// AVCaptureFocusModeAutoFocus.
// TODO(Jiyang): Refactor SCManagedCaptureFaceDetectionAdjustingPOIResource to include focusMode and exposureMode.
AVCaptureFocusMode focusMode = CGPointEqualToPoint(pointOfInterest, CGPointMake(0.5, 0.5))
? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus;
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:focusMode
taskName:@"set autofocus from face detection"];
}
@end

28
ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@protocol SCManagedCaptureDeviceFocusHandler <NSObject>
- (CGPoint)getFocusPointOfInterest;
/// Called when subject area changes.
- (void)continuousAutofocus;
/// Called when user taps.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;
- (void)setSmoothFocus:(BOOL)smoothFocus;
- (void)setFocusLock:(BOOL)focusLock;
- (void)setVisible:(BOOL)visible;
@end

23
ManagedCapturer/SCManagedCaptureDeviceHandler.h

@ -0,0 +1,23 @@
//
// SCManagedCaptureDeviceHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/8/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@interface SCManagedCaptureDeviceHandler : NSObject <SCManagedCaptureDeviceDelegate>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
@end

77
ManagedCapturer/SCManagedCaptureDeviceHandler.m

@ -0,0 +1,77 @@
//
// SCManagedCaptureDeviceHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/8/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceHandler.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedCaptureDeviceHandler ()
@property (nonatomic, weak) SCCaptureResource *captureResource;
@end
@implementation SCManagedCaptureDeviceHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"SCCaptureResource should not be nil.");
_captureResource = captureResource;
}
return self;
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure
{
SC_GUARD_ELSE_RETURN(device == _captureResource.device);
SCTraceODPCompatibleStart(2);
SCLogCapturerInfo(@"KVO Changes adjustingExposure %d", adjustingExposure);
[_captureResource.queuePerformer perform:^{
_captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
setAdjustingExposure:adjustingExposure] build];
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeAdjustingExposure:state];
});
}];
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint
{
SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
SCTraceODPCompatibleStart(2);
runOnMainThreadAsynchronously(^{
[self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeExposurePoint:exposurePoint];
});
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint
{
SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
SCTraceODPCompatibleStart(2);
runOnMainThreadAsynchronously(^{
[self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeFocusPoint:focusPoint];
});
}
@end

12
ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h

@ -0,0 +1,12 @@
//
// SCManagedCaptureDeviceLinearInterpolationZoomHandler.h
// Snapchat
//
// Created by Joe Qiao on 03/01/2018.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler
@end

190
ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m

@ -0,0 +1,190 @@
//
// SCManagedCaptureDeviceLinearInterpolationZoomHandler.m
// Snapchat
//
// Created by Joe Qiao on 03/01/2018.
//
#import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import "SCManagedCapturerLogging.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCMathUtils.h>
@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler ()
@property (nonatomic, strong) CADisplayLink *displayLink;
@property (nonatomic, assign) double timestamp;
@property (nonatomic, assign) float targetFactor;
@property (nonatomic, assign) float intermediateFactor;
@property (nonatomic, assign) int trend;
@property (nonatomic, assign) float stepLength;
@end
@implementation SCManagedCaptureDeviceLinearInterpolationZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super initWithCaptureResource:captureResource];
if (self) {
_timestamp = -1.0;
_targetFactor = 1.0;
_intermediateFactor = _targetFactor;
_trend = 1;
_stepLength = 0.0;
}
return self;
}
- (void)dealloc
{
[self _invalidate];
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
if (self.currentDevice != device) {
if (_displayLink) {
// If the device changed, interrupt the smoothing process
// and reset to the target zoom factor immediately.
[self _resetToZoomFactor:_targetFactor];
}
self.currentDevice = device;
immediately = YES;
}
if (immediately) {
[self _resetToZoomFactor:zoomFactor];
} else {
[self _addTargetZoomFactor:zoomFactor];
}
}
#pragma mark - Configurable
// Smooth the zoom if the update time interval is greater than the threshold
- (double)_thresholdTimeIntervalToSmoothen
{
return SCCameraTweaksSmoothZoomThresholdTime();
}
- (double)_thresholdFactorDiffToSmoothen
{
return SCCameraTweaksSmoothZoomThresholdFactor();
}
- (int)_intermediateFactorFramesPerSecond
{
return SCCameraTweaksSmoothZoomIntermediateFramesPerSecond();
}
- (double)_delayTolerantTime
{
return SCCameraTweaksSmoothZoomDelayTolerantTime();
}
// Minimum step length between two intermediate factors;
// the greater the better, as long as it still provides a smooth experience during the smoothing process.
- (float)_minimumStepLength
{
return SCCameraTweaksSmoothZoomMinStepLength();
}
#pragma mark - Private methods
- (void)_addTargetZoomFactor:(float)factor
{
SCAssertMainThread();
SCLogCapturerInfo(@"Smooth Zoom - [1] t=%f zf=%f", CACurrentMediaTime(), factor);
if (SCFloatEqual(factor, _targetFactor)) {
return;
}
_targetFactor = factor;
float diff = _targetFactor - _intermediateFactor;
if ([self _isDuringSmoothingProcess]) {
// during smoothing, only update data
[self _updateDataWithDiff:diff];
} else {
double curTimestamp = CACurrentMediaTime();
if (!SCFloatEqual(_timestamp, -1.0) && (curTimestamp - _timestamp) > [self _thresholdTimeIntervalToSmoothen] &&
ABS(diff) > [self _thresholdFactorDiffToSmoothen]) {
// need smoothing
[self _updateDataWithDiff:diff];
if ([self _nextStep]) {
// Use a display link to interpolate intermediate factors and avoid a sharp jump
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(_nextStep)];
_displayLink.preferredFramesPerSecond = [self _intermediateFactorFramesPerSecond];
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
} else {
_timestamp = curTimestamp;
_intermediateFactor = factor;
SCLogCapturerInfo(@"Smooth Zoom - [2] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor);
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
}
}
}
- (void)_resetToZoomFactor:(float)factor
{
[self _invalidate];
_timestamp = -1.0;
_targetFactor = factor;
_intermediateFactor = _targetFactor;
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
}
- (BOOL)_nextStep
{
_timestamp = CACurrentMediaTime();
_intermediateFactor += (_trend * _stepLength);
BOOL hasNext = YES;
if (_trend < 0.0) {
_intermediateFactor = MAX(_intermediateFactor, _targetFactor);
} else {
_intermediateFactor = MIN(_intermediateFactor, _targetFactor);
}
SCLogCapturerInfo(@"Smooth Zoom - [3] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor);
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
if (SCFloatEqual(_intermediateFactor, _targetFactor)) {
// finished smoothing
[self _invalidate];
hasNext = NO;
}
return hasNext;
}
- (void)_invalidate
{
[_displayLink invalidate];
_displayLink = nil;
_trend = 1;
_stepLength = 0.0;
}
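// The step length is sized so the remaining difference can be covered within the delay-tolerant time at the
// configured intermediate frame rate; it never drops below the minimum step length and never shrinks while a
// smoothing pass is in flight.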
- (void)_updateDataWithDiff:(CGFloat)diff
{
_trend = diff < 0.0 ? -1 : 1;
_stepLength =
MAX(_stepLength, MAX([self _minimumStepLength],
ABS(diff) / ([self _delayTolerantTime] * [self _intermediateFactorFramesPerSecond])));
}
- (BOOL)_isDuringSmoothingProcess
{
return (_displayLink ? YES : NO);
}
@end

20
ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h

@ -0,0 +1,20 @@
//
// SCManagedCaptureDeviceLockOnRecordExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
// An exposure handler that prevents any changes in exposure as soon as recording begins
@interface SCManagedCaptureDeviceLockOnRecordExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
allowTap:(BOOL)allowTap;
@end

90
ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m

@ -0,0 +1,90 @@
//
// SCManagedCaptureDeviceLockOnRecordExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceLockOnRecordExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCExposureState.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceLockOnRecordExposureHandler {
CGPoint _exposurePointOfInterest;
AVCaptureDevice *_device;
// allows the exposure to change when the user taps to refocus
BOOL _allowTap;
SCExposureState *_exposureState;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
allowTap:(BOOL)allowTap
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
_allowTap = allowTap;
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom ||
_device.exposureMode == AVCaptureExposureModeAutoExpose;
if (!locked || (fromUser && _allowTap)) {
AVCaptureExposureMode exposureMode =
(locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);
if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode.
// Note that the order matters.
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = exposureMode;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
AVCaptureExposureMode exposureMode =
stableExposure ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure;
if ([_device isExposureModeSupported:exposureMode]) {
[_device runTask:@"set stable exposure"
withLockedConfiguration:^() {
_device.exposureMode = exposureMode;
}];
}
}
- (void)setVisible:(BOOL)visible
{
if (visible) {
if (_device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom) {
[_exposureState applyISOAndExposureDurationToDevice:_device];
}
} else {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
}
}
@end

13
ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h

@ -0,0 +1,13 @@
//
// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler
@end

95
ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m

@ -0,0 +1,95 @@
//
// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
// https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter
//
#import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
static NSUInteger const kSCSavitzkyGolayWindowSize = 9;
static CGFloat const kSCUpperSharpZoomThreshold = 1.15;
@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler ()
@property (nonatomic, strong) NSMutableArray *zoomFactorHistoryArray;
@end
@implementation SCManagedCaptureDeviceSavitzkyGolayZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super initWithCaptureResource:captureResource];
if (self) {
_zoomFactorHistoryArray = [[NSMutableArray alloc] init];
}
return self;
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
if (self.currentDevice != device) {
// reset if device changed
self.currentDevice = device;
[self _resetZoomFactor:zoomFactor forDevice:self.currentDevice];
return;
}
if (immediately || zoomFactor == 1 || _zoomFactorHistoryArray.count == 0) {
// reset if zoomFactor is 1 or this is the first data point
[self _resetZoomFactor:zoomFactor forDevice:device];
return;
}
CGFloat lastVal = [[_zoomFactorHistoryArray lastObject] floatValue];
CGFloat upperThreshold = lastVal * kSCUpperSharpZoomThreshold;
if (zoomFactor > upperThreshold) {
// sharp change in zoomFactor, reset
[self _resetZoomFactor:zoomFactor forDevice:device];
return;
}
[_zoomFactorHistoryArray addObject:@(zoomFactor)];
if ([_zoomFactorHistoryArray count] > kSCSavitzkyGolayWindowSize) {
[_zoomFactorHistoryArray removeObjectAtIndex:0];
}
float filteredZoomFactor =
SC_CLAMP([self _savitzkyGolayFilteredZoomFactor], kSCMinVideoZoomFactor, kSCMaxVideoZoomFactor);
[self _setZoomFactor:filteredZoomFactor forManagedCaptureDevice:device];
}
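// These are the standard 9-point Savitzky-Golay smoothing coefficients for a quadratic/cubic fit:
// (-21, 14, 39, 54, 59, 54, 39, 14, -21) / 231. The symmetric window means the smoothed value tracks the
// middle of the window, so it lags the newest zoom factor by a few samples.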
- (CGFloat)_savitzkyGolayFilteredZoomFactor
{
if ([_zoomFactorHistoryArray count] == kSCSavitzkyGolayWindowSize) {
CGFloat filteredZoomFactor =
59 * [_zoomFactorHistoryArray[4] floatValue] +
54 * ([_zoomFactorHistoryArray[3] floatValue] + [_zoomFactorHistoryArray[5] floatValue]) +
39 * ([_zoomFactorHistoryArray[2] floatValue] + [_zoomFactorHistoryArray[6] floatValue]) +
14 * ([_zoomFactorHistoryArray[1] floatValue] + [_zoomFactorHistoryArray[7] floatValue]) -
21 * ([_zoomFactorHistoryArray[0] floatValue] + [_zoomFactorHistoryArray[8] floatValue]);
filteredZoomFactor /= 231;
return filteredZoomFactor;
} else {
return [[_zoomFactorHistoryArray lastObject] floatValue]; // use the latest zoomFactor directly if we have fewer than 9 samples
}
}
- (void)_resetZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device
{
[_zoomFactorHistoryArray removeAllObjects];
[_zoomFactorHistoryArray addObject:@(zoomFactor)];
[self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];
}
@end
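The integer weights used above (-21, 14, 39, 54, 59, 54, 39, 14, -21) are the standard Savitzky–Golay smoothing coefficients for a 9-sample window with a quadratic least-squares fit; they sum to 231, so the filter is a normalized weighted average centered on the fifth sample. A minimal standalone sketch of the same computation (the helper name is hypothetical and not part of this change):
// Hypothetical helper illustrating the 9-point Savitzky-Golay smoothing used above.
static CGFloat SCSavitzkyGolaySmooth9(const CGFloat samples[9])
{
    // Weights for window size 9, degree-2 least-squares fit; they sum to 231.
    static const CGFloat weights[9] = {-21, 14, 39, 54, 59, 54, 39, 14, -21};
    CGFloat weightedSum = 0;
    for (NSUInteger i = 0; i < 9; i++) {
        weightedSum += weights[i] * samples[i];
    }
    return weightedSum / 231;
}
Feeding it a constant history returns the same constant, while a single outlier inside the window is damped rather than passed through, which is what smooths the pinch-to-zoom gesture.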

23
ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h

@ -0,0 +1,23 @@
//
// SCManagedCaptureDeviceSubjectAreaHandler.h
// Snapchat
//
// Created by Xiaokang Liu on 19/03/2018.
//
// This class handles the AVCaptureDeviceSubjectAreaDidChangeNotification notification for SCManagedCapturer,
// resetting the device's settings when the subject area changes.
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@protocol SCCapturer;
@interface SCManagedCaptureDeviceSubjectAreaHandler : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;
- (void)stopObserving;
- (void)startObserving;
@end

67
ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m

@ -0,0 +1,67 @@
//
// SCManagedCaptureDeviceSubjectAreaHandler.m
// Snapchat
//
// Created by Xiaokang Liu on 19/03/2018.
//
#import "SCManagedCaptureDeviceSubjectAreaHandler.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCCaptureWorker.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCManagedCaptureDeviceSubjectAreaHandler () {
__weak SCCaptureResource *_captureResource;
}
@end
@implementation SCManagedCaptureDeviceSubjectAreaHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"");
_captureResource = captureResource;
}
return self;
}
- (void)stopObserving
{
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVCaptureDeviceSubjectAreaDidChangeNotification
object:nil];
}
- (void)startObserving
{
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_subjectAreaDidChange:)
name:AVCaptureDeviceSubjectAreaDidChangeNotification
object:nil];
}
#pragma mark - Private methods
- (void)_subjectAreaDidChange:(NSNotification *)notification
{
[_captureResource.queuePerformer perform:^{
if (_captureResource.device.isConnected && !_captureResource.state.arSessionActive) {
// Reset to continuous autofocus when the subject area changed
[_captureResource.device continuousAutofocus];
[_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
if (SCCameraTweaksEnablePortraitModeAutofocus()) {
[SCCaptureWorker setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5)
completionHandler:nil
resource:_captureResource];
}
}
}];
}
@end
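One assumption worth stating: AVCaptureDeviceSubjectAreaDidChangeNotification is only delivered while subjectAreaChangeMonitoringEnabled is YES on the device, and this handler does not toggle that flag itself; presumably the device wrapper enables it when a focus/exposure point of interest is applied. A minimal sketch of that prerequisite (illustrative only, not part of this change):
// Sketch only: enable subject-area monitoring so the notification observed above can fire.
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    device.subjectAreaChangeMonitoringEnabled = YES;
    [device unlockForConfiguration];
}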

19
ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h

@ -0,0 +1,19 @@
//
// SCManagedCaptureDeviceThresholdExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 4/11/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceThresholdExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
threshold:(CGFloat)threshold;
@end

133
ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m

@ -0,0 +1,133 @@
//
// SCManagedCaptureDeviceThresholdExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 4/11/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceThresholdExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCExposureState.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceThresholdExposureHandler {
AVCaptureDevice *_device;
CGPoint _exposurePointOfInterest;
CGFloat _threshold;
// allows the exposure to change when the user taps to refocus
SCExposureState *_exposureState;
FBKVOController *_kvoController;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
threshold:(CGFloat)threshold
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
_threshold = threshold;
_kvoController = [FBKVOController controllerWithObserver:self];
@weakify(self);
[_kvoController observe:device
keyPath:NSStringFromSelector(@selector(exposureMode))
options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
block:^(id observer, id object, NSDictionary *change) {
@strongify(self);
AVCaptureExposureMode old =
(AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeOldKey] intValue];
AVCaptureExposureMode new =
(AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeNewKey] intValue];
if (old == AVCaptureExposureModeAutoExpose && new == AVCaptureExposureModeLocked) {
// auto expose is done, go back to custom
self->_exposureState = [[SCExposureState alloc] initWithDevice:self->_device];
[self->_exposureState applyISOAndExposureDurationToDevice:self->_device];
}
}];
[_kvoController observe:device
keyPath:NSStringFromSelector(@selector(exposureTargetOffset))
options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
block:^(id observer, id object, NSDictionary *change) {
@strongify(self);
if (self->_device.exposureMode == AVCaptureExposureModeCustom) {
CGFloat offset = [(NSNumber *)change[NSKeyValueChangeOldKey] floatValue];
if (fabs(offset) > self->_threshold) {
[self->_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode;
// note that the order matters
self->_device.exposurePointOfInterest = CGPointMake(0.5, 0.5);
self->_device.exposureMode = AVCaptureExposureModeAutoExpose;
}];
}
}
}];
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom ||
_device.exposureMode == AVCaptureExposureModeAutoExpose;
if (!locked || fromUser) {
AVCaptureExposureMode exposureMode =
(locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);
if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode;
// note that the order matters
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = exposureMode;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
if (stableExposure) {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
[_exposureState applyISOAndExposureDurationToDevice:_device];
} else {
AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
if ([_device isExposureModeSupported:exposureMode]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
_device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
}
}
- (void)setVisible:(BOOL)visible
{
if (visible) {
if (_device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom) {
[_exposureState applyISOAndExposureDurationToDevice:_device];
}
} else {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
}
}
@end
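In short, this handler freezes the device on a captured ISO/exposure-duration pair and uses the exposureTargetOffset KVO to trigger a one-shot auto-expose whenever the scene drifts past the threshold, after which the locked state is re-captured. A hedged usage sketch (the threshold value is illustrative, not taken from this change):
// Hypothetical usage; the 0.5 threshold is illustrative only.
SCManagedCaptureDeviceThresholdExposureHandler *exposureHandler =
    [[SCManagedCaptureDeviceThresholdExposureHandler alloc] initWithDevice:device
                                                            pointOfInterest:CGPointMake(0.5, 0.5)
                                                                  threshold:0.5];
// Freeze exposure (e.g. while recording); the KVO above re-exposes if the offset drifts too far.
[exposureHandler setStableExposure:YES];
// Release the lock and return to continuous auto exposure.
[exposureHandler setStableExposure:NO];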

61
ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h

@ -0,0 +1,61 @@
//
// SCManagedCaptureFaceDetectionAdjustingPOIResource.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class keeps several properties for face detection and focus/exposure. It provides methods that help
// FaceDetectionAutoFocusHandler and FaceDetectionAutoExposureHandler deal with point-of-interest setting events
// from user taps, subject area changes, and face detection, by updating itself and returning the actual point of
// interest.
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, SCManagedCaptureFaceDetectionAdjustingPOIMode) {
SCManagedCaptureFaceDetectionAdjustingPOIModeNone = 0,
SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace,
SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace,
};
@interface SCManagedCaptureFaceDetectionAdjustingPOIResource : NSObject
@property (nonatomic, assign) CGPoint pointOfInterest;
@property (nonatomic, strong) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, assign) SCManagedCaptureFaceDetectionAdjustingPOIMode adjustingPOIMode;
@property (nonatomic, assign) BOOL shouldTargetOnFaceAutomatically;
@property (nonatomic, strong) NSNumber *targetingFaceID;
@property (nonatomic, assign) CGRect targetingFaceBounds;
- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest
shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically;
- (void)reset;
/**
Update SCManagedCaptureFaceDetectionAdjustingPOIResource when a new POI adjustment comes in. It finds the face that
the proposedPoint belongs to and returns the center of that face, if the adjustingPOIMode and fromUser meet the
requirements.
@param proposedPoint
The point of interest that upper level wants to set.
@param fromUser
Whether the setting comes from a user tap or not.
@return
The actual point of interest that should be applied.
*/
- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser;
/**
Update SCManagedCaptureFaceDetectionAdjustingPOIResource when new detected face bounds come in.
@param faceBoundsByFaceID
A dictionary. Key: FaceID as NSNumber. Value: FaceBounds as CGRect.
@return
The actual point of interest that should be applied.
*/
- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;
@end
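A hedged sketch of how one of the handlers named above might drive this resource (the wiring and the way the result is applied to the device are assumptions, not part of this change):
// Hypothetical wiring; tapPoint and faceBoundsByFaceID stand in for real inputs.
SCManagedCaptureFaceDetectionAdjustingPOIResource *resource =
    [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
        initWithDefaultPointOfInterest:CGPointMake(0.5, 0.5)
       shouldTargetOnFaceAutomatically:YES];
// User tap: snap to the tapped face's center if the tap lands inside a detected face.
CGPoint tapPoint = CGPointMake(0.3, 0.6);
CGPoint applied = [resource updateWithNewProposedPointOfInterest:tapPoint fromUser:YES];
// New detection results: keep following the targeted face, or fall back to the largest one.
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
    @{ @1 : [NSValue valueWithCGRect:CGRectMake(0.25, 0.5, 0.2, 0.2)] };
applied = [resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
// `applied` would then be set as the device's focus/exposure point of interest.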

232
ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m

@ -0,0 +1,232 @@
//
// SCManagedCaptureFaceDetectionAdjustingPOIResource.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCManagedCaptureFaceDetectionAdjustingPOIResource {
CGPoint _defaultPointOfInterest;
}
#pragma mark - Public Methods
- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest
shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically
{
if (self = [super init]) {
_pointOfInterest = pointOfInterest;
_defaultPointOfInterest = pointOfInterest;
_shouldTargetOnFaceAutomatically = shouldTargetOnFaceAutomatically;
}
return self;
}
- (void)reset
{
SCTraceODPCompatibleStart(2);
self.adjustingPOIMode = SCManagedCaptureFaceDetectionAdjustingPOIModeNone;
self.targetingFaceID = nil;
self.targetingFaceBounds = CGRectZero;
self.faceBoundsByFaceID = nil;
self.pointOfInterest = _defaultPointOfInterest;
}
- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser
{
SCTraceODPCompatibleStart(2);
if (fromUser) {
NSNumber *faceID =
[self _getFaceIDOfFaceBoundsContainingPoint:proposedPoint fromFaceBounds:self.faceBoundsByFaceID];
if (faceID && [faceID integerValue] >= 0) {
CGPoint point = [self _getPointOfInterestWithFaceID:faceID fromFaceBounds:self.faceBoundsByFaceID];
if ([self _isPointOfInterestValid:point]) {
[self _setPointOfInterest:point
targetingFaceID:faceID
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];
}
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];
}
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeNone];
}
return self.pointOfInterest;
}
- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
self.faceBoundsByFaceID = faceBoundsByFaceID;
switch (self.adjustingPOIMode) {
case SCManagedCaptureFaceDetectionAdjustingPOIModeNone: {
if (self.shouldTargetOnFaceAutomatically) {
[self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];
}
} break;
case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace: {
BOOL isFocusingOnCurrentTargetingFaceSuccess =
[self _focusOnFaceWithTargetFaceID:self.targetingFaceID inFaceBounds:self.faceBoundsByFaceID];
if (!isFocusingOnCurrentTargetingFaceSuccess && self.shouldTargetOnFaceAutomatically) {
// If the targeted face has disappeared, and shouldTargetOnFaceAutomatically is YES, automatically target
// the next preferred face.
[self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];
}
} break;
case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace:
// The point of interest should be fixed at a non-face point where user tapped before.
break;
}
return self.pointOfInterest;
}
#pragma mark - Internal Methods
- (BOOL)_focusOnPreferredFaceInFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
NSNumber *preferredFaceID = [self _getPreferredFaceIDFromFaceBounds:faceBoundsByFaceID];
return [self _focusOnFaceWithTargetFaceID:preferredFaceID inFaceBounds:faceBoundsByFaceID];
}
- (BOOL)_focusOnFaceWithTargetFaceID:(NSNumber *)preferredFaceID
inFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN_VALUE(preferredFaceID, NO);
NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:preferredFaceID];
if (faceBoundsValue) {
CGRect faceBounds = [faceBoundsValue CGRectValue];
CGPoint proposedPoint = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));
if ([self _isPointOfInterestValid:proposedPoint]) {
if ([self _shouldChangeToNewPoint:proposedPoint withNewFaceID:preferredFaceID newFaceBounds:faceBounds]) {
[self _setPointOfInterest:proposedPoint
targetingFaceID:preferredFaceID
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];
}
return YES;
}
}
[self reset];
return NO;
}
- (void)_setPointOfInterest:(CGPoint)pointOfInterest
targetingFaceID:(NSNumber *)targetingFaceID
adjustingPOIMode:(SCManagedCaptureFaceDetectionAdjustingPOIMode)adjustingPOIMode
{
SCTraceODPCompatibleStart(2);
self.pointOfInterest = pointOfInterest;
self.targetingFaceID = targetingFaceID;
if (targetingFaceID) { // If targetingFaceID exists, record the current face bounds.
self.targetingFaceBounds = [[self.faceBoundsByFaceID objectForKey:targetingFaceID] CGRectValue];
} else { // Otherwise, reset targetingFaceBounds to zero.
self.targetingFaceBounds = CGRectZero;
}
self.adjustingPOIMode = adjustingPOIMode;
}
- (BOOL)_isPointOfInterestValid:(CGPoint)pointOfInterest
{
return (pointOfInterest.x >= 0 && pointOfInterest.x <= 1 && pointOfInterest.y >= 0 && pointOfInterest.y <= 1);
}
- (NSNumber *)_getPreferredFaceIDFromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);
// Find out the bounds with the max area.
__block NSNumber *preferredFaceID = nil;
__block CGFloat maxArea = 0;
[faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceBounds = [obj CGRectValue];
CGFloat area = CGRectGetWidth(faceBounds) * CGRectGetHeight(faceBounds);
if (area > maxArea) {
preferredFaceID = key;
maxArea = area;
}
}];
return preferredFaceID;
}
- (CGPoint)_getPointOfInterestWithFaceID:(NSNumber *)faceID
fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:faceID];
if (faceBoundsValue) {
CGRect faceBounds = [faceBoundsValue CGRectValue];
CGPoint point = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));
return point;
} else {
return CGPointMake(-1, -1); // An invalid point.
}
}
/**
Setting a new focus/exposure point is CPU-expensive, so we only set a new POI when we have to. This method returns
whether setting this new point is necessary.
If not, there is no need to change the POI.
*/
- (BOOL)_shouldChangeToNewPoint:(CGPoint)newPoint
withNewFaceID:(NSNumber *)newFaceID
newFaceBounds:(CGRect)newFaceBounds
{
SCTraceODPCompatibleStart(2);
BOOL shouldChange = NO;
if (!newFaceID || !self.targetingFaceID ||
![newFaceID isEqualToNumber:self.targetingFaceID]) { // Return YES if it is a new face.
shouldChange = YES;
} else if (CGRectEqualToRect(self.targetingFaceBounds, CGRectZero) ||
!CGRectContainsPoint(self.targetingFaceBounds,
newPoint)) { // Return YES if the new point is out of the current face bounds.
shouldChange = YES;
} else {
CGFloat currentBoundsArea =
CGRectGetWidth(self.targetingFaceBounds) * CGRectGetHeight(self.targetingFaceBounds);
CGFloat newBoundsArea = CGRectGetWidth(newFaceBounds) * CGRectGetHeight(newFaceBounds);
if (newBoundsArea >= currentBoundsArea * 1.2 ||
newBoundsArea <=
currentBoundsArea *
0.8) { // Return YES if the area of the new bounds is over 20% more or 20% less than the current one.
shouldChange = YES;
}
}
return shouldChange;
}
- (NSNumber *)_getFaceIDOfFaceBoundsContainingPoint:(CGPoint)point
fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);
__block NSNumber *faceID = nil;
[faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceBounds = [obj CGRectValue];
if (CGRectContainsPoint(faceBounds, point)) {
faceID = key;
*stop = YES;
}
}];
return faceID;
}
@end
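For the 20% hysteresis in _shouldChangeToNewPoint:, a quick worked example with illustrative values:
// Worked example of the 20% hysteresis above (values are illustrative).
CGRect currentBounds = CGRectMake(0.4, 0.4, 0.20, 0.20); // area 0.0400, keep band is (0.032, 0.048)
CGRect newBounds = CGRectMake(0.4, 0.4, 0.22, 0.22);     // area 0.0484
CGFloat currentArea = CGRectGetWidth(currentBounds) * CGRectGetHeight(currentBounds);
CGFloat newArea = CGRectGetWidth(newBounds) * CGRectGetHeight(newBounds);
BOOL shouldChange = (newArea >= currentArea * 1.2 || newArea <= currentArea * 0.8); // YES: 0.0484 >= 0.048
A 0.21 x 0.21 box (area 0.0441) would stay inside the band and the point of interest would be left alone.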

80
ManagedCapturer/SCManagedCapturePreviewLayerController.h

@ -0,0 +1,80 @@
//
// SCManagedCapturePreviewLayerController.h
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <SCCameraFoundation/SCManagedVideoDataSource.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <Metal/Metal.h>
#import <UIKit/UIKit.h>
@protocol SCCapturer;
@class LSAGLView, SCBlackCameraDetector, SCManagedCapturePreviewLayerController;
@protocol SCManagedCapturePreviewLayerControllerDelegate
- (SCBlackCameraDetector *)blackCameraDetectorForManagedCapturePreviewLayerController:
(SCManagedCapturePreviewLayerController *)controller;
- (sc_create_g2s_ticket_f)g2sTicketForManagedCapturePreviewLayerController:
(SCManagedCapturePreviewLayerController *)controller;
@end
/**
* SCManagedCapturePreviewLayerController controls the display of frames in a view. The controller has 3
* different mechanisms for this.
* AVCaptureVideoPreviewLayer: This is a feed coming straight from the camera and does not allow any
* image processing or modification of the frames displayed.
* LSAGLView: An OpenGL-based view for displaying video that is being processed (Lenses etc.)
* CAMetalLayer: Metal layer drawing textures on a vertex quad for display on screen.
*/
@interface SCManagedCapturePreviewLayerController : NSObject <SCManagedSampleBufferDisplayController>
@property (nonatomic, strong, readonly) UIView *view;
@property (nonatomic, strong, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong, readonly) LSAGLView *videoPreviewGLView;
@property (nonatomic, weak) id<SCManagedCapturePreviewLayerControllerDelegate> delegate;
+ (instancetype)sharedInstance;
- (void)pause;
- (void)resume;
- (UIView *)newStandInViewWithRect:(CGRect)rect;
- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer;
// This method returns a token that you can hold on to. As long as the token is held,
// an outdated view will be kept unless the app is backgrounded.
- (NSString *)keepDisplayingOutdatedPreview;
// End displaying the outdated frame with an issued keep token. If no one holds
// any token any more, the outdated view will be flushed.
- (void)endDisplayingOutdatedPreview:(NSString *)keepToken;
// Create views for Metal; this method needs to be called on the main thread.
- (void)setupPreviewLayer;
// Create the render pipeline state and set up shaders for Metal; this needs to be called off the main thread.
- (void)setupRenderPipeline;
- (void)applicationDidEnterBackground;
- (void)applicationWillEnterForeground;
- (void)applicationWillResignActive;
- (void)applicationDidBecomeActive;
@end
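Based on the comments above, the expected setup order is: create the view and the CAMetalLayer on the main thread, build the Metal pipeline off the main thread, then attach the capturer. A hedged sketch (the dispatch queue here is an assumption; the app may use its own performer):
SCManagedCapturePreviewLayerController *controller =
    [SCManagedCapturePreviewLayerController sharedInstance];
[controller setupPreviewLayer]; // main thread: view + CAMetalLayer
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [controller setupRenderPipeline]; // off the main thread: shaders + pipeline state
});
[controller setManagedCapturer:[SCManagedCapturer sharedInstance]]; // route frames into the controller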

563
ManagedCapturer/SCManagedCapturePreviewLayerController.m

@ -0,0 +1,563 @@
//
// SCManagedCapturePreviewLayerController.m
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewLayerController.h"
#import "SCBlackCameraDetector.h"
#import "SCCameraTweaks.h"
#import "SCManagedCapturePreviewView.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerUtils.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSData+Random.h>
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>
#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
#import <FBKVOController/FBKVOController.h>
#define SCLogPreviewLayerInfo(fmt, ...) SCLogCoreCameraInfo(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
#define SCLogPreviewLayerWarning(fmt, ...) SCLogCoreCameraWarning(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
#define SCLogPreviewLayerError(fmt, ...) SCLogCoreCameraError(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
const static CGSize kSCManagedCapturePreviewDefaultRenderSize = {
.width = 720, .height = 1280,
};
const static CGSize kSCManagedCapturePreviewRenderSize1080p = {
.width = 1080, .height = 1920,
};
#if !TARGET_IPHONE_SIMULATOR
static NSInteger const kSCMetalCannotAcquireDrawableLimit = 2;
@interface CAMetalLayer (SCSecretFature)
// Call discardContents.
- (void)sc_secretFeature;
@end
@implementation CAMetalLayer (SCSecretFature)
- (void)sc_secretFeature
{
// "discardContents"
char buffer[] = {0x9b, 0x96, 0x8c, 0x9c, 0x9e, 0x8d, 0x9b, 0xbc, 0x90, 0x91, 0x8b, 0x9a, 0x91, 0x8b, 0x8c, 0};
unsigned long len = strlen(buffer);
for (unsigned idx = 0; idx < len; ++idx) {
buffer[idx] = ~buffer[idx];
}
SEL selector = NSSelectorFromString([NSString stringWithUTF8String:buffer]);
if ([self respondsToSelector:selector]) {
NSMethodSignature *signature = [self methodSignatureForSelector:selector];
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature];
[invocation setTarget:self];
[invocation setSelector:selector];
[invocation invoke];
}
// For anyone curious, here is the actual implementation for discardContents in 10.3 (With Hopper v4, arm64)
// At a glance, this seems pretty safe to call.
// void -[CAMetalLayer(CAMetalLayerPrivate) discardContents](int arg0)
// {
// *(r31 + 0xffffffffffffffe0) = r20;
// *(0xfffffffffffffff0 + r31) = r19;
// r31 = r31 + 0xffffffffffffffe0;
// *(r31 + 0x10) = r29;
// *(0x20 + r31) = r30;
// r29 = r31 + 0x10;
// r19 = *(arg0 + sign_extend_64(*(int32_t *)0x1a6300510));
// if (r19 != 0x0) {
// r0 = loc_1807079dc(*0x1a7811fc8, r19);
// r0 = _CAImageQueueConsumeUnconsumed(*(r19 + 0x10));
// r0 = _CAImageQueueFlush(*(r19 + 0x10));
// r29 = *(r31 + 0x10);
// r30 = *(0x20 + r31);
// r20 = *r31;
// r19 = *(r31 + 0x10);
// r31 = r31 + 0x20;
// r0 = loc_1807079dc(*0x1a7811fc8, zero_extend_64(0x0));
// } else {
// r29 = *(r31 + 0x10);
// r30 = *(0x20 + r31);
// r20 = *r31;
// r19 = *(r31 + 0x10);
// r31 = r31 + 0x20;
// }
// return;
// }
}
@end
#endif
@interface SCManagedCapturePreviewLayerController () <SCManagedCapturerListener>
@property (nonatomic) BOOL renderSuspended;
@end
@implementation SCManagedCapturePreviewLayerController {
SCManagedCapturePreviewView *_view;
CGSize _drawableSize;
SCQueuePerformer *_performer;
FBKVOController *_renderingKVO;
#if !TARGET_IPHONE_SIMULATOR
CAMetalLayer *_metalLayer;
id<MTLCommandQueue> _commandQueue;
id<MTLRenderPipelineState> _renderPipelineState;
CVMetalTextureCacheRef _textureCache;
dispatch_semaphore_t _commandBufferSemaphore;
// If the current view contains an outdated display (or any display)
BOOL _containOutdatedPreview;
// If we called empty outdated display already, but for some reason, hasn't emptied it yet.
BOOL _requireToFlushOutdatedPreview;
NSMutableSet *_tokenSet;
NSUInteger _cannotAcquireDrawable;
#endif
}
+ (instancetype)sharedInstance
{
static dispatch_once_t onceToken;
static SCManagedCapturePreviewLayerController *managedCapturePreviewLayerController;
dispatch_once(&onceToken, ^{
managedCapturePreviewLayerController = [[SCManagedCapturePreviewLayerController alloc] init];
});
return managedCapturePreviewLayerController;
}
- (instancetype)init
{
self = [super init];
if (self) {
#if !TARGET_IPHONE_SIMULATOR
// We only allow one render at a time (sorry, no double / triple buffering).
// It has to be created early here, otherwise integrity of other parts of the code is not
// guaranteed.
// TODO: I need to reason more about the initialization sequence.
_commandBufferSemaphore = dispatch_semaphore_create(1);
// Set _renderSuspended to be YES so that we won't render until it is fully setup.
_renderSuspended = YES;
_tokenSet = [NSMutableSet set];
#endif
// If the screen is smaller than the default size, we should fall back.
CGFloat nativeScale = [UIScreen mainScreen].nativeScale;
CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
CGSize renderSize = [SCDeviceName isIphoneX] ? kSCManagedCapturePreviewRenderSize1080p
: kSCManagedCapturePreviewDefaultRenderSize;
if (screenSize.width * nativeScale < renderSize.width) {
_drawableSize = CGSizeMake(screenSize.width * nativeScale, screenSize.height * nativeScale);
} else {
_drawableSize = SCSizeIntegral(
SCSizeCropToAspectRatio(renderSize, SCSizeGetAspectRatio(SCManagedCapturerAllScreenSize())));
}
_performer = [[SCQueuePerformer alloc] initWithLabel:"SCManagedCapturePreviewLayerController"
qualityOfService:QOS_CLASS_USER_INITIATED
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCoreCamera];
_renderingKVO = [[FBKVOController alloc] initWithObserver:self];
[_renderingKVO observe:self
keyPath:@keypath(self, renderSuspended)
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
block:^(id observer, id object, NSDictionary *change) {
BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue];
BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue];
if (oldValue != newValue) {
[[_delegate blackCameraDetectorForManagedCapturePreviewLayerController:self]
capturePreviewDidBecomeVisible:!newValue];
}
}];
}
return self;
}
- (void)pause
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCLogPreviewLayerInfo(@"pause Metal rendering performer waiting");
[_performer performAndWait:^() {
self.renderSuspended = YES;
}];
SCLogPreviewLayerInfo(@"pause Metal rendering performer finished");
#endif
}
- (void)resume
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCLogPreviewLayerInfo(@"resume Metal rendering performer waiting");
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"resume Metal rendering performer finished");
#endif
}
- (void)setupPreviewLayer
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
if (!_metalLayer) {
_metalLayer = [CAMetalLayer new];
SCLogPreviewLayerInfo(@"setup metalLayer:%@", _metalLayer);
if (!_view) {
// Create capture preview view and setup the metal layer
[self view];
} else {
[_view setupMetalLayer:_metalLayer];
}
}
#endif
}
- (UIView *)newStandInViewWithRect:(CGRect)rect
{
return [self.view resizableSnapshotViewFromRect:rect afterScreenUpdates:YES withCapInsets:UIEdgeInsetsZero];
}
- (void)setupRenderPipeline
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCAssertNotMainThread();
id<MTLDevice> device = SCGetManagedCaptureMetalDevice();
id<MTLLibrary> shaderLibrary = [device newDefaultLibrary];
_commandQueue = [device newCommandQueue];
MTLRenderPipelineDescriptor *renderPipelineDescriptor = [MTLRenderPipelineDescriptor new];
renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;
renderPipelineDescriptor.vertexFunction = [shaderLibrary newFunctionWithName:@"yuv_vertex_reshape"];
renderPipelineDescriptor.fragmentFunction = [shaderLibrary newFunctionWithName:@"yuv_fragment_texture"];
MTLVertexDescriptor *vertexDescriptor = [MTLVertexDescriptor vertexDescriptor];
vertexDescriptor.attributes[0].format = MTLVertexFormatFloat2; // position
vertexDescriptor.attributes[0].offset = 0;
vertexDescriptor.attributes[0].bufferIndex = 0;
vertexDescriptor.attributes[1].format = MTLVertexFormatFloat2; // texCoords
vertexDescriptor.attributes[1].offset = 2 * sizeof(float);
vertexDescriptor.attributes[1].bufferIndex = 0;
vertexDescriptor.layouts[0].stepRate = 1;
vertexDescriptor.layouts[0].stepFunction = MTLVertexStepFunctionPerVertex;
vertexDescriptor.layouts[0].stride = 4 * sizeof(float);
renderPipelineDescriptor.vertexDescriptor = vertexDescriptor;
_renderPipelineState = [device newRenderPipelineStateWithDescriptor:renderPipelineDescriptor error:nil];
CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textureCache);
_metalLayer.device = device;
_metalLayer.drawableSize = _drawableSize;
_metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;
_metalLayer.framebufferOnly = YES; // It defaults to YES.
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"did setup render pipeline");
#endif
}
- (UIView *)view
{
SCTraceStart();
SCAssertMainThread();
if (!_view) {
#if TARGET_IPHONE_SIMULATOR
_view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds
aspectRatio:SCSizeGetAspectRatio(_drawableSize)
metalLayer:nil];
#else
_view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds
aspectRatio:SCSizeGetAspectRatio(_drawableSize)
metalLayer:_metalLayer];
SCLogPreviewLayerInfo(@"created SCManagedCapturePreviewView:%@", _view);
#endif
}
return _view;
}
- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer
{
SCTraceStart();
SCLogPreviewLayerInfo(@"setManagedCapturer:%@", managedCapturer);
if (SCDeviceSupportsMetal()) {
[managedCapturer addSampleBufferDisplayController:self context:SCCapturerContext];
}
[managedCapturer addListener:self];
}
- (void)applicationDidEnterBackground
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCLogPreviewLayerInfo(@"applicationDidEnterBackground waiting for performer");
[_performer performAndWait:^() {
CVMetalTextureCacheFlush(_textureCache, 0);
[_tokenSet removeAllObjects];
self.renderSuspended = YES;
}];
SCLogPreviewLayerInfo(@"applicationDidEnterBackground signal performer finishes");
#endif
}
- (void)applicationWillResignActive
{
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCTraceStart();
SCAssertMainThread();
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"pause Metal rendering");
[_performer performAndWait:^() {
self.renderSuspended = YES;
}];
#endif
}
- (void)applicationDidBecomeActive
{
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCTraceStart();
SCAssertMainThread();
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"resume Metal rendering waiting for performer");
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"resume Metal rendering performer finished");
#endif
}
- (void)applicationWillEnterForeground
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCLogPreviewLayerInfo(@"applicationWillEnterForeground waiting for performer");
[_performer performAndWait:^() {
self.renderSuspended = NO;
if (_containOutdatedPreview && _tokenSet.count == 0) {
[self _flushOutdatedPreview];
}
}];
SCLogPreviewLayerInfo(@"applicationWillEnterForeground performer finished");
#endif
}
- (NSString *)keepDisplayingOutdatedPreview
{
SCTraceStart();
NSString *token = [NSData randomBase64EncodedStringOfLength:8];
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
[_tokenSet addObject:token];
}];
SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview performer finished");
#endif
return token;
}
- (void)endDisplayingOutdatedPreview:(NSString *)keepToken
{
#if !TARGET_IPHONE_SIMULATOR
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
// I simply use a lock for this. If it becomes a bottleneck, I can figure something else out.
SCTraceStart();
SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
[_tokenSet removeObject:keepToken];
if (_tokenSet.count == 0 && _requireToFlushOutdatedPreview && _containOutdatedPreview && !_renderSuspended) {
[self _flushOutdatedPreview];
}
}];
SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview performer finished");
#endif
}
#pragma mark - SCManagedSampleBufferDisplayController
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
#if !TARGET_IPHONE_SIMULATOR
// Just drop the frame if it is rendering.
SC_GUARD_ELSE_RUN_AND_RETURN_VALUE(dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) == 0,
SCLogPreviewLayerInfo(@"waiting for commandBufferSemaphore signaled"), );
// Just drop the frame, simple.
[_performer performAndWait:^() {
if (_renderSuspended) {
SCLogGeneralInfo(@"Preview rendering suspends and current sample buffer is dropped");
dispatch_semaphore_signal(_commandBufferSemaphore);
return;
}
@autoreleasepool {
const BOOL isFirstPreviewFrame = !_containOutdatedPreview;
if (isFirstPreviewFrame) {
// Signal that we received the first frame (otherwise this will be YES already).
SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
sc_create_g2s_ticket_f func = [_delegate g2sTicketForManagedCapturePreviewLayerController:self];
SCG2SActivateManiphestTicketQueueWithTicketCreationFunction(func);
}
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
size_t pixelWidth = CVPixelBufferGetWidth(imageBuffer);
size_t pixelHeight = CVPixelBufferGetHeight(imageBuffer);
id<MTLTexture> yTexture =
SCMetalTextureFromPixelBuffer(imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);
id<MTLTexture> cbCrTexture =
SCMetalTextureFromPixelBuffer(imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);
CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
SC_GUARD_ELSE_RUN_AND_RETURN(yTexture && cbCrTexture, dispatch_semaphore_signal(_commandBufferSemaphore));
id<MTLCommandBuffer> commandBuffer = _commandQueue.commandBuffer;
id<CAMetalDrawable> drawable = _metalLayer.nextDrawable;
if (!drawable) {
// Count how many times I cannot acquire drawable.
++_cannotAcquireDrawable;
if (_cannotAcquireDrawable >= kSCMetalCannotAcquireDrawableLimit) {
// Calling [_metalLayer discardContents] to flush the CAImageQueue
SCLogGeneralInfo(@"Cannot acquire drawable, reboot Metal ..");
[_metalLayer sc_secretFeature];
}
dispatch_semaphore_signal(_commandBufferSemaphore);
return;
}
_cannotAcquireDrawable = 0; // Reset to 0 in case we can acquire drawable.
MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new];
renderPassDescriptor.colorAttachments[0].texture = drawable.texture;
id<MTLRenderCommandEncoder> renderEncoder =
[commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
[renderEncoder setRenderPipelineState:_renderPipelineState];
[renderEncoder setFragmentTexture:yTexture atIndex:0];
[renderEncoder setFragmentTexture:cbCrTexture atIndex:1];
// TODO: Prob this out of the image buffer.
// 90 clock-wise rotated texture coordinate.
// Also do aspect fill.
float normalizedHeight, normalizedWidth;
if (pixelWidth * _drawableSize.width > _drawableSize.height * pixelHeight) {
normalizedHeight = 1.0;
normalizedWidth = pixelWidth * (_drawableSize.width / pixelHeight) / _drawableSize.height;
} else {
normalizedHeight = pixelHeight * (_drawableSize.height / pixelWidth) / _drawableSize.width;
normalizedWidth = 1.0;
}
const float vertices[] = {
-normalizedHeight, -normalizedWidth, 1, 1, // lower left -> upper right
normalizedHeight, -normalizedWidth, 1, 0, // lower right -> lower right
-normalizedHeight, normalizedWidth, 0, 1, // upper left -> upper left
normalizedHeight, normalizedWidth, 0, 0, // upper right -> lower left
};
[renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4];
[renderEncoder endEncoding];
// I need to set a minimum duration for the drawable.
// There is a bug on iOS 10.3: if I present as soon as I can, I keep the GPU
// at 30fps even when you swipe between views, which causes undesirable visual jarring.
// By setting a minimum duration, even an incredibly small one (I tried 10ms, and 60fps works here),
// the OS seems to adjust the frame rate much better when swiping.
// This method is new in iOS 10.3.
if ([commandBuffer respondsToSelector:@selector(presentDrawable:afterMinimumDuration:)]) {
[(id)commandBuffer presentDrawable:drawable afterMinimumDuration:(1.0 / 60)];
} else {
[commandBuffer presentDrawable:drawable];
}
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {
dispatch_semaphore_signal(_commandBufferSemaphore);
}];
if (isFirstPreviewFrame) {
if ([drawable respondsToSelector:@selector(addPresentedHandler:)] &&
[drawable respondsToSelector:@selector(presentedTime)]) {
[(id)drawable addPresentedHandler:^(id<MTLDrawable> presentedDrawable) {
SCGhostToSnappableSignalDidRenderFirstPreviewFrame([(id)presentedDrawable presentedTime]);
}];
} else {
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {
// Using CACurrentMediaTime to approximate.
SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
}];
}
}
// We enqueued a sample buffer to display; therefore, the view contains an outdated display (to be cleaned up).
_containOutdatedPreview = YES;
[commandBuffer commit];
}
}];
#endif
}
- (void)flushOutdatedPreview
{
SCTraceStart();
#if !TARGET_IPHONE_SIMULATOR
// This method cannot drop frames (otherwise we will have residual on the screen).
SCLogPreviewLayerInfo(@"flushOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
_requireToFlushOutdatedPreview = YES;
SC_GUARD_ELSE_RETURN(!_renderSuspended);
// Have to make sure we have no token left before return.
SC_GUARD_ELSE_RETURN(_tokenSet.count == 0);
[self _flushOutdatedPreview];
}];
SCLogPreviewLayerInfo(@"flushOutdatedPreview performer finished");
#endif
}
- (void)_flushOutdatedPreview
{
SCTraceStart();
SCAssertPerformer(_performer);
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"flushOutdatedPreview containOutdatedPreview:%d", _containOutdatedPreview);
// I don't care whether renderSuspended is set or not, assuming I did the right thing.
// Once emptied, there is no need to do this again on foregrounding.
SC_GUARD_ELSE_RETURN(_containOutdatedPreview);
_containOutdatedPreview = NO;
_requireToFlushOutdatedPreview = NO;
[_metalLayer sc_secretFeature];
#endif
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
SCTraceStart();
SCAssertMainThread();
// Force to load the view
[self view];
_view.videoPreviewLayer = videoPreviewLayer;
SCLogPreviewLayerInfo(@"didChangeVideoPreviewLayer:%@", videoPreviewLayer);
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
SCTraceStart();
SCAssertMainThread();
// Force to load the view
[self view];
_view.videoPreviewGLView = videoPreviewGLView;
SCLogPreviewLayerInfo(@"didChangeVideoPreviewGLView:%@", videoPreviewGLView);
}
@end
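The vertex math in -enqueueSampleBuffer: performs a 90-degree rotation plus aspect fill: one of the two normalized extents stays at 1.0 while the other grows past 1.0, and clip-space coordinates beyond ±1 are cropped by the viewport. A standalone sketch of the same extent computation (hypothetical helper; the variable names follow the original code, and the axis interpretation follows the rotated vertex layout above):
// Hypothetical helper mirroring the aspect-fill extents computed in -enqueueSampleBuffer:.
static void SCAspectFillExtents(size_t pixelWidth, size_t pixelHeight, CGSize drawableSize,
                                float *normalizedWidth, float *normalizedHeight)
{
    if (pixelWidth * drawableSize.width > drawableSize.height * pixelHeight) {
        // Rotated buffer overflows along one axis of the drawable; that extent exceeds 1 and is cropped.
        *normalizedHeight = 1.0;
        *normalizedWidth = pixelWidth * (drawableSize.width / pixelHeight) / drawableSize.height;
    } else {
        *normalizedHeight = pixelHeight * (drawableSize.height / pixelWidth) / drawableSize.width;
        *normalizedWidth = 1.0;
    }
}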

25
ManagedCapturer/SCManagedCapturePreviewView.h

@ -0,0 +1,25 @@
//
// SCManagedCapturePreviewView.h
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@class LSAGLView;
@interface SCManagedCapturePreviewView : UIView
- (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE;
- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer;
// This method is called only once, when the metalLayer was previously nil.
- (void)setupMetalLayer:(CALayer *)metalLayer;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) LSAGLView *videoPreviewGLView;
@end

173
ManagedCapturer/SCManagedCapturePreviewView.m

@ -0,0 +1,173 @@
//
// SCManagedCapturePreviewView.m
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewView.h"
#import "SCCameraTweaks.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCapturePreviewViewDebugView.h"
#import "SCMetalUtils.h"
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <Looksery/LSAGLView.h>
@implementation SCManagedCapturePreviewView {
CGFloat _aspectRatio;
CALayer *_containerLayer;
CALayer *_metalLayer;
SCManagedCapturePreviewViewDebugView *_debugView;
}
- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer
{
SCTraceStart();
SCAssertMainThread();
self = [super initWithFrame:frame];
if (self) {
_aspectRatio = aspectRatio;
if (SCDeviceSupportsMetal()) {
[CATransaction begin];
[CATransaction setDisableActions:YES];
_metalLayer = metalLayer;
_metalLayer.frame = [self _layerFrame];
[self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];
[CATransaction commit];
} else {
_containerLayer = [[CALayer alloc] init];
_containerLayer.frame = [self _layerFrame];
// Use a container layer so that software zooming happens on this layer
[self.layer insertSublayer:_containerLayer below:[self.layer sublayers][0]];
}
if ([self _shouldShowDebugView]) {
_debugView = [[SCManagedCapturePreviewViewDebugView alloc] init];
[self addSubview:_debugView];
}
}
return self;
}
- (void)_layoutVideoPreviewLayer
{
SCAssertMainThread();
[CATransaction begin];
[CATransaction setDisableActions:YES];
if (SCDeviceSupportsMetal()) {
_metalLayer.frame = [self _layerFrame];
} else {
if (_videoPreviewLayer) {
SCLogGeneralInfo(@"container layer frame %@, video preview layer frame %@",
NSStringFromCGRect(_containerLayer.frame), NSStringFromCGRect(_videoPreviewLayer.frame));
}
// Using bounds because we don't really care about the position at this point.
_containerLayer.frame = [self _layerFrame];
_videoPreviewLayer.frame = _containerLayer.bounds;
_videoPreviewLayer.position =
CGPointMake(CGRectGetWidth(_containerLayer.bounds) * 0.5, CGRectGetHeight(_containerLayer.bounds) * 0.5);
}
[CATransaction commit];
}
- (void)_layoutVideoPreviewGLView
{
SCCAssertMainThread();
_videoPreviewGLView.frame = [self _layerFrame];
}
- (CGRect)_layerFrame
{
CGRect frame = SCRectMakeWithCenterAndSize(
SCRectGetMid(self.bounds), SCSizeIntegral(SCSizeExpandToAspectRatio(self.bounds.size, _aspectRatio)));
CGFloat x = frame.origin.x;
x = isnan(x) ? 0.0 : (isfinite(x) ? x : INFINITY);
CGFloat y = frame.origin.y;
y = isnan(y) ? 0.0 : (isfinite(y) ? y : INFINITY);
CGFloat width = frame.size.width;
width = isnan(width) ? 0.0 : (isfinite(width) ? width : INFINITY);
CGFloat height = frame.size.height;
height = isnan(height) ? 0.0 : (isfinite(height) ? height : INFINITY);
return CGRectMake(x, y, width, height);
}
- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
SCAssertMainThread();
if (_videoPreviewLayer != videoPreviewLayer) {
[_videoPreviewLayer removeFromSuperlayer];
_videoPreviewLayer = videoPreviewLayer;
[_containerLayer addSublayer:_videoPreviewLayer];
[self _layoutVideoPreviewLayer];
}
}
- (void)setupMetalLayer:(CALayer *)metalLayer
{
SCAssert(!_metalLayer, @"_metalLayer should be nil.");
SCAssert(metalLayer, @"metalLayer must exists.");
SCAssertMainThread();
_metalLayer = metalLayer;
[self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];
[self _layoutVideoPreviewLayer];
}
- (void)setVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
SCAssertMainThread();
if (_videoPreviewGLView != videoPreviewGLView) {
[_videoPreviewGLView removeFromSuperview];
_videoPreviewGLView = videoPreviewGLView;
[self addSubview:_videoPreviewGLView];
[self _layoutVideoPreviewGLView];
}
}
#pragma mark - Overridden methods
- (void)layoutSubviews
{
SCAssertMainThread();
[super layoutSubviews];
[self _layoutVideoPreviewLayer];
[self _layoutVideoPreviewGLView];
[self _layoutDebugViewIfNeeded];
}
- (void)setHidden:(BOOL)hidden
{
SCAssertMainThread();
[super setHidden:hidden];
if (hidden) {
SCLogGeneralInfo(@"[SCManagedCapturePreviewView] - isHidden is being set to YES");
}
}
#pragma mark - Debug View
- (BOOL)_shouldShowDebugView
{
// Only show the debug view in internal builds when the tweak settings are turned on.
return SCIsInternalBuild() &&
(SCCameraTweaksEnableFocusPointObservation() || SCCameraTweaksEnableExposurePointObservation());
}
- (void)_layoutDebugViewIfNeeded
{
SCAssertMainThread();
SC_GUARD_ELSE_RETURN([self _shouldShowDebugView]);
_debugView.frame = self.bounds;
[self bringSubviewToFront:_debugView];
}
@end

14
ManagedCapturer/SCManagedCapturePreviewViewDebugView.h

@ -0,0 +1,14 @@
//
// SCManagedCapturePreviewViewDebugView.h
// Snapchat
//
// Created by Jiyang Zhu on 1/19/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@interface SCManagedCapturePreviewViewDebugView : UIView
@end

204
ManagedCapturer/SCManagedCapturePreviewViewDebugView.m

@ -0,0 +1,204 @@
//
// SCManagedCapturePreviewViewDebugView.m
// Snapchat
//
// Created by Jiyang Zhu on 1/19/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewViewDebugView.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/UIFont+AvenirNext.h>
@import CoreText;
static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth = 1.0;
static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairWidth = 20.0;
@interface SCManagedCapturePreviewViewDebugView () <SCManagedCapturerListener>
@property (assign, nonatomic) CGPoint focusPoint;
@property (assign, nonatomic) CGPoint exposurePoint;
@property (strong, nonatomic) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@end
@implementation SCManagedCapturePreviewViewDebugView
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
self.userInteractionEnabled = NO;
self.backgroundColor = [UIColor clearColor];
_focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
_exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
[[SCManagedCapturer sharedInstance] addListener:self];
}
return self;
}
- (void)drawRect:(CGRect)rect
{
CGContextRef context = UIGraphicsGetCurrentContext();
if (self.focusPoint.x > 0 || self.focusPoint.y > 0) {
[self _drawCrossHairAtPoint:self.focusPoint inContext:context withColor:[UIColor greenColor] isXShaped:YES];
}
if (self.exposurePoint.x > 0 || self.exposurePoint.y > 0) {
[self _drawCrossHairAtPoint:self.exposurePoint inContext:context withColor:[UIColor yellowColor] isXShaped:NO];
}
if (self.faceBoundsByFaceID.count > 0) {
[self.faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceRect = [obj CGRectValue];
NSInteger faceID = [key integerValue];
[self _drawRectangle:faceRect
text:[NSString sc_stringWithFormat:@"ID: %@", key]
inContext:context
withColor:[UIColor colorWithRed:((faceID % 3) == 0)
green:((faceID % 3) == 1)
blue:((faceID % 3) == 2)
alpha:1.0]];
}];
}
}
- (void)dealloc
{
[[SCManagedCapturer sharedInstance] removeListener:self];
}
/**
Draw a crosshair with center point, context, color and shape.
@param isXShaped "X" or "+"
*/
- (void)_drawCrossHairAtPoint:(CGPoint)center
inContext:(CGContextRef)context
withColor:(UIColor *)color
isXShaped:(BOOL)isXShaped
{
CGFloat width = kSCManagedCapturePreviewViewDebugViewCrossHairWidth;
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);
CGContextBeginPath(context);
if (isXShaped) {
CGContextMoveToPoint(context, center.x - width / 2, center.y - width / 2);
CGContextAddLineToPoint(context, center.x + width / 2, center.y + width / 2);
CGContextMoveToPoint(context, center.x + width / 2, center.y - width / 2);
CGContextAddLineToPoint(context, center.x - width / 2, center.y + width / 2);
} else {
CGContextMoveToPoint(context, center.x - width / 2, center.y);
CGContextAddLineToPoint(context, center.x + width / 2, center.y);
CGContextMoveToPoint(context, center.x, center.y - width / 2);
CGContextAddLineToPoint(context, center.x, center.y + width / 2);
}
CGContextStrokePath(context);
}
/**
Draw a rectangle, with a text on the top left.
*/
- (void)_drawRectangle:(CGRect)rect text:(NSString *)text inContext:(CGContextRef)context withColor:(UIColor *)color
{
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);
CGContextBeginPath(context);
CGContextMoveToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMaxY(rect));
CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMaxY(rect));
CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMinY(rect));
CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
NSMutableParagraphStyle *textStyle = [[NSMutableParagraphStyle alloc] init];
textStyle.alignment = NSTextAlignmentLeft;
NSDictionary *attributes = @{
NSFontAttributeName : [UIFont boldSystemFontOfSize:16],
NSForegroundColorAttributeName : color,
NSParagraphStyleAttributeName : textStyle
};
[text drawInRect:rect withAttributes:attributes];
CGContextStrokePath(context);
}
- (CGPoint)_convertPointOfInterest:(CGPoint)point
{
SCAssertMainThread();
CGPoint convertedPoint =
CGPointMake((1 - point.y) * CGRectGetWidth(self.bounds), point.x * CGRectGetHeight(self.bounds));
if ([[SCManagedCapturer sharedInstance] isVideoMirrored]) {
convertedPoint.x = CGRectGetWidth(self.bounds) - convertedPoint.x;
}
return convertedPoint;
}
- (NSDictionary<NSNumber *, NSValue *> *)_convertFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCAssertMainThread();
NSMutableDictionary<NSNumber *, NSValue *> *convertedFaceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceBoundsByFaceID.count];
for (NSNumber *key in faceBoundsByFaceID.allKeys) {
CGRect faceBounds = [[faceBoundsByFaceID objectForKey:key] CGRectValue];
CGRect convertedBounds = CGRectMake(CGRectGetMinY(faceBounds) * CGRectGetWidth(self.bounds),
CGRectGetMinX(faceBounds) * CGRectGetHeight(self.bounds),
CGRectGetHeight(faceBounds) * CGRectGetWidth(self.bounds),
CGRectGetWidth(faceBounds) * CGRectGetHeight(self.bounds));
if (![[SCManagedCapturer sharedInstance] isVideoMirrored]) {
convertedBounds.origin.x = CGRectGetWidth(self.bounds) - CGRectGetMaxX(convertedBounds);
}
[convertedFaceBoundsByFaceID setObject:[NSValue valueWithCGRect:convertedBounds] forKey:key];
}
return convertedFaceBoundsByFaceID;
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.exposurePoint = [self _convertPointOfInterest:exposurePoint];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.focusPoint = [self _convertPointOfInterest:focusPoint];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.faceBoundsByFaceID = [self _convertFaceBounds:faceBoundsByFaceID];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.faceBoundsByFaceID = nil;
self.focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
self.exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
[self setNeedsDisplay];
});
}
@end
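The conversion in -_convertPointOfInterest: maps AVFoundation's normalized, landscape-oriented point of interest into the portrait debug view by swapping axes and flipping one of them, then mirroring horizontally when the video is mirrored (front camera). A standalone sketch of the same transform (hypothetical helper; the coordinate-space reading is an interpretation of the code above):
// Hypothetical standalone version of -_convertPointOfInterest:.
static CGPoint SCConvertPointOfInterestToViewSpace(CGPoint point, CGRect viewBounds, BOOL videoMirrored)
{
    CGPoint converted = CGPointMake((1 - point.y) * CGRectGetWidth(viewBounds),
                                    point.x * CGRectGetHeight(viewBounds));
    if (videoMirrored) {
        converted.x = CGRectGetWidth(viewBounds) - converted.x;
    }
    return converted;
}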

23
ManagedCapturer/SCManagedCapturer.h

@ -0,0 +1,23 @@
// SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 4/20/15.
#import "SCCapturer.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerUtils.h"
#import <Foundation/Foundation.h>
/*
SCManagedCapturer is a shell class. Its job is to provide a singleton instance that follows the protocol of
SCManagedCapturerImpl. The reason we use this pattern is that we are building SCManagedCapturerV2. This setup offers
the possibility for us to code V2 without breaking the existing app, and to test the new implementation via a Tweak.
*/
@interface SCManagedCapturer : NSObject
+ (id<SCCapturer>)sharedInstance;
@end