Browse Source

Add files via upload

pull/2/head
Jonny Banana 6 years ago
committed by GitHub
parent
commit
0cd25b9967
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
  1. 113
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.h
  2. 75
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.m
  3. 27
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.h
  4. 67
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.m
  5. 33
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer_Private.h
  6. 23
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationListener.h
  7. 46
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration_Private.h
  8. 59
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.h
  9. 56
      ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.m
  10. 42
      ManagedCapturer/CapturerV2/Core/SCCaptureCore.h
  11. 475
      ManagedCapturer/CapturerV2/Core/SCCaptureCore.m
  12. 47
      ManagedCapturer/ImageProcessing/SCDepthBlurMetalModule.metal
  13. 21
      ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.h
  14. 90
      ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.m
  15. 29
      ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalModule.metal
  16. 21
      ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.h
  17. 72
      ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.m
  18. 28
      ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.h
  19. 30
      ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.m
  20. 60
      ManagedCapturer/ImageProcessing/SCExposureAdjustMetalModule.metal
  21. 21
      ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.h
  22. 66
      ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.m
  23. 28
      ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.h
  24. 67
      ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.m
  25. 48
      ManagedCapturer/ImageProcessing/SCMetalModule.h
  26. 155
      ManagedCapturer/ImageProcessing/SCMetalModule.m
  27. 54
      ManagedCapturer/ImageProcessing/SCMetalTextureResource.h
  28. 215
      ManagedCapturer/ImageProcessing/SCMetalTextureResource.m
  29. 37
      ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalModule.metal
  30. 19
      ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.h
  31. 64
      ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.m
  32. 32
      ManagedCapturer/ImageProcessing/SCProcessingModule.h
  33. 22
      ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.h
  34. 84
      ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.m
  35. 23
      ManagedCapturer/ImageProcessing/SCProcessingPipeline.h
  36. 46
      ManagedCapturer/ImageProcessing/SCProcessingPipeline.m
  37. 29
      ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.h
  38. 57
      ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.m
  39. 23
      ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.h
  40. 68
      ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.m
  41. 103
      ManagedCapturer/StateMachine/SCCaptureBaseState.h
  42. 169
      ManagedCapturer/StateMachine/SCCaptureBaseState.m
  43. 30
      ManagedCapturer/StateMachine/SCCaptureStateDelegate.h
  44. 29
      ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h
  45. 63
      ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m
  46. 76
      ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h
  47. 301
      ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m
  48. 37
      ManagedCapturer/StateMachine/SCCaptureStateUtil.h
  49. 38
      ManagedCapturer/StateMachine/SCCaptureStateUtil.m
  50. 12
      ManagedCapturer/StateMachine/SCManagedCapturerLogging.h
  51. 22
      ManagedCapturer/StateMachine/States/SCCaptureImageState.h
  52. 65
      ManagedCapturer/StateMachine/States/SCCaptureImageState.m
  53. 29
      ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h
  54. 27
      ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m
  55. 22
      ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h
  56. 85
      ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m
  57. 29
      ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h
  58. 27
      ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m
  59. 22
      ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h
  60. 68
      ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m
  61. 22
      ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h
  62. 114
      ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m
  63. 41
      ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h
  64. 33
      ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m
  65. 22
      ManagedCapturer/StateMachine/States/SCCaptureRunningState.h
  66. 176
      ManagedCapturer/StateMachine/States/SCCaptureRunningState.m
  67. 18
      ManagedCapturer/StateMachine/States/SCCaptureScanningState.h
  68. 75
      ManagedCapturer/StateMachine/States/SCCaptureScanningState.m
  69. 26
      ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h
  70. 70
      ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m
  71. 22
      ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h
  72. 27
      ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m

113
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.h

@ -0,0 +1,113 @@
//
// SCCaptureConfiguration.h
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <Looksery/LSAGLView.h>
#import <Foundation/Foundation.h>
/*
SCCaptureConfiguration is the configuration class which is going to be used for customer to configure camera. This is
how to use it:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
// Conduct the setting here.
e.g:
configuration.torchActive = YES;
// Commit your configuration
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
Here are several interesting facts about SCCaptureConfiguration:
1) Though SCCaptureConfiguration has so many parameters, you don't need to care the parameters which you do not intend
to set. For example, if you only want to set night mode active, here is the code:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
That is it.
2) you can set multiple configuration settings, then commit, before you commit, nothing will happen, e.g.:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
configuration.zoomFactor = 5;
configuration.lensesActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
3) committing a configuration means the configuration is gone. If you set parameters on a configuration after it is
committed, it will crash on debug builds, and on other builds such as production, the setting will be ignored, e.g.:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
// The line below will crash on debug, and ignored on other builds.
configuration.zoomFactor = 5;
4) commiting a configuration is an atomic action. That means all changes customers want to have on camera will happen
in a group. If 2 customers commit at the same time, we will handle them one by one.
5) We are still figuring out what parameters should be in this configuration; parameters could be added or deleted
later. In the end, the configuration is going to be the only way customers configure the camera.
*/
@interface SCCaptureConfiguration : NSObject

// Session / capture state.
@property (nonatomic, assign) BOOL isRunning;
@property (nonatomic, assign) BOOL isNightModeActive;
@property (nonatomic, assign) BOOL lowLightCondition;
@property (nonatomic, assign) BOOL adjustingExposure;

// Device selection and zoom (see SCManagedCaptureDevice.h for positions).
@property (nonatomic, assign) SCManagedCaptureDevicePosition devicePosition;
@property (nonatomic, assign) CGFloat zoomFactor;

// Flash / torch.
// NOTE(review): flashSupported / torchSupported read as hardware capabilities
// rather than settable configuration — confirm they belong on this class.
@property (nonatomic, assign) BOOL flashSupported;
@property (nonatomic, assign) BOOL torchSupported;
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;

// Lenses / AR / live streaming.
@property (nonatomic, assign) BOOL lensesActive;
@property (nonatomic, assign) BOOL arSessionActive;
@property (nonatomic, assign) BOOL liveVideoStreaming;

// Preview output surfaces.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) LSAGLView *videoPreviewGLView;

// Capture session bookkeeping (see SCVideoCaptureSessionInfo.h).
@property (nonatomic, assign) SCVideoCaptureSessionInfo captureSessionInfo;

@end

75
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.m

@ -0,0 +1,75 @@
//
// SCCaptureConfiguration.m
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
#import "SCCaptureConfiguration.h"
#import "SCCaptureConfiguration_Private.h"
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureConfiguration () {
    // YES once the configuration has been committed; setters are ignored afterwards.
    BOOL _sealed;
    // Boxed SCCaptureConfigurationKey values for every property the customer set.
    NSMutableSet<SCCaptureConfigurationDirtyKey *> *_dirtyKeys;
}
@end

@implementation SCCaptureConfiguration

- (instancetype)init
{
    self = [super init];
    if (self) {
        _sealed = NO;
        _dirtyKeys = [NSMutableSet set];
    }
    return self;
}

// Custom setter: records the value and marks the key dirty so the configurator
// knows which parameters the customer intends to change.
- (void)setIsRunning:(BOOL)running
{
    if (![self _configurationSealed]) {
        _isRunning = running;
        [_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsRunning)];
    }
}

/*
 All set methods will be added later. They follow the format of setIsRunning.
 */

@end
// Category name matches the declaration in SCCaptureConfiguration_Private.h.
// (It was previously "privateMethods", inconsistent with the declared "internalMethods".)
@implementation SCCaptureConfiguration (internalMethods)

// Returns the boxed keys of every property the customer set.
// Reading dirty keys before the configuration is sealed means the configurator
// is consuming a configuration that is still being mutated — assert on debug.
- (NSArray *)dirtyKeys
{
    if (!_sealed && SCIsDebugBuild()) {
        SCAssert(NO, @"Configuration not sealed yet, setting is still happening!");
    }
    return [_dirtyKeys allObjects];
}

// Called by SCCaptureConfigurator when the configuration is committed; all
// subsequent setter calls are ignored (and assert on debug builds).
- (void)seal
{
    _sealed = YES;
}

// Returns YES (and asserts on debug builds) if a setter is called after commit.
- (BOOL)_configurationSealed
{
    if (_sealed) {
        if (SCIsDebugBuild()) {
            SCAssert(NO, @"Try to set property after commit configuration to configurator");
        }
        return YES;
    }
    return NO;
}

@end

27
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.h

@ -0,0 +1,27 @@
//
// SCCaptureConfigurationAnnouncer.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationListener.h"
#import <Foundation/Foundation.h>
/*
All APIs are thread safe. Announcer will not retain your object. So even if customer forgets to call remove listener,
it will not create zombie objects.
*/
@interface SCCaptureConfigurationAnnouncer : NSObject

/*
 When customer adds an object to be a listener, that object will receive an update of current truth. That is the chance
 for the object to do adjustment according to the current configuration of the camera.
 */
- (void)addListener:(id<SCCaptureConfigurationListener>)listener;

// Listeners are held weakly, so removal is not required for memory safety, but
// call this once the listener no longer wants configuration updates.
- (void)removeListener:(id<SCCaptureConfigurationListener>)listener;

@end

67
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.m

@ -0,0 +1,67 @@
//
// SCCaptureConfigurationAnnouncer.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCCaptureConfigurationAnnouncer_Private.h"
#import "SCCaptureConfigurator.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCPerforming.h>
@interface SCCaptureConfigurationAnnouncer () {
    // Weak-memory hash table: listeners are not retained, so a deallocated
    // listener is dropped automatically and never messaged.
    NSHashTable<id<SCCaptureConfigurationListener>> *_listeners;
    // Serial performer shared with the configurator; every mutation of and
    // notification to _listeners happens on it — that is the thread-safety story.
    SCQueuePerformer *_performer;
    // Weak back-reference; the configurator owns the announcer (avoids a cycle).
    __weak SCCaptureConfigurator *_configurator;
}
@end

@implementation SCCaptureConfigurationAnnouncer

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator
{
    self = [super init];
    if (self) {
        _listeners = [NSHashTable<id<SCCaptureConfigurationListener>> hashTableWithOptions:NSHashTableWeakMemory];
        SCAssert(performer, @"performer should not be nil");
        _performer = performer;
        _configurator = configurator;
    }
    return self;
}

// Registers the listener and immediately delivers the current configuration to
// it, so a new listener can synchronize itself with the current truth.
- (void)addListener:(id<SCCaptureConfigurationListener>)listener
{
    [_performer perform:^{
        SCAssert(listener, @"listener should not be nil");
        [_listeners addObject:listener];
        [listener captureConfigurationDidChangeTo:_configurator.currentConfiguration];
    }];
}

- (void)removeListener:(id<SCCaptureConfigurationListener>)listener
{
    [_performer perform:^{
        SCAssert(listener, @"listener should not be nil");
        [_listeners removeObject:listener];
    }];
}

// Called by the configurator, already on _performer (asserted), to fan a
// configuration change out to all registered listeners.
- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration
{
    SCAssertPerformer(_performer);
    for (id<SCCaptureConfigurationListener> listener in _listeners) {
        [listener captureConfigurationDidChangeTo:configuration];
    }
}

- (void)dealloc
{
    // Entries are weak, so this is belt-and-braces cleanup rather than required.
    [_listeners removeAllObjects];
}

@end

33
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer_Private.h

@ -0,0 +1,33 @@
//
// SCCaptureConfigurationAnnouncer_Private.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCQueuePerformer.h>
@class SCCaptureConfigurator;
/*
This private header is only going to be used by SCCaptureConfigurator. Other customers should only use the public
header.
*/
@interface SCCaptureConfigurationAnnouncer ()

/*
 The announcer is going to be instantiated by SCCaptureConfigurator. It will take in a queue performer. The design is
 that announcer and configurator is going to share the same serial queue to avoid racing. This is something we could
 change later.
 */
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator;

/*
 The API below is called by configurator to notify listener that configuration has changed.
 It must be invoked on the shared performer (asserted in the implementation).
 */
- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration;

@end

23
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationListener.h

@ -0,0 +1,23 @@
//
// SCCaptureConfigurationListener.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
#import "SCManagedCapturerState.h"
#import <Foundation/Foundation.h>
@class SCCaptureConfiguration;
/*
As a listener to configuration of camera core, you will get an update whenever the configuration changes, and you will
receive an immutable state object for the current truth.
*/
@protocol SCCaptureConfigurationListener <NSObject>

// Invoked with an immutable snapshot of the current camera configuration
// whenever it changes (delivered on the announcer's serial queue — see
// SCCaptureConfigurationAnnouncer).
- (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state;

@end

46
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration_Private.h

@ -0,0 +1,46 @@
//
// SCCaptureConfiguration_Private.h
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
// Fixed: this header previously imported ITSELF ("SCCaptureConfiguration_Private.h"),
// which is a no-op and left SCCaptureConfiguration undeclared for the category below.
#import "SCCaptureConfiguration.h"

// Boxed SCCaptureConfigurationKey values stored in the configuration's dirty-key set.
typedef NSNumber SCCaptureConfigurationDirtyKey;

/*
 The key values to identify dirty keys in SCCaptureConfiguration.
 A dirty key is a key whose value the customer changed.
 e.g. if the customer toggles device position, the dirty keys will contain
 SCCaptureConfigurationKeyDevicePosition.
 The list is not complete, and it is only a draft now. It will be gradually tuned while we work on the APIs.
 */
typedef NS_ENUM(NSUInteger, SCCaptureConfigurationKey) {
    SCCaptureConfigurationKeyIsRunning,
    SCCaptureConfigurationKeyIsNightModeActive,
    SCCaptureConfigurationKeyLowLightCondition,
    SCCaptureConfigurationKeyDevicePosition,
    SCCaptureConfigurationKeyZoomFactor,
    SCCaptureConfigurationKeyFlashActive,
    SCCaptureConfigurationKeyTorchActive,
    SCCaptureConfigurationKeyARSessionActive,
    SCCaptureConfigurationKeyLensesActive,
    SCCaptureConfigurationKeyVideoRecording,
};

// Internal API, only to be used by SCCaptureConfigurator.
@interface SCCaptureConfiguration (internalMethods)

// Return dirtyKeys, which identify the parameters the customer wants to set.
- (NSArray *)dirtyKeys;

// Called by SCCaptureConfigurator to seal a configuration, so future changes are ignored.
- (void)seal;

- (BOOL)_configurationSealed;

@end

59
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.h

@ -0,0 +1,59 @@
//
// SCCaptureConfigurator.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfiguration.h"
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <Looksery/LSAGLView.h>
#import <Foundation/Foundation.h>
/*
SCCaptureConfigurator is the class you use to config the setting of the camera hardware. Such as setting the camera to
be front or back, setting camera hardware to be certain resolution, or to activate night mode.
You can use this class for many things:
a) do a one-time query to check out the current camera configuration via the currentConfiguration property.
Note that we represent configuration via id<SCManagedCapturerState>. It is going to be an immutable object.
b) register to be the listener of the configuration change via the announcer.
Every time a camera configuration change, you will receive an update.
c) set the configuration via commitConfiguration API. You convey your setting intention via SCCaptureConfiguration.
You can register a completionHandler to be called after your configuration gets done.
Inside the completionHandler, we will pass you an error if one occurred, along with a boolean cameraChanged
indicating whether the camera hardware was actually changed. If your configuration already equals the current
configuration of the camera, we will not change the camera.
d) All APIs are thread safe.
*/
// Completion for commitConfiguration:completionHandler:. error is nil on success;
// cameraChanged is intended to report whether the camera hardware was altered
// (exact semantics still being finalized — see header comment above).
typedef void (^SCCaptureConfigurationCompletionHandler)(NSError *error, BOOL cameraChanged);

@interface SCCaptureConfigurator : NSObject

// Register here to observe configuration changes.
@property (nonatomic, strong, readonly) SCCaptureConfigurationAnnouncer *announcer;

// Immutable snapshot of the current camera configuration.
@property (nonatomic, strong, readonly) id<SCManagedCapturerState> currentConfiguration;

- (instancetype)init NS_UNAVAILABLE;

// Designated initializer. The performer is the serial queue shared with the announcer.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer;

// Atomically applies every dirty key of configuration. The configuration is
// sealed by this call; mutating it afterwards asserts on debug builds.
- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler;

@end

56
ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.m

@ -0,0 +1,56 @@
//
// SCCaptureConfigurator.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurator.h"
#import "SCCaptureConfigurationAnnouncer_Private.h"
#import "SCCaptureConfiguration_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureConfigurator () {
    // Serial performer shared with _announcer; serializes all commits.
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureConfigurator

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
{
    self = [super init];
    if (self) {
        _announcer = [[SCCaptureConfigurationAnnouncer alloc] initWithPerformer:performer configurator:self];
        _performer = performer;
        // TODO: initialize _currentConfiguration
    }
    return self;
}

- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler
{
    // Validate before touching the configuration: previously the assert ran
    // inside the async block, after [configuration seal] had already messaged a
    // potentially-nil configuration — too late to identify the caller.
    SCAssert(configuration, @"Configuration must be a valid input parameter");
    // Seal synchronously so any set issued after commit is detected (debug) or
    // ignored (production) immediately, not only once the block runs.
    [configuration seal];
    [_performer perform:^() {
        // Apply each dirty key one by one; the whole block is one atomic commit
        // from the point of view of other customers sharing _performer.
        NSArray<SCCaptureConfigurationDirtyKey *> *dirtyKeys = [configuration dirtyKeys];
        for (SCCaptureConfigurationDirtyKey *key in dirtyKeys) {
            [self _processKey:[key integerValue] configuration:configuration];
        }
        if (completionHandler) {
            // TODO: pass the real error / cameraChanged values.
            // (nil, not NULL: the first parameter is an NSError object.)
            completionHandler(nil, YES);
        }
    }];
}

// Tune the hardware depending on which key is dirty and the corresponding value
// inside configuration. Not implemented yet.
- (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration
{
}

@end

42
ManagedCapturer/CapturerV2/Core/SCCaptureCore.h

@ -0,0 +1,42 @@
//
// SCCaptureCore.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureStateMachineContext.h"
#import "SCCapturer.h"
#import <SCFoundation/SCPerforming.h>
#import <Foundation/Foundation.h>
@class SCCaptureConfigurator;
/*
SCCaptureCore abstracts away the hardware aspect of a camera. SCCaptureCore is the V2 version of the
SCManagedCapturerV1.
SCCaptureCore itself does very little things actually. Its main job is to expose APIs of camera hardware to outside
customers. The actual heavy lifting is done via delegating the jobs to multiple worker classes.
We generally categorize the operation of camera hardware into 2 categories:
1) make camera hardware do state transition. Such as what is shown in this graph:
https://docs.google.com/presentation/d/1KWk-XSgO0wFAjBZXsl_OnHBGpi_pd9-ds6Wje8vX-0s/edit#slide=id.g2017e46295_1_10
2) config camera hardware setting, such as setting the camera to be front or back, such as setting camera hardware to
be certain resolution, or to activate night mode.
Indeed, we create 2 working classes to do the heavy lifting. Both of them are under construction. Feel free to checkout
SCCaptureConfigurator, which is responsible for 2).
*/
@interface SCCaptureCore : NSObject <SCCapturer>

// Worker that handles all capture-state-transition APIs; configuration work is
// handled by SCCaptureConfigurator (forward-declared above).
@property (nonatomic, strong, readonly) SCCaptureStateMachineContext *stateMachine;

@end

475
ManagedCapturer/CapturerV2/Core/SCCaptureCore.m

@ -0,0 +1,475 @@
//
// SCCaptureCore.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureCore.h"
#import "SCCaptureDeviceAuthorizationChecker.h"
#import "SCCaptureResource.h"
#import "SCCaptureWorker.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCapturerGLViewManagerAPI.h"
#import "SCManagedCapturerLSAComponentTrackerAPI.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>
static const char *kSCCaptureDeviceAuthorizationManagerQueueLabel =
"com.snapchat.capture_device_authorization_checker_queue";
@implementation SCCaptureCore {
    SCManagedCapturerV1 *_managedCapturerV1;  // V1 capturer most APIs delegate to
    SCQueuePerformer *_queuePerformer;        // shared with V1 (resource.queuePerformer)
    SCCaptureDeviceAuthorizationChecker *_authorizationChecker;
}

@synthesize blackCameraDetector = _blackCameraDetector;
- (instancetype)init
{
    SCTraceStart();
    SCAssertMainThread();
    self = [super init];
    if (self) {
        // SCCaptureCore wraps V1 rather than replacing it: it shares V1's
        // capture resource and serial queue so both see the same hardware state.
        _managedCapturerV1 = [SCManagedCapturerV1 sharedInstance];
        SCCaptureResource *resource = _managedCapturerV1.captureResource;
        _queuePerformer = resource.queuePerformer;
        _stateMachine = [[SCCaptureStateMachineContext alloc] initWithResource:resource];
        // Authorization checks run on their own serial queue, separate from the
        // camera queue.
        SCQueuePerformer *authorizationCheckPerformer =
            [[SCQueuePerformer alloc] initWithLabel:kSCCaptureDeviceAuthorizationManagerQueueLabel
                                   qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                          queueType:DISPATCH_QUEUE_SERIAL
                                            context:SCQueuePerformerContextCamera];
        _authorizationChecker =
            [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:authorizationCheckPerformer];
    }
    return self;
}
- (id<SCManagedCapturerLensAPI>)lensProcessingCore
{
    return _managedCapturerV1.lensProcessingCore;
}

// For APIs inside protocol SCCapture, if they are related to capture state machine, we delegate to state machine.
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                            completionHandler:(dispatch_block_t)completionHandler
                                      context:(NSString *)context
{
    [_stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition
                                                   completionHandler:completionHandler
                                                             context:context];
}

// Returns a token the caller holds and later passes to stopRunningAsynchronously:.
- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                             context:(NSString *)context
{
    return [_stateMachine startRunningWithContext:context completionHandler:completionHandler];
}
#pragma mark - Recording / Capture

// All recording/capture entry points below are state-machine related and are
// forwarded to _stateMachine unchanged.
- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_stateMachine captureStillImageAsynchronouslyWithAspectRatio:aspectRatio
                                                 captureSessionID:captureSessionID
                                                completionHandler:completionHandler
                                                          context:context];
}

- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                          context:(NSString *)context
{
    [_stateMachine stopRunningWithCapturerToken:token completionHandler:completionHandler context:context];
}

// Delayed-stop variant; the delay is forwarded as-is.
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                            after:(NSTimeInterval)delay
                          context:(NSString *)context
{
    [_stateMachine stopRunningWithCapturerToken:token after:delay completionHandler:completionHandler context:context];
}

#pragma mark - Scanning

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    [_stateMachine startScanAsynchronouslyWithScanConfiguration:configuration context:context];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    [_stateMachine stopScanAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
                                  audioConfiguration:(SCAudioConfiguration *)configuration
{
    [_stateMachine prepareForRecordingAsynchronouslyWithAudioConfiguration:configuration context:context];
}

- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                                    audioConfiguration:(SCAudioConfiguration *)configuration
                                           maxDuration:(NSTimeInterval)maxDuration
                                               fileURL:(NSURL *)fileURL
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_stateMachine startRecordingWithOutputSettings:outputSettings
                                 audioConfiguration:configuration
                                        maxDuration:maxDuration
                                            fileURL:fileURL
                                   captureSessionID:captureSessionID
                                  completionHandler:completionHandler
                                            context:context];
}

- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context
{
    [_stateMachine stopRecordingWithContext:context];
}

// Cancel also marks the snap-creation trigger as ended, unlike stop.
- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context
{
    [_stateMachine cancelRecordingWithContext:context];
    [[self snapCreationTriggers] markSnapCreationEndWithContext:context];
}
#pragma mark -

// Streaming and frame-level APIs are not state-machine related; delegate to V1.
- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    [_managedCapturerV1 startStreamingAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
                                 context:(NSString *)context
{
    [_managedCapturerV1 addSampleBufferDisplayController:sampleBufferDisplayController context:context];
}

#pragma mark - Utilities

// NOTE: "coordniates" typo is in the protocol's type name; kept for compatibility.
- (void)convertViewCoordinates:(CGPoint)viewCoordinates
             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
                       context:(NSString *)context
{
    [_managedCapturerV1 convertViewCoordinates:viewCoordinates completionHandler:completionHandler context:context];
}

- (void)detectLensCategoryOnNextFrame:(CGPoint)point
                               lenses:(NSArray<SCLens *> *)lenses
                           completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
                              context:(NSString *)context
{
    [_managedCapturerV1 detectLensCategoryOnNextFrame:point lenses:lenses completion:completion context:context];
}
#pragma mark - Configurations

// All configuration setters below are pure delegation to the V1 capturer.
// (They are expected to migrate to SCCaptureConfigurator eventually — see SCCaptureCore.h.)
- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                      completionHandler:(dispatch_block_t)completionHandler
                                context:(NSString *)context
{
    [_managedCapturerV1 setDevicePositionAsynchronously:devicePosition
                                      completionHandler:completionHandler
                                                context:context];
}

- (void)setFlashActive:(BOOL)flashActive
     completionHandler:(dispatch_block_t)completionHandler
               context:(NSString *)context
{
    [_managedCapturerV1 setFlashActive:flashActive completionHandler:completionHandler context:context];
}

- (void)setLensesActive:(BOOL)lensesActive
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    [_managedCapturerV1 setLensesActive:lensesActive completionHandler:completionHandler context:context];
}

- (void)setLensesActive:(BOOL)lensesActive
          filterFactory:(SCLookseryFilterFactory *)filterFactory
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    [_managedCapturerV1 setLensesActive:lensesActive
                          filterFactory:filterFactory
                      completionHandler:completionHandler
                                context:context];
}

- (void)setLensesInTalkActive:(BOOL)lensesActive
            completionHandler:(dispatch_block_t)completionHandler
                      context:(NSString *)context
{
    [_managedCapturerV1 setLensesInTalkActive:lensesActive completionHandler:completionHandler context:context];
}

- (void)setTorchActiveAsynchronously:(BOOL)torchActive
                   completionHandler:(dispatch_block_t)completionHandler
                             context:(NSString *)context
{
    [_managedCapturerV1 setTorchActiveAsynchronously:torchActive completionHandler:completionHandler context:context];
}

- (void)setNightModeActiveAsynchronously:(BOOL)active
                       completionHandler:(dispatch_block_t)completionHandler
                                 context:(NSString *)context
{
    [_managedCapturerV1 setNightModeActiveAsynchronously:active completionHandler:completionHandler context:context];
}

- (void)lockZoomWithContext:(NSString *)context
{
    [_managedCapturerV1 lockZoomWithContext:context];
}

- (void)unlockZoomWithContext:(NSString *)context
{
    [_managedCapturerV1 unlockZoomWithContext:context];
}

- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context
{
    [_managedCapturerV1 setZoomFactorAsynchronously:zoomFactor context:context];
}

- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                              context:(NSString *)context
{
    [_managedCapturerV1 resetZoomFactorAsynchronously:zoomFactor devicePosition:devicePosition context:context];
}

- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                        fromUser:(BOOL)fromUser
                               completionHandler:(dispatch_block_t)completionHandler
                                         context:(NSString *)context
{
    [_managedCapturerV1 setExposurePointOfInterestAsynchronously:pointOfInterest
                                                        fromUser:fromUser
                                               completionHandler:completionHandler
                                                         context:context];
}

- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                completionHandler:(dispatch_block_t)completionHandler
                                          context:(NSString *)context
{
    [_managedCapturerV1 setAutofocusPointOfInterestAsynchronously:pointOfInterest
                                                completionHandler:completionHandler
                                                          context:context];
}

- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                   completionHandler:(dispatch_block_t)completionHandler
                                             context:(NSString *)context
{
    [_managedCapturerV1 setPortraitModePointOfInterestAsynchronously:pointOfInterest
                                                   completionHandler:completionHandler
                                                             context:context];
}

- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                                  context:(NSString *)context
{
    [_managedCapturerV1 continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:completionHandler
                                                                                  context:context];
}
// I need to call these three methods from SCAppDelegate explicitly so that I get the latest information.
// NOTE(review): the comment says "three", but six lifecycle / media-services
// hooks are forwarded below — confirm which ones SCAppDelegate actually calls.
- (void)applicationDidEnterBackground
{
    [_managedCapturerV1 applicationDidEnterBackground];
}

- (void)applicationWillEnterForeground
{
    [_managedCapturerV1 applicationWillEnterForeground];
}

- (void)applicationDidBecomeActive
{
    [_managedCapturerV1 applicationDidBecomeActive];
}

- (void)applicationWillResignActive
{
    [_managedCapturerV1 applicationWillResignActive];
}

- (void)mediaServicesWereReset
{
    [_managedCapturerV1 mediaServicesWereReset];
}

- (void)mediaServicesWereLost
{
    [_managedCapturerV1 mediaServicesWereLost];
}
#pragma mark - Add / Remove Listener

// Listener registration is delegated wholesale to the V1 capturer; this class
// keeps no listener state of its own.
- (void)addListener:(id<SCManagedCapturerListener>)listener
{
    [_managedCapturerV1 addListener:listener];
}

- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
    [_managedCapturerV1 removeListener:listener];
}

- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    [_managedCapturerV1 addVideoDataSourceListener:listener];
}

- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    [_managedCapturerV1 removeVideoDataSourceListener:listener];
}

- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    [_managedCapturerV1 addDeviceCapacityAnalyzerListener:listener];
}

- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    [_managedCapturerV1 removeDeviceCapacityAnalyzerListener:listener];
}
// Diagnostic string provided by the V1 capturer.
- (NSString *)debugInfo
{
    return [_managedCapturerV1 debugInfo];
}

- (id<SCManagedVideoDataSource>)currentVideoDataSource
{
    return [_managedCapturerV1 currentVideoDataSource];
}

// For APIs inside protocol SCCapture, if they are not related to capture state machine, we directly delegate to V1.
- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback
{
    [_managedCapturerV1 checkRestrictedCamera:callback];
}

- (void)recreateAVCaptureSession
{
    [_managedCapturerV1 recreateAVCaptureSession];
}
#pragma mark -

// The accessors below read state off the V1 capturer's capture resource via
// SCCaptureWorker rather than going through the V1 capturer's own API.
- (CMTime)firstWrittenAudioBufferDelay
{
    return [SCCaptureWorker firstWrittenAudioBufferDelay:_managedCapturerV1.captureResource];
}

- (BOOL)audioQueueStarted
{
    return [SCCaptureWorker audioQueueStarted:_managedCapturerV1.captureResource];
}

- (BOOL)isLensApplied
{
    return [SCCaptureWorker isLensApplied:_managedCapturerV1.captureResource];
}

- (BOOL)isVideoMirrored
{
    return [SCCaptureWorker isVideoMirrored:_managedCapturerV1.captureResource];
}

- (SCVideoCaptureSessionInfo)activeSession
{
    return _managedCapturerV1.activeSession;
}
// Injects the capturer's collaborators onto the V1 capture resource in one
// shot. The GL view manager and the LSA tracking component are additionally
// configured with that resource immediately, and the preview-layer delegate is
// installed on the shared SCManagedCapturePreviewLayerController.
- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
                          deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
                              fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
                        arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
                                 glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
                               lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
                           lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
 managedCapturerPreviewLayerControllerDelegate:
     (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate
{
    _managedCapturerV1.captureResource.blackCameraDetector = blackCameraDetector;
    _managedCapturerV1.captureResource.deviceMotionProvider = deviceMotionProvider;
    _managedCapturerV1.captureResource.fileInputDecider = fileInputDecider;
    _managedCapturerV1.captureResource.arImageCaptureProvider = arImageCaptureProvider;
    _managedCapturerV1.captureResource.videoPreviewGLViewManager = glViewManager;
    [_managedCapturerV1.captureResource.videoPreviewGLViewManager
        configureWithCaptureResource:_managedCapturerV1.captureResource];
    _managedCapturerV1.captureResource.lensAPIProvider = lensAPIProvider;
    _managedCapturerV1.captureResource.lsaTrackingComponentHandler = lsaComponentTracker;
    [_managedCapturerV1.captureResource.lsaTrackingComponentHandler
        configureWithCaptureResource:_managedCapturerV1.captureResource];
    _managedCapturerV1.captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;
    [SCManagedCapturePreviewLayerController sharedInstance].delegate =
        _managedCapturerV1.captureResource.previewLayerControllerDelegate;
}

- (SCBlackCameraDetector *)blackCameraDetector
{
    return _managedCapturerV1.captureResource.blackCameraDetector;
}
// Frame sampling and timed-task APIs, forwarded verbatim to V1.
- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
                                                           context:(NSString *)context
{
    [_managedCapturerV1 captureSingleVideoFrameAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
                                 context:(NSString *)context
{
    [_managedCapturerV1 sampleFrameWithCompletionHandler:completionHandler context:context];
}

- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context
{
    [_managedCapturerV1 addTimedTask:task context:context];
}

- (void)clearTimedTasksWithContext:(NSString *)context
{
    [_managedCapturerV1 clearTimedTasksWithContext:context];
}

// Authorization checks go to the injected authorization checker, not to V1.
- (BOOL)authorizedForVideoCapture
{
    return [_authorizationChecker authorizedForVideoCapture];
}

- (void)preloadVideoCaptureAuthorization
{
    [_authorizationChecker preloadVideoCaptureAuthorization];
}

#pragma mark - Snap Creation triggers

- (SCSnapCreationTriggers *)snapCreationTriggers
{
    return [_managedCapturerV1 snapCreationTriggers];
}

@end

47
ManagedCapturer/ImageProcessing/SCDepthBlurMetalModule.metal

@ -0,0 +1,47 @@
//
// SCDepthBlurMetalModule.metal
// Snapchat
//
// Created by Brian Ng on 10/31/17.
//
#include <metal_stdlib>
using namespace metal;
// Layout must mirror the DepthBlurRenderData struct declared in
// SCDepthBlurMetalRenderCommand.m (bound at buffer index 0).
struct DepthBlurRenderData {
    float depthRange;
    float depthOffset;
    float depthBlurForegroundThreshold;
    float depthBlurBackgroundThreshold;
};

// Blends a sharp and a pre-blurred luma plane using normalized depth: pixels
// above the foreground threshold stay sharp, pixels below the background
// threshold take the blurred value, and the band in between is linearly
// cross-faded. Chroma passes through untouched.
kernel void kernel_depth_blur(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                              texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                              texture2d<float, access::read> sourceDepthTexture [[texture(2)]],
                              texture2d<float, access::read> sourceBlurredYTexture [[texture(3)]],
                              texture2d<float, access::write> destinationYTexture [[texture(4)]],
                              texture2d<float, access::write> destinationUVTexture [[texture(5)]],
                              constant DepthBlurRenderData &renderData [[buffer(0)]],
                              uint2 gid [[thread_position_in_grid]],
                              uint2 size [[threads_per_grid]]) {
    // NOTE(review): depth is read at quarter resolution in each dimension
    // relative to luma -- assumes a fixed 4x subsampling; confirm upstream.
    float rawDepth = sourceDepthTexture.read(uint2(gid.x / 4, gid.y / 4)).r;
    float normalizedDepth = (rawDepth - renderData.depthOffset) / renderData.depthRange;

    float sharpLuma = sourceYTexture.read(gid).r;
    float blurredLuma = sourceBlurredYTexture.read(gid).r;
    float2 chroma = sourceUVTexture.read(gid).rg;

    float outLuma;
    if (normalizedDepth > renderData.depthBlurForegroundThreshold) {
        // In front of the foreground threshold: keep the sharp value.
        outLuma = sharpLuma;
    } else if (normalizedDepth < renderData.depthBlurBackgroundThreshold) {
        // Behind the background threshold: fully blurred.
        outLuma = blurredLuma;
    } else {
        // Linear cross-fade inside the [background, foreground] band.
        float blendRange = renderData.depthBlurForegroundThreshold - renderData.depthBlurBackgroundThreshold;
        float blendWeight = (normalizedDepth - renderData.depthBlurBackgroundThreshold) / blendRange;
        outLuma = sharpLuma * blendWeight + blurredLuma * (1 - blendWeight);
    }

    destinationYTexture.write(outLuma, gid);
    destinationUVTexture.write(float4(chroma.r, chroma.g, 0, 0), gid);
}

21
ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.h

@ -0,0 +1,21 @@
//
// SCDepthBlurMetalRenderCommand.h
// Snapchat
//
// Created by Brian Ng on 11/8/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
 @class SCDepthBlurMetalRenderCommand
 Prepares the command buffer for the SCDepthBlurMetalModule.metal shader.
 */
@interface SCDepthBlurMetalRenderCommand : NSObject <SCMetalRenderCommand>

// Name of the compute shader function this command encodes ("kernel_depth_blur").
@property (nonatomic, readonly) NSString *functionName;

@end

90
ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.m

@ -0,0 +1,90 @@
//
// SCDepthBlurMetalRenderCommand.m
// Snapchat
//
// Created by Brian Ng on 11/8/17.
//
//
#import "SCDepthBlurMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import MetalPerformanceShaders;
@implementation SCDepthBlurMetalRenderCommand

// CPU-side mirror of the DepthBlurRenderData struct declared in
// SCDepthBlurMetalModule.metal; the layouts must match (buffer index 0).
typedef struct DepthBlurRenderData {
    float depthRange;
    float depthOffset;
    float depthBlurForegroundThreshold;
    float depthBlurBackgroundThreshold;
} DepthBlurRenderData;

#pragma mark - SCMetalRenderCommand

// Encodes the depth-blur pass: Gaussian-blurs the luma plane with MPS first,
// then binds the sharp/blurred/depth textures plus the threshold parameters
// for kernel_depth_blur. Returns nil on simulator (no Metal support there).
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
    CGFloat depthBlurForegroundThreshold = textureResource.depthBlurForegroundThreshold;
    // Only use the tweaked background threshold when it stays below the
    // foreground threshold; otherwise fall back to 0 so the shader's blend
    // band keeps a positive width.
    CGFloat depthBlurBackgroundThreshold =
        textureResource.depthBlurForegroundThreshold > SCCameraTweaksDepthBlurBackgroundThreshold()
            ? SCCameraTweaksDepthBlurBackgroundThreshold()
            : 0;
    DepthBlurRenderData depthBlurRenderData = {
        .depthRange = textureResource.depthRange,
        .depthOffset = textureResource.depthOffset,
        .depthBlurBackgroundThreshold = depthBlurBackgroundThreshold,
        .depthBlurForegroundThreshold = depthBlurForegroundThreshold,
    };
    // Upload the parameters to a small GPU buffer bound at index 0.
    id<MTLBuffer> depthBlurRenderDataBuffer =
        [textureResource.device newBufferWithLength:sizeof(DepthBlurRenderData)
                                            options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(depthBlurRenderDataBuffer.contents, &depthBlurRenderData, sizeof(DepthBlurRenderData));
    // Pre-pass: blur the source luma into sourceBlurredYTexture.
    // NOTE(review): a new MPSImageGaussianBlur is allocated per frame --
    // consider caching one if this shows up in profiles.
    MPSImageGaussianBlur *kernel =
        [[MPSImageGaussianBlur alloc] initWithDevice:textureResource.device sigma:SCCameraTweaksBlurSigma()];
    [kernel encodeToCommandBuffer:commandBuffer
                    sourceTexture:textureResource.sourceYTexture
               destinationTexture:textureResource.sourceBlurredYTexture];
    // Texture indices must match the [[texture(n)]] bindings in the shader.
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:2];
    [commandEncoder setTexture:textureResource.sourceBlurredYTexture atIndex:3];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:4];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:5];
    [commandEncoder setBuffer:depthBlurRenderDataBuffer offset:0 atIndex:0];
    return commandEncoder;
#else
    return nil;
#endif
}

// Depth data is required: the shader blends by per-pixel depth.
- (BOOL)requiresDepthData
{
    return YES;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_depth_blur";
}

- (NSString *)description
{
    return [NSString sc_stringWithFormat:@"SCDepthBlurMetalRenderCommand (shader function = %@)", self.functionName];
}

@end

29
ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalModule.metal

@ -0,0 +1,29 @@
//
// SCDepthToGrayscaleMetalModule.metal
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
#include <metal_stdlib>
using namespace metal;
// Layout must mirror DepthToGrayscaleRenderData in
// SCDepthToGrayscaleMetalRenderCommand.m (bound at buffer index 0).
typedef struct DepthToGrayscaleRenderData {
    float depthRange;
    float depthOffset;
} DepthToGrayscaleRenderData;

// Visualizes the depth map: luma becomes the normalized depth value and
// chroma is forced to neutral (0.5, 0.5), i.e. no color.
kernel void kernel_depth_to_grayscale(texture2d<float, access::read> sourceDepthTexture [[texture(0)]],
                                      texture2d<float, access::write> destinationYTexture [[texture(1)]],
                                      texture2d<float, access::write> destinationUVTexture [[texture(2)]],
                                      constant DepthToGrayscaleRenderData &renderData [[buffer(0)]],
                                      uint2 gid [[thread_position_in_grid]],
                                      uint2 size [[threads_per_grid]]) {
    // NOTE(review): depth is read at quarter resolution in each dimension --
    // assumes a fixed 4x subsampling relative to luma; confirm upstream.
    float rawDepth = sourceDepthTexture.read(uint2(gid.x / 4, gid.y / 4)).r;
    float gray = (rawDepth - renderData.depthOffset) / renderData.depthRange;
    destinationYTexture.write(gray, gid);
    destinationUVTexture.write(float4(0.5, 0.5, 0, 0), gid);
}

21
ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.h

@ -0,0 +1,21 @@
//
// SCDepthToGrayscaleMetalRenderCommand.h
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
 @class SCDepthToGrayscaleMetalRenderCommand
 Prepares the command buffer for the SCDepthToGrayscaleMetalModule.metal shader.
 */
@interface SCDepthToGrayscaleMetalRenderCommand : NSObject <SCMetalRenderCommand>

// Name of the compute shader function this command encodes ("kernel_depth_to_grayscale").
@property (nonatomic, readonly) NSString *functionName;

@end

72
ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.m

@ -0,0 +1,72 @@
//
// SCDepthToGrayscaleMetalRenderCommand.m
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
//
#import "SCDepthToGrayscaleMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import MetalPerformanceShaders;
@implementation SCDepthToGrayscaleMetalRenderCommand

// CPU-side mirror of DepthToGrayscaleRenderData declared in
// SCDepthToGrayscaleMetalModule.metal; the layouts must match (buffer index 0).
typedef struct DepthToGrayscaleRenderData {
    float depthRange;
    float depthOffset;
} DepthToGrayscaleRenderData;

#pragma mark - SCMetalRenderCommand

// Encodes the depth-visualization pass: uploads the normalization parameters
// and binds the depth/output textures for kernel_depth_to_grayscale.
// Returns nil on simulator (no Metal support there).
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
    DepthToGrayscaleRenderData depthToGrayscaleRenderData = {
        .depthRange = textureResource.depthRange, .depthOffset = textureResource.depthOffset,
    };
    // Upload the parameters to a small GPU buffer bound at index 0.
    id<MTLBuffer> depthToGrayscaleDataBuffer =
        [textureResource.device newBufferWithLength:sizeof(DepthToGrayscaleRenderData)
                                            options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(depthToGrayscaleDataBuffer.contents, &depthToGrayscaleRenderData, sizeof(DepthToGrayscaleRenderData));
    // Texture indices must match the [[texture(n)]] bindings in the shader.
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:0];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:2];
    [commandEncoder setBuffer:depthToGrayscaleDataBuffer offset:0 atIndex:0];
    return commandEncoder;
#else
    return nil;
#endif
}

// Depth data is required: the shader renders the depth map itself.
- (BOOL)requiresDepthData
{
    return YES;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_depth_to_grayscale";
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"SCDepthToGrayscaleMetalRenderCommand (shader function = %@)", self.functionName];
}

@end

28
ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.h

@ -0,0 +1,28 @@
//
// SCDigitalExposureHandler.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/15/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@class SCExposureAdjustProcessingModule;
/*
 @class SCDigitalExposureHandler
 The SCDigitalExposureHandler will be built by the SCProcessingBuilder when the user indicates that he/she
 wants to add SCExposureAdjustProcessingModule to the processing pipeline. The builder will take care
 of initializing the handler by linking the processing module. Caller of the builder can then link up
 the handler to the UI element (in this case, SCExposureSlider) so that user's control is hooked up to
 the processing module.
 */
@interface SCDigitalExposureHandler : NSObject

// Links the handler to the processing module whose exposure it will drive.
- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule;

// Forwards a normalized slider value to the processing module.
- (void)setExposureParameter:(CGFloat)value;

@end

30
ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.m

@ -0,0 +1,30 @@
//
// SCDigitalExposureHandler.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/15/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCDigitalExposureHandler.h"
#import "SCExposureAdjustProcessingModule.h"
@implementation SCDigitalExposureHandler {
    // Held weakly: the processing pipeline owns the module; this handler only
    // forwards UI values to it.
    __weak SCExposureAdjustProcessingModule *_processingModule;
}

// Designated initializer. Keeps a weak link to the module so forwarding
// slider values never extends the module's lifetime.
- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule
{
    self = [super init];
    if (self) {
        _processingModule = processingModule;
    }
    return self;
}

// Forwards the UI value straight to the module as its EV value. Messaging a
// deallocated (nil) module is a harmless no-op.
- (void)setExposureParameter:(CGFloat)value
{
    [_processingModule setEVValue:value];
}

@end

60
ManagedCapturer/ImageProcessing/SCExposureAdjustMetalModule.metal

@ -0,0 +1,60 @@
//
// SCExposureAdjustMetalModule.metal
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#include <metal_stdlib>
using namespace metal;
// Luma boost: scales Y by 1/(1+Y)^5 + 1, which lifts dark pixels strongly
// (gain -> 2 as Y -> 0) while leaving bright pixels nearly unchanged
// (gain -> ~1 as Y -> 1). Chroma passes through untouched.
kernel void kernel_exposure_adjust(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                   texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                   texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                   texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                   uint2 gid [[thread_position_in_grid]],
                                   uint2 size [[threads_per_grid]]) {
    float luma = sourceYTexture.read(gid).r;
    float2 chroma = sourceUVTexture.read(gid).rg;
    float gain = 1.0 / pow(1.0 + luma, 5) + 1.0;
    destinationYTexture.write(luma * gain, gid);
    destinationUVTexture.write(float4(chroma.r, chroma.g, 0, 0), gid);
}
// "Night vision" look: keeps luma unchanged and clamps chroma to a fixed
// tint; the constants are offsets from neutral gray chroma (0.5, 0.5).
kernel void kernel_exposure_adjust_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                               texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                               texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                               texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                               uint2 gid [[thread_position_in_grid]],
                                               uint2 size [[threads_per_grid]]) {
    float luma = sourceYTexture.read(gid).r;
    float tintU = 0.5 - 0.368;
    float tintV = 0.5 - 0.291;
    destinationYTexture.write(luma, gid);
    destinationUVTexture.write(float4(tintU, tintV, 0, 0), gid);
}
// Inverted "night vision": negates luma (1 - Y) and clamps chroma to the same
// fixed tint as the non-inverted variant.
kernel void kernel_exposure_adjust_inverted_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                                        texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                                        texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                                        texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                                        uint2 gid [[thread_position_in_grid]],
                                                        uint2 size [[threads_per_grid]]) {
    float invertedLuma = 1.0 - sourceYTexture.read(gid).r;
    float tintU = 0.5 - 0.368;
    float tintV = 0.5 - 0.291;
    destinationYTexture.write(invertedLuma, gid);
    destinationUVTexture.write(float4(tintU, tintV, 0, 0), gid);
}

21
ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.h

@ -0,0 +1,21 @@
//
// SCExposureAdjustMetalRenderCommand.h
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
 @class SCExposureAdjustMetalRenderCommand
 Prepares the command buffer for the SCExposureAdjustMetalModule.metal shader.
 */
@interface SCExposureAdjustMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>

// Shader function name; selected at runtime by the SCCameraExposureAdjustmentMode tweak.
@property (nonatomic, readonly) NSString *functionName;

@end

66
ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.m

@ -0,0 +1,66 @@
//
// SCExposureAdjustMetalRenderCommand.m
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#import "SCExposureAdjustMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import <SCFoundation/NSString+SCFormat.h>
#import <SCFoundation/SCAssertWrapper.h>

@import Metal;
@implementation SCExposureAdjustMetalRenderCommand

#pragma mark - SCMetalRenderCommand

// Binds the source/destination Y and UV planes for the exposure shader.
// Texture indices must match the [[texture(n)]] bindings in
// SCExposureAdjustMetalModule.metal.
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
#endif
    return commandEncoder;
}

#pragma mark - SCMetalModuleFunctionProvider

// Selects the shader variant from the SCCameraExposureAdjustmentMode tweak:
// 1 = kernel_exposure_adjust, 2 = nightvision, 3 = inverted nightvision.
// Asserts (and returns nil) on any other value.
- (NSString *)functionName
{
    if (SCCameraExposureAdjustmentMode() == 1) {
        return @"kernel_exposure_adjust";
    } else if (SCCameraExposureAdjustmentMode() == 2) {
        return @"kernel_exposure_adjust_nightvision";
    } else if (SCCameraExposureAdjustmentMode() == 3) {
        return @"kernel_exposure_adjust_inverted_nightvision";
    } else {
        SCAssertFail(@"Incorrect value from SCCameraExposureAdjustmentMode() %ld",
                     (long)SCCameraExposureAdjustmentMode());
        return nil;
    }
}

// The exposure shaders only touch Y/UV planes; no depth texture is read.
- (BOOL)requiresDepthData
{
    return NO;
}

- (NSString *)description
{
    return
        [NSString sc_stringWithFormat:@"SCExposureAdjustMetalRenderCommand (shader function = %@)", self.functionName];
}

@end

28
ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.h

@ -0,0 +1,28 @@
//
// SCExposureAdjustProcessingModule.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/**
 NOTE: If we start chaining multiple CIImage modules we should
 not run them back to back but instead in one CIImage pass
 as CoreImage will merge the shaders for best performance
 */
/*
 @class SCExposureAdjustProcessingModule
 This module uses the CIExposureAdjust CIFilter to process the frames. It uses the value provided by
 the SCDigitalExposureHandler as evValue (default is 0).
 */
@interface SCExposureAdjustProcessingModule : NSObject <SCProcessingModule>

// Sets the exposure adjustment; the value is scaled internally before being
// applied as the filter's inputEV.
- (void)setEVValue:(CGFloat)value;

@end

67
ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.m

@ -0,0 +1,67 @@
//
// SCExposureAdjustProcessingModule.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCExposureAdjustProcessingModule.h"
#import "SCProcessingModuleUtils.h"
@import CoreImage;
@import CoreMedia;
static const CGFloat kSCExposureAdjustProcessingModuleMaxEVValue = 2.0;
@implementation SCExposureAdjustProcessingModule {
    CIContext *_context;              // Renders the filtered CIImage back to a pixel buffer.
    CIFilter *_filter;                // CIExposureAdjust; inputEV driven by -setEVValue:.
    CVPixelBufferPoolRef _bufferPool; // CF pool handed to the render utility; may stay NULL.
}

- (instancetype)init
{
    if (self = [super init]) {
        _context = [CIContext context];
        _filter = [CIFilter filterWithName:@"CIExposureAdjust"];
        // Start neutral: no exposure adjustment until the UI pushes a value.
        [_filter setValue:@0.0 forKey:@"inputEV"];
    }
    return self;
}

// Scales the incoming normalized value by the max-EV constant and applies it
// to the filter. Called from SCDigitalExposureHandler.
- (void)setEVValue:(CGFloat)value
{
    CGFloat newEVValue = value * kSCExposureAdjustProcessingModuleMaxEVValue;
    [_filter setValue:@(newEVValue) forKey:@"inputEV"];
}

- (void)dealloc
{
    // _bufferPool is a CF object, so ARC does not manage it. Guard against
    // NULL: the pool is not created in this file and may never be assigned.
    if (_bufferPool) {
        CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
        CVPixelBufferPoolRelease(_bufferPool);
    }
}

- (BOOL)requiresDepthData
{
    return NO;
}

// Runs the sample buffer's image through CIExposureAdjust and repackages the
// result as a new sample buffer carrying the original's timing.
- (CMSampleBufferRef)render:(RenderData)renderData
{
    CMSampleBufferRef input = renderData.sampleBuffer;
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(input);
    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    [_filter setValue:image forKey:kCIInputImageKey];
    CIImage *result = [_filter outputImage];
    // NOTE(review): _bufferPool is passed by value and never assigned here --
    // verify SCProcessingModuleUtils actually reuses a pool, otherwise one may
    // be created per frame.
    return [SCProcessingModuleUtils sampleBufferFromImage:result
                                          oldSampleBuffer:input
                                               bufferPool:_bufferPool
                                                  context:_context];
}

@end

48
ManagedCapturer/ImageProcessing/SCMetalModule.h

@ -0,0 +1,48 @@
//
// SCMetalModule.h
// Snapchat
//
// Created by Michel Loenngren on 7/19/17.
//
//
#import "SCMetalTextureResource.h"
#import "SCMetalUtils.h"
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
// Supplies the name of the Metal shader function a module runs.
@protocol SCMetalModuleFunctionProvider <NSObject>

@property (nonatomic, readonly) NSString *functionName;

@end

@protocol SCMetalRenderCommand <SCMetalModuleFunctionProvider>

/**
 Sets textures and parameters for the shader function. When implementing this function, the command encoder must be
 computed and the pipeline state set. That is, ensure that there are calls to: [commandBuffer computeCommandEncoder]
 and [commandEncoder setComputePipelineState:pipelineState].
 */
#if !TARGET_IPHONE_SIMULATOR
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource;
#endif

// YES if the command needs the depth texture bound.
- (BOOL)requiresDepthData;

@end

/**
 NOTE: If we start chaining multiple metal modules we should
 not run them back to back but instead chain different render
 passes.
 */
@interface SCMetalModule : NSObject <SCProcessingModule>

// Designated initializer: SCMetalModule should always have a SCMetalRenderCommand
- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand;

@end

155
ManagedCapturer/ImageProcessing/SCMetalModule.m

@ -0,0 +1,155 @@
//
// SCMetalModule.m
// Snapchat
//
// Created by Michel Loenngren on 7/19/17.
//
//
#import "SCMetalModule.h"
#import "SCCameraTweaks.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
// Private Metal state, created lazily by the getters in the implementation.
@interface SCMetalModule ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLLibrary> library;
@property (nonatomic, readonly) id<MTLDevice> device;
@property (nonatomic, readonly) id<MTLFunction> function;
@property (nonatomic, readonly) id<MTLComputePipelineState> computePipelineState;
@property (nonatomic, readonly) id<MTLCommandQueue> commandQueue;
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end
@implementation SCMetalModule {
    id<SCMetalRenderCommand> _metalRenderCommand;
}

#if !TARGET_IPHONE_SIMULATOR
@synthesize library = _library;
@synthesize function = _function;
@synthesize computePipelineState = _computePipelineState;
@synthesize commandQueue = _commandQueue;
@synthesize textureCache = _textureCache;
#endif

// Designated initializer: the render command supplies the shader function and
// encodes per-frame parameters; SCMetalModule owns the Metal plumbing.
- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand
{
    self = [super init];
    if (self) {
        _metalRenderCommand = metalRenderCommand;
    }
    return self;
}

#if !TARGET_IPHONE_SIMULATOR
- (void)dealloc
{
    // The texture cache is a CoreVideo (CF) object, so ARC does not release
    // it; without an explicit release the cache and its textures leak.
    if (_textureCache) {
        CVMetalTextureCacheFlush(_textureCache, 0);
        CFRelease(_textureCache);
    }
}
#endif

#pragma mark - SCProcessingModule

// Runs the module's compute shader over the sample buffer's Y and UV planes
// and copies the result back into the buffer in place. Returns the input
// buffer unchanged if any Metal resource is unavailable (or on simulator).
- (CMSampleBufferRef)render:(RenderData)renderData
{
    CMSampleBufferRef input = renderData.sampleBuffer;
#if !TARGET_IPHONE_SIMULATOR
    id<MTLComputePipelineState> pipelineState = self.computePipelineState;
    SC_GUARD_ELSE_RETURN_VALUE(pipelineState, input);
    CVMetalTextureCacheRef textureCache = self.textureCache;
    SC_GUARD_ELSE_RETURN_VALUE(textureCache, input);
    id<MTLCommandQueue> commandQueue = self.commandQueue;
    SC_GUARD_ELSE_RETURN_VALUE(commandQueue, input);
    SCMetalTextureResource *textureResource =
        [[SCMetalTextureResource alloc] initWithRenderData:renderData textureCache:textureCache device:self.device];
    id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];
    if (!_metalRenderCommand) {
        SCAssertFail(@"Metal module must be initialized with an SCMetalRenderCommand");
    }
    id<MTLComputeCommandEncoder> commandEncoder = [_metalRenderCommand encodeMetalCommand:commandBuffer
                                                                            pipelineState:pipelineState
                                                                          textureResource:textureResource];
    // One thread per output pixel, tiled by the pipeline's execution width.
    NSUInteger w = pipelineState.threadExecutionWidth;
    NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w;
    MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1);
    MTLSize threadgroupsPerGrid = MTLSizeMake((textureResource.sourceYTexture.width + w - 1) / w,
                                              (textureResource.sourceYTexture.height + h - 1) / h, 1);
    [commandEncoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup];
    [commandEncoder endEncoding];
    [commandBuffer commit];
    // Block until the GPU finishes so the copy below reads completed output.
    [commandBuffer waitUntilCompleted];
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
    SCMetalCopyTexture(textureResource.destinationYTexture, imageBuffer, 0);
    SCMetalCopyTexture(textureResource.destinationUVTexture, imageBuffer, 1);
#endif
    return input;
}

- (BOOL)requiresDepthData
{
    return [_metalRenderCommand requiresDepthData];
}

#pragma mark - Lazy properties

#if !TARGET_IPHONE_SIMULATOR
- (id<MTLLibrary>)library
{
    if (!_library) {
        NSString *libPath = [[NSBundle mainBundle] pathForResource:@"sccamera-default" ofType:@"metallib"];
        NSError *error = nil;
        _library = [self.device newLibraryWithFile:libPath error:&error];
        // Check the result, not the error out-param: Cocoa only guarantees
        // the error is populated on failure.
        if (!_library) {
            SCLogGeneralError(@"Create metallib error: %@", error.description);
        }
    }
    return _library;
}

- (id<MTLDevice>)device
{
    return SCGetManagedCaptureMetalDevice();
}

- (id<MTLFunction>)function
{
    // Cache the function: the backing ivar was previously never assigned, so
    // every access created a fresh MTLFunction.
    if (!_function) {
        _function = [self.library newFunctionWithName:[_metalRenderCommand functionName]];
    }
    return _function;
}

- (id<MTLComputePipelineState>)computePipelineState
{
    if (!_computePipelineState) {
        NSError *error = nil;
        _computePipelineState = [self.device newComputePipelineStateWithFunction:self.function error:&error];
        if (!_computePipelineState) {
            SCLogGeneralError(@"Error while creating compute pipeline state %@", error.description);
        }
    }
    return _computePipelineState;
}

- (id<MTLCommandQueue>)commandQueue
{
    if (!_commandQueue) {
        _commandQueue = [self.device newCommandQueue];
    }
    return _commandQueue;
}

- (CVMetalTextureCacheRef)textureCache
{
    if (!_textureCache) {
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, self.device, nil, &_textureCache);
    }
    return _textureCache;
}
#endif

@end

54
ManagedCapturer/ImageProcessing/SCMetalTextureResource.h

@ -0,0 +1,54 @@
//
// SCMetalTextureResource.h
// Snapchat
//
// Created by Brian Ng on 11/7/17.
//
#import "SCProcessingModule.h"
#import "SCCapturerDefines.h"
#import <Foundation/Foundation.h>
#if !TARGET_IPHONE_SIMULATOR
#import <Metal/Metal.h>
#endif
/*
@class SCMetalTextureResource
The SCMetalTextureResource is created by SCMetalModule and is passed to a SCMetalRenderCommand.
This resource provides a collection of textures for rendering, where a SCMetalRenderCommand
selects which textures it needs. Textures are lazily initialized to optimize performance.
Additionally, information pertaining to depth is provided if normalizing depth is desired:
depthRange is the range of possible depth values [depthOffset, depthOffset + depthRange],
where depthOffset is the min depth value in the given depth map.
NOTE: This class is NOT thread safe -- ensure any calls are made by a performer by calling
SCAssertPerformer before actually accessing any textures
*/
@interface SCMetalTextureResource : NSObject
#if !TARGET_IPHONE_SIMULATOR
// Source planes of the sample buffer's image (created lazily on first access).
@property (nonatomic, readonly) id<MTLTexture> sourceYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceUVTexture;
// Writable output planes, sized to match the corresponding source planes.
@property (nonatomic, readonly) id<MTLTexture> destinationYTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationUVTexture;
// Textures for SCDepthBlurMetalCommand
@property (nonatomic, readonly) id<MTLTexture> sourceBlurredYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceDepthTexture;
@property (nonatomic, readonly) id<MTLDevice> device;
#endif
// Available depth-related auxiliary resources (when depth data is provided)
@property (nonatomic, readonly) float depthRange;
@property (nonatomic, readonly) float depthOffset;
@property (nonatomic, readonly) CGFloat depthBlurForegroundThreshold;
@property (nonatomic, readonly) SampleBufferMetadata sampleBufferMetadata;
#if !TARGET_IPHONE_SIMULATOR
// The texture cache and device are shared by the owning SCMetalModule.
- (instancetype)initWithRenderData:(RenderData)renderData
                      textureCache:(CVMetalTextureCacheRef)textureCache
                            device:(id<MTLDevice>)device;
#endif
@end

215
ManagedCapturer/ImageProcessing/SCMetalTextureResource.m

@ -0,0 +1,215 @@
//
// SCMetalTextureResource.m
// Snapchat
//
// Created by Brian Ng on 11/7/17.
//
#import "SCMetalTextureResource.h"
#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
@import CoreImage;
#if !TARGET_IPHONE_SIMULATOR
static NSInteger const kSCFocusRectSize = 4;
#endif
@interface SCMetalTextureResource ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end
@implementation SCMetalTextureResource {
RenderData _renderData;
CVImageBufferRef _imageBuffer;
CIContext *_context;
}
#if !TARGET_IPHONE_SIMULATOR
@synthesize sourceYTexture = _sourceYTexture;
@synthesize sourceUVTexture = _sourceUVTexture;
@synthesize destinationYTexture = _destinationYTexture;
@synthesize destinationUVTexture = _destinationUVTexture;
@synthesize sourceBlurredYTexture = _sourceBlurredYTexture;
@synthesize sourceDepthTexture = _sourceDepthTexture;
@synthesize depthRange = _depthRange;
@synthesize depthOffset = _depthOffset;
@synthesize depthBlurForegroundThreshold = _depthBlurForegroundThreshold;
@synthesize device = _device;
@synthesize sampleBufferMetadata = _sampleBufferMetadata;
// Captures the sample buffer's image buffer plus the shared cache/device; the
// individual textures are created lazily by the getters below.
- (instancetype)initWithRenderData:(RenderData)renderData
                      textureCache:(CVMetalTextureCacheRef)textureCache
                            device:(id<MTLDevice>)device
{
    self = [super init];
    if (self) {
        _imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
        _renderData = renderData;
        _textureCache = textureCache;
        _device = device;
        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
    }
    return self;
}
#endif

#if !TARGET_IPHONE_SIMULATOR
// Luma (Y) plane of the source buffer wrapped as an R8 texture; created once.
- (id<MTLTexture>)sourceYTexture
{
    if (!_sourceYTexture) {
        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
        _sourceYTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);
        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
    }
    return _sourceYTexture;
}

// Chroma (UV) plane of the source buffer wrapped as an RG8 texture; created once.
- (id<MTLTexture>)sourceUVTexture
{
    if (!_sourceUVTexture) {
        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
        _sourceUVTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);
        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
    }
    return _sourceUVTexture;
}

// Shader-writable output luma texture sized to the source Y plane.
- (id<MTLTexture>)destinationYTexture
{
    if (!_destinationYTexture) {
        MTLTextureDescriptor *textureDescriptor =
            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
                                                           mipmapped:NO];
        textureDescriptor.usage |= MTLTextureUsageShaderWrite;
        _destinationYTexture = [_device newTextureWithDescriptor:textureDescriptor];
    }
    return _destinationYTexture;
}

// Shader-writable output chroma texture sized to the source UV plane.
- (id<MTLTexture>)destinationUVTexture
{
    if (!_destinationUVTexture) {
        MTLTextureDescriptor *textureDescriptor =
            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm
                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 1)
                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 1)
                                                           mipmapped:NO];
        textureDescriptor.usage |= MTLTextureUsageShaderWrite;
        _destinationUVTexture = [_device newTextureWithDescriptor:textureDescriptor];
    }
    return _destinationUVTexture;
}
- (id<MTLTexture>)sourceBlurredYTexture
{
if (!_sourceBlurredYTexture) {
MTLTextureDescriptor *textureDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
mipmapped:NO];
textureDescriptor.usage |= MTLTextureUsageShaderWrite;
_sourceBlurredYTexture = [_device newTextureWithDescriptor:textureDescriptor];
}
return _sourceBlurredYTexture;
}
- (id<MTLTexture>)sourceDepthTexture
{
if (!_sourceDepthTexture) {
CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
_sourceDepthTexture =
SCMetalTextureFromPixelBuffer(_renderData.depthDataMap, 0, MTLPixelFormatR16Float, _textureCache);
CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
}
return _sourceDepthTexture;
}
- (float)depthRange
{
if (_depthRange == 0) {
// Get min/max values of depth image to normalize
size_t bufferWidth = CVPixelBufferGetWidth(_renderData.depthDataMap);
size_t bufferHeight = CVPixelBufferGetHeight(_renderData.depthDataMap);
size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(_renderData.depthDataMap);
CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
unsigned char *pixelBufferPointer = CVPixelBufferGetBaseAddress(_renderData.depthDataMap);
__fp16 *bufferPtr = (__fp16 *)pixelBufferPointer;
uint32_t ptrInc = (int)bufferBytesPerRow / sizeof(__fp16);
float depthMin = MAXFLOAT;
float depthMax = -MAXFLOAT;
for (int j = 0; j < bufferHeight; j++) {
for (int i = 0; i < bufferWidth; i++) {
float value = bufferPtr[i];
if (!isnan(value)) {
depthMax = MAX(depthMax, value);
depthMin = MIN(depthMin, value);
}
}
bufferPtr += ptrInc;
}
CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
_depthRange = depthMax - depthMin;
_depthOffset = depthMin;
}
return _depthRange;
}
- (float)depthOffset
{
if (_depthRange == 0) {
[self depthRange];
}
return _depthOffset;
}
- (CGFloat)depthBlurForegroundThreshold
{
if (_renderData.depthBlurPointOfInterest) {
CGPoint point = *_renderData.depthBlurPointOfInterest;
CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:_renderData.depthDataMap];
CIVector *vector =
[CIVector vectorWithX:point.x * CVPixelBufferGetWidth(_renderData.depthDataMap) - kSCFocusRectSize / 2
Y:point.y * CVPixelBufferGetHeight(_renderData.depthDataMap) - kSCFocusRectSize / 2
Z:kSCFocusRectSize
W:kSCFocusRectSize];
CIImage *minMaxImage =
[[disparityImage imageByClampingToExtent] imageByApplyingFilter:@"CIAreaMinMaxRed"
withInputParameters:@{kCIInputExtentKey : vector}];
UInt8 pixel[4] = {0, 0, 0, 0};
[_context render:minMaxImage
toBitmap:&pixel
rowBytes:4
bounds:CGRectMake(0, 0, 1, 1)
format:kCIFormatRGBA8
colorSpace:nil];
CGFloat disparity = pixel[1] / 255.0;
CGFloat normalizedDisparity = (disparity - self.depthOffset) / self.depthRange;
return normalizedDisparity;
} else {
return SCCameraTweaksDepthBlurForegroundThreshold();
}
}
- (SampleBufferMetadata)sampleBufferMetadata
{
SampleBufferMetadata sampleMetadata = {
.isoSpeedRating = 0, .exposureTime = 0.033, .brightness = 0,
};
retrieveSampleBufferMetadata(_renderData.sampleBuffer, &sampleMetadata);
return sampleMetadata;
}
#endif
@end

37
ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalModule.metal

@ -0,0 +1,37 @@
//
// SCNightModeEnhancementMetalModule.metal
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
//
#include <metal_stdlib>
using namespace metal;
// Mirrors the CPU-side SampleBufferMetadata struct copied into buffer(0) by
// SCNightModeEnhancementMetalRenderCommand; field order/types must stay layout-compatible.
typedef struct SampleBufferMetadata {
    int isoSpeedRating; // Fixed typo: was "iosSpeedRating"; CPU-side struct uses isoSpeedRating.
    float exposureTime;
    float brightness;
} SampleBufferMetadata;
// Brightens dark frames: scales luma (and chroma deviation from neutral 0.5) by a factor
// derived from measured scene brightness, clamped to [1.0, 1.3] so bright scenes pass through
// unchanged and the boost never exceeds 1.3x.
kernel void kernel_night_mode_enhancement(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                          texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                          texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                          texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                          constant SampleBufferMetadata &metaData [[buffer(0)]],
                                          uint2 gid [[thread_position_in_grid]],
                                          uint2 size [[threads_per_grid]]) {
    float valueY = sourceYTexture.read(gid).r;
    float2 valueUV = sourceUVTexture.read(gid).rg;
    // Negative brightness (dark scene) yields factor > 1.
    float factor = 1.0 - metaData.brightness * 0.1;
    factor = max(min(factor, 1.3), 1.0);
    valueY = min(valueY * factor, 1.0);
    valueUV.rg = max(min((valueUV.rg - 0.5) * factor + 0.5, 1.0), 0.0);
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}

19
ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.h

@ -0,0 +1,19 @@
//
// SCNightModeEnhancementMetalRenderCommand.h
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
Prepares the command buffer for the SCNightModeEnhancementMetalModule.metal.
*/
@interface SCNightModeEnhancementMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>
// Name of the compute kernel in SCNightModeEnhancementMetalModule.metal
// ("kernel_night_mode_enhancement").
@property (nonatomic, readonly) NSString *functionName;
@end

64
ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.m

@ -0,0 +1,64 @@
//
// SCNightModeEnhancementMetalRenderCommand.m
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
#import "SCNightModeEnhancementMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import Metal;
@implementation SCNightModeEnhancementMetalRenderCommand
#pragma mark - SCMetalRenderCommand
/// Encodes the night-mode enhancement pass: binds the source/destination Y and UV planes and
/// the per-frame sample buffer metadata that drives the brightness boost.
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
    SampleBufferMetadata sampleBufferMetadata = {
        .isoSpeedRating = textureResource.sampleBufferMetadata.isoSpeedRating,
        .exposureTime = textureResource.sampleBufferMetadata.exposureTime,
        .brightness = textureResource.sampleBufferMetadata.brightness,
    };
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
    // setBytes: is Apple's recommended path for constant data < 4KB; it avoids allocating
    // (and leaking into the heap churn) a transient MTLBuffer every frame, and drops the
    // deprecated MTLResourceOptionCPUCacheModeDefault usage.
    [commandEncoder setBytes:&sampleBufferMetadata length:sizeof(SampleBufferMetadata) atIndex:0];
#endif
    return commandEncoder;
}
#pragma mark - SCMetalModuleFunctionProvider
/// Kernel function name inside SCNightModeEnhancementMetalModule.metal.
- (NSString *)functionName
{
    return @"kernel_night_mode_enhancement";
}
/// Night-mode enhancement operates on the color planes only.
- (BOOL)requiresDepthData
{
    return NO;
}
- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"SCNightModeEnhancementMetalRenderCommand (shader function = %@)", self.functionName];
}
@end

32
ManagedCapturer/ImageProcessing/SCProcessingModule.h

@ -0,0 +1,32 @@
//
// SCProcessingModule.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
// Plain C struct passed BY VALUE through the pipeline: mutations made by a callee are not
// visible to its caller. None of the references are owned by the struct.
typedef struct RenderData {
    CMSampleBufferRef sampleBuffer;    // Frame being processed.
    CVPixelBufferRef depthDataMap;     // Optional - for depth blur rendering
    CGPoint *depthBlurPointOfInterest; // Optional - for depth blur rendering
} RenderData;
/*
 @protocol SCProcessingModule
 A single module that is responsible for the actual image processing work. Multiple modules can be chained
 together by the SCProcessingPipelineBuilder and the frame can be passed through the entire
 SCProcessingPipeline.
 */
@protocol SCProcessingModule <NSObject>
// Processes the frame described by renderData and returns the resulting sample buffer.
// NOTE(review): ownership of the returned buffer is unspecified -- implementations in this file
// return either the input buffer or a newly created (+1) one; confirm with callers.
- (CMSampleBufferRef)render:(RenderData)renderData;
// Needed to protect against depth data potentially being nil during the render pass
- (BOOL)requiresDepthData;
@end

22
ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.h

@ -0,0 +1,22 @@
//
// SCProcessingModuleUtils.h
// Snapchat
//
// Created by Brian Ng on 11/10/17.
//
#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
@interface SCProcessingModuleUtils : NSObject
// Renders image into a pixel buffer drawn from bufferPool, creating a temporary pool when
// bufferPool is NULL. Returns NULL on failure.
// NOTE(review): the returned CVPixelBufferRef follows the CF Create rule (caller must
// release) -- confirm callers balance it.
+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
                              bufferPool:(CVPixelBufferPoolRef)bufferPool
                                 context:(CIContext *)context;
// Builds a new CMSampleBuffer containing the rendered image, copying timing info from
// oldSampleBuffer. On any failure, oldSampleBuffer is returned unchanged.
// NOTE(review): on success the result is a new (+1) buffer, on failure it is the borrowed
// input -- callers cannot distinguish ownership; verify release discipline at call sites.
+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
                                bufferPool:(CVPixelBufferPoolRef)bufferPool
                                   context:(CIContext *)context;
@end

84
ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.m

@ -0,0 +1,84 @@
//
// SCProcessingModuleUtils.m
// Snapchat
//
// Created by Brian Ng on 11/10/17.
//
#import "SCProcessingModuleUtils.h"
#import <SCFoundation/SCLog.h>
@import CoreImage;
@implementation SCProcessingModuleUtils
/// Renders image into a pixel buffer drawn from bufferPool. When bufferPool is NULL a pool is
/// created locally; because bufferPool is passed by value the caller can never receive that
/// pool, so the local reference is released before returning (previously it leaked per call).
/// Returns a +1 CVPixelBufferRef (Create rule), or NULL on failure.
+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
                              bufferPool:(CVPixelBufferPoolRef)bufferPool
                                 context:(CIContext *)context
{
    CVReturn result;
    BOOL createdPoolLocally = NO;
    if (bufferPool == NULL) {
        NSDictionary *pixelAttributes = @{
            (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *)
            kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)
            kCVPixelBufferWidthKey : @(image.extent.size.width), (NSString *)
            kCVPixelBufferHeightKey : @(image.extent.size.height)
        };
        result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
                                         (__bridge CFDictionaryRef _Nullable)(pixelAttributes), &bufferPool);
        if (result != kCVReturnSuccess) {
            SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer pool %i", result);
            return NULL;
        }
        createdPoolLocally = YES;
    }
    CVPixelBufferRef resultBuffer = NULL;
    result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &resultBuffer);
    if (result == kCVReturnSuccess) {
        [context render:image toCVPixelBuffer:resultBuffer];
    } else {
        SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer from pool %i", result);
    }
    if (createdPoolLocally) {
        // The pixel buffer keeps its pool alive internally; dropping our reference does not
        // invalidate resultBuffer.
        CVPixelBufferPoolRelease(bufferPool);
    }
    return resultBuffer;
}
/// Wraps the rendered image in a new CMSampleBuffer, carrying over timing info from
/// oldSampleBuffer. On any failure the borrowed oldSampleBuffer is returned unchanged;
/// on success the returned buffer is +1 (caller releases).
+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
                                bufferPool:(CVPixelBufferPoolRef)bufferPool
                                   context:(CIContext *)context
{
    CVPixelBufferRef pixelBuffer =
        [SCProcessingModuleUtils pixelBufferFromImage:image bufferPool:bufferPool context:context];
    if (!pixelBuffer) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating new pixel buffer from image");
        return oldSampleBuffer;
    }
    CMSampleBufferRef newSampleBuffer = NULL;
    CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid; // fixed typo: was "timimgInfo"
    CMSampleBufferGetSampleTimingInfo(oldSampleBuffer, 0, &timingInfo);
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    if (status != noErr) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating video format description %i", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return oldSampleBuffer;
    }
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo,
                                                &timingInfo, &newSampleBuffer);
    // videoInfo was Create'd above (+1) and is retained by the sample buffer; release our
    // reference to fix the previous per-call leak.
    CFRelease(videoInfo);
    if (status != noErr) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating CMSampleBuffer %i", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return oldSampleBuffer;
    }
    CVPixelBufferRelease(pixelBuffer);
    return newSampleBuffer;
}
@end

23
ManagedCapturer/ImageProcessing/SCProcessingPipeline.h

@ -0,0 +1,23 @@
//
// SCProcessingPipeline.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/*
@class SCProcessingPipeline
The SCProcessingPipeline chains together a series of SCProcessingModules and passes the frame through
each of them in a pre-determined order. This is done through a chain of command, where the resulting
frame from the the first module is passed to the second, then to the third, etc.
*/
@interface SCProcessingPipeline : NSObject <SCProcessingModule>
// Executed in array order; order is significant (set by SCProcessingPipelineBuilder).
@property (nonatomic, strong) NSMutableArray<id<SCProcessingModule>> *processingModules;
@end

46
ManagedCapturer/ImageProcessing/SCProcessingPipeline.m

@ -0,0 +1,46 @@
//
// SCProcessingPipeline.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingPipeline.h"
#import <SCFoundation/NSString+Helpers.h>
@import CoreMedia;
@implementation SCProcessingPipeline
/// Threads the frame through every module in order. A module that requires depth data is
/// skipped when renderData carries no depth map; each module's output becomes the next
/// module's input sample buffer.
- (CMSampleBufferRef)render:(RenderData)renderData
{
    for (id<SCProcessingModule> module in self.processingModules) {
        BOOL skipForMissingDepth = [module requiresDepthData] && !renderData.depthDataMap;
        if (skipForMissingDepth) {
            continue;
        }
        renderData.sampleBuffer = [module render:renderData];
    }
    return renderData.sampleBuffer;
}
/// Human-readable summary listing each module's description, comma-separated.
- (NSString *)description
{
    if (self.processingModules.count == 0) {
        return [NSMutableString stringWithString:@"ProcessingPipeline, modules: "];
    }
    NSMutableArray<NSString *> *moduleDescriptions = [NSMutableArray arrayWithCapacity:self.processingModules.count];
    for (id<SCProcessingModule> module in self.processingModules) {
        [moduleDescriptions addObject:[module description]];
    }
    return
        [@"ProcessingPipeline, modules: " stringByAppendingString:[moduleDescriptions componentsJoinedByString:@", "]];
}
/// The pipeline itself never demands depth; individual modules are gated in -render:.
- (BOOL)requiresDepthData
{
    return NO;
}
@end

29
ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.h

@ -0,0 +1,29 @@
//
// SCProcessingPipelineBuilder.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@class SCDigitalExposureHandler;
@class SCProcessingPipeline;
/*
@class SCProcessingPipelineBuilder
The builder object is responsible for creating the SCProcessingPipeline, the underneath
SCProcessingModules, and eventually chaining the SCProcessingModules together in a pre-determined
order. The builder is also responsible for providing consumers with handler objects.
*/
@interface SCProcessingPipelineBuilder : NSObject
// Feature flags; each enabled flag contributes one module to the built pipeline.
@property (nonatomic) BOOL useExposureAdjust;
@property (nonatomic) BOOL portraitModeEnabled;
@property (nonatomic) BOOL enhancedNightMode;
// Returns a pipeline with modules in a fixed, significant order, or nil if no flag is set.
- (SCProcessingPipeline *)build;
@end

57
ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.m

@ -0,0 +1,57 @@
//
// SCProcessingPipelineBuilder.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingPipelineBuilder.h"
#import "SCCameraTweaks.h"
#import "SCDepthBlurMetalRenderCommand.h"
#import "SCDepthToGrayscaleMetalRenderCommand.h"
#import "SCDigitalExposureHandler.h"
#import "SCExposureAdjustMetalRenderCommand.h"
#import "SCMetalUtils.h"
#import "SCNightModeEnhancementMetalRenderCommand.h"
#import "SCProcessingPipeline.h"
@implementation SCProcessingPipelineBuilder
/// Assembles the processing pipeline from the enabled feature flags.
/// Returns nil when no feature is enabled. Module order is significant.
- (SCProcessingPipeline *)build
{
    BOOL anyFeatureEnabled = _useExposureAdjust || _portraitModeEnabled || _enhancedNightMode;
    if (!anyFeatureEnabled) { // in the future also: || useA || useB ...
        return nil;
    }
    NSMutableArray<id<SCProcessingModule>> *modules = [NSMutableArray array];
    // Order of adding modules matters!
    if (_useExposureAdjust && SCDeviceSupportsMetal()) {
        // The Metal check looks redundant today but will be necessary once more modules exist.
        SCMetalModule *exposureModule =
            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCExposureAdjustMetalRenderCommand new]];
        [modules addObject:exposureModule];
    }
    if (_portraitModeEnabled) {
        // NOTE(review): unlike the other branches this one does not check SCDeviceSupportsMetal()
        // -- presumably portrait mode already implies a Metal-capable device; confirm.
        id<SCMetalRenderCommand> depthCommand = SCCameraTweaksDepthToGrayscaleOverride()
                                                    ? [SCDepthToGrayscaleMetalRenderCommand new]
                                                    : [SCDepthBlurMetalRenderCommand new];
        [modules addObject:[[SCMetalModule alloc] initWithMetalRenderCommand:depthCommand]];
    }
    if (_enhancedNightMode && SCDeviceSupportsMetal()) {
        SCMetalModule *nightModeModule =
            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCNightModeEnhancementMetalRenderCommand new]];
        [modules addObject:nightModeModule];
    }
    SCProcessingPipeline *pipeline = [[SCProcessingPipeline alloc] init];
    pipeline.processingModules = modules;
    return pipeline;
}
@end

23
ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.h

@ -0,0 +1,23 @@
//
// SCStillImageDepthBlurFilter.h
// Snapchat
//
// Created by Brian Ng on 10/11/17.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/*
@class SCStillImageDepthBlurFilter
This module uses the CIDepthBlurEffect CIFilter that uses rgb and depth information to produce an image with
the portrait mode effect (background blurred, foreground sharp).
*/
@interface SCStillImageDepthBlurFilter : NSObject
// Applies the CIDepthBlurEffect filter to a still image capture photo. If an error occured, the original
// photoData will be returned.
// Uses renderData.depthDataMap (disparity) and, optionally, depthBlurPointOfInterest.
- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0);
@end

68
ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.m

@ -0,0 +1,68 @@
//
// SCStillImageDepthBlurFilter.m
// Snapchat
//
// Created by Brian Ng on 10/11/17.
//
#import "SCStillImageDepthBlurFilter.h"
#import "SCCameraTweaks.h"
#import "SCProcessingModuleUtils.h"
@import CoreMedia;
@implementation SCStillImageDepthBlurFilter {
    CIContext *_context; // Half-float working-format context used for filtering + JPEG encoding.
    CIFilter *_filter;   // CIDepthBlurEffect (iOS 11+).
    // NOTE(review): never assigned anywhere in this class; kept for interface stability.
    CVPixelBufferPoolRef _bufferPool;
}
- (instancetype)init
{
    if (self = [super init]) {
        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
        _filter = [CIFilter filterWithName:@"CIDepthBlurEffect"];
    }
    return self;
}
- (void)dealloc
{
    // Guard: _bufferPool may be (and currently always is) NULL, and CVPixelBufferPoolFlush
    // is not documented as NULL-safe.
    if (_bufferPool) {
        CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
        CVPixelBufferPoolRelease(_bufferPool);
    }
}
/// Applies CIDepthBlurEffect to the still photo using its disparity map and returns the
/// JPEG-encoded result. Returns the original photoData unchanged when the disparity image,
/// filter output, or JPEG encoding is unavailable.
- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0)
{
    CIImage *mainImage = [CIImage imageWithData:photoData];
    CVPixelBufferRef disparityImagePixelBuffer = renderData.depthDataMap;
    CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:disparityImagePixelBuffer];
    if (!disparityImage) {
        return photoData;
    }
    [_filter setValue:mainImage forKey:kCIInputImageKey];
    [_filter setValue:disparityImage forKey:kCIInputDisparityImageKey];
    if (renderData.depthBlurPointOfInterest && SCCameraTweaksEnableFilterInputFocusRect()) {
        CGPoint pointOfInterest = *renderData.depthBlurPointOfInterest;
        // NOTE(review): a 1x1 rect at the normalized point of interest -- confirm this is the
        // coordinate space/size CIDepthBlurEffect expects for inputFocusRect.
        [_filter setValue:[CIVector vectorWithX:pointOfInterest.x Y:pointOfInterest.y Z:1 W:1]
                   forKey:@"inputFocusRect"];
    }
    CIImage *result = [_filter outputImage];
    if (!result) {
        return photoData;
    }
    CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB();
    NSData *processedPhotoData = [_context JPEGRepresentationOfImage:result colorSpace:deviceRGBColorSpace options:@{}];
    CGColorSpaceRelease(deviceRGBColorSpace);
    if (!processedPhotoData) {
        return photoData;
    }
    // Fix: removed a call that built a CMSampleBuffer and stored it into the by-value
    // renderData struct. The caller could never observe that assignment, and the +1 sample
    // buffer leaked on every capture. If callers need the rendered sample buffer, RenderData
    // must be passed by pointer instead.
    return processedPhotoData;
}
@end

103
ManagedCapturer/StateMachine/SCCaptureBaseState.h

@ -0,0 +1,103 @@
//
// SCCaptureBaseState.h
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//
#import "SCCaptureCommon.h"
#import "SCCaptureStateDelegate.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureWorker.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCStateTransitionPayload.h"
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@class SCCapturerToken;
@class SCAudioConfiguration;
@class SCQueuePerformer;
/*
Every state machine state needs to inherent SCCaptureBaseState to have the APIs. State machine state in general will
only implement APIs which are legal for itself. If illegal APIs are invoked, SCCaptureBaseState will handle it.
The intended behavior:
1) crash using SCAssert in Debug build,
2) ignore api call, and log the call, for alpha/master/production.
3) in the future, we will introduce dangerous API call concept, and restart camera in such case, to avoid bad state.
Every state machine state is going to be built to follow functional programming as more as possible. The shared
resources between them will be passed into the API via SCCaptureResource.
*/
@interface SCCaptureBaseState : NSObject
- (instancetype)init NS_UNAVAILABLE;
/// Designated initializer.
/// @param performer  Queue on which state machine APIs are expected to run.
/// @param bookKeeper Records state transitions and illegal API calls.
/// @param delegate   Receives requests to transfer to a new state.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
/* The following API will be invoked at the moment state context promote the state to be current state. State use this
 * chance to do something, such as start recording for recording state.
 */
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context;
/// Identifier of this state; subclasses override to report their own id.
- (SCCaptureStateMachineStateId)stateId;
// The APIs below form the full capture surface. The base implementations treat them as
// illegal calls (see class comment); each concrete state overrides only the subset that is
// legal for it.
- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context;
- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context;
- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;
- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context;
- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context;
- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;
- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;
- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context;
- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context;
- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context;
// Book keeper supplied at init; records transitions and illegal calls.
@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper;
@end

169
ManagedCapturer/StateMachine/SCCaptureBaseState.m

@ -0,0 +1,169 @@
//
// SCCaptureBaseState.m
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//
#import "SCCaptureBaseState.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCapturerToken.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@implementation SCCaptureBaseState {
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCQueuePerformer *_performer;                // Queue all state machine APIs must run on.
    __weak id<SCCaptureStateDelegate> _delegate; // Weak: the state machine context owns the states.
}
/// Designated initializer; see header for parameter semantics.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super init];
    if (self) {
        SCAssert(performer, @"");
        SCAssert(bookKeeper, @"");
        _bookKeeper = bookKeeper;
        _performer = performer;
        _delegate = delegate;
    }
    return self;
}
/// Subclasses override to report their concrete state id.
- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureBaseStateId;
}
// The base implementations below treat each API as an illegal call for this state:
// they only route to _handleBaseStateBehavior:context: (log + bookkeeping; assert in debug).
// Concrete states override the subset that is legal for them.
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context];
}
- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context];
}
- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context];
}
/// Unlike the other APIs, stopRunning is actually honored in the base state: it forwards to
/// SCManagedCapturerV1 and, if capture really stopped, requests a transition back to the
/// initialized state.
- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCAssertPerformer(_performer);
    BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
                                                                           completionHandler:completionHandler
                                                                                     context:context];
    // TODO: Fix CCAM-14450
    // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450
    // It is caused by switching from scanning state to stop running state when the view is disappearing in the scanning
    // state, which can be reproduced by triggering scanning and then switch to maps page.
    // We remove SCAssert to ingore the crashes in master branch and will find a solution for the illegal call for the
    // state machine later
    // NOTE(review): this asserts that capture did NOT actually stop in non-scanning states;
    // an actual stop here is treated as a symptom of the illegal call described above -- confirm.
    if (self.stateId != SCCaptureScanningStateId) {
        SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]),
                 context);
    } else {
        SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped,
                                     SCCaptureStateName([self stateId]), context);
    }
    if (actuallyStopped) {
        [_delegate currentState:self
            requestToTransferToNewState:SCCaptureInitializedStateId
                                payload:nil
                                context:context];
    }
}
- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context];
}
- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context];
}
- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context];
}
- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context];
}
- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context];
}
- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context];
}
/// Honored in every state (not treated as illegal) until IDT-12520 is resolved.
- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context
{
    // Temporary solution until IDT-12520 is resolved.
    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];
    //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"];
}
/// Records the illegal call with the book keeper (state, API name, call stack) and, in debug
/// builds only, asserts. Release builds log and ignore the call, per the class comment.
- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context
{
    [_bookKeeper state:[self stateId]
        illegalAPIcalled:illegalAPIName
               callStack:[NSThread callStackSymbols]
                 context:context];
    if (SCIsDebugBuild()) {
        SCAssertFail(@"illegal API invoked on capture state machine");
    }
}
- (SCCaptureStateMachineBookKeeper *)bookKeeper
{
    return _bookKeeper;
}
@end

30
ManagedCapturer/StateMachine/SCCaptureStateDelegate.h

@ -0,0 +1,30 @@
//
// SCCaptureStateDelegate.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//
#import "SCCaptureStateUtil.h"
#import <Foundation/Foundation.h>
@class SCCaptureBaseState;
@class SCStateTransitionPayload;
/*
The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer
to other state".
Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate for
those states.
*/
@protocol SCCaptureStateDelegate <NSObject>
/// Invoked by the current state to request a transition to another state.
/// @param state    The state making the request (the sender).
/// @param newState Identifier of the state to transfer to.
/// @param payload  Optional data handed to the new state's didBecomeCurrentState:; may be nil.
/// @param context  Human-readable context string used for logging and bookkeeping.
- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context;
@end

29
ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h

@ -0,0 +1,29 @@
//
// SCCaptureStateTransitionBookKeeper.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//
#import "SCCaptureStateUtil.h"
#import <Foundation/Foundation.h>
/*
Book keeper is used to record every state transition, and every illegal API call.
*/
@interface SCCaptureStateMachineBookKeeper : NSObject
// Logs a state transition (including the lifespan of the outgoing state).
- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
                         to:(SCCaptureStateMachineStateId)toId
                    context:(NSString *)context;
// Logs and reports (via SCLogger) an API that was invoked in a state where it is illegal.
// callStack is truncated before reporting; all parameters are required.
- (void)state:(SCCaptureStateMachineStateId)captureState
    illegalAPIcalled:(NSString *)illegalAPIName
           callStack:(NSArray<NSString *> *)callStack
             context:(NSString *)context;
// Logs a legal API call for debugging purposes.
- (void)logAPICalled:(NSString *)apiName context:(NSString *)context;
@end

63
ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m

@ -0,0 +1,63 @@
//
// SCCaptureStateTransitionBookKeeper.m
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCLogger+Camera.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCLogger/SCCameraMetrics.h>
@interface SCCaptureStateMachineBookKeeper () {
    // Timestamp of the most recent transition; nil until the first transition is recorded.
    NSDate *_lastStateStartTime;
}
@end
@implementation SCCaptureStateMachineBookKeeper
- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
to:(SCCaptureStateMachineStateId)toId
context:(NSString *)context
{
NSDate *date = [NSDate date];
SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n",
SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime],
SCCaptureStateName(toId), context, date);
_lastStateStartTime = date;
}
- (void)state:(SCCaptureStateMachineStateId)captureState
illegalAPIcalled:(NSString *)illegalAPIName
callStack:(NSArray<NSString *> *)callStack
context:(NSString *)context
{
SCAssert(callStack, @"call stack empty");
SCAssert(illegalAPIName, @"");
SCAssert(context, @"Context is empty");
SCLogCaptureStateMachineError(@"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \n",
SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack);
NSArray<NSString *> *reportedArray =
[callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack;
[[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall
parameters:@{
@"state" : SCCaptureStateName(captureState),
@"API" : illegalAPIName,
@"call_stack" : reportedArray,
@"context" : context
}];
}
- (void)logAPICalled:(NSString *)apiName context:(NSString *)context
{
SCAssert(apiName, @"API name is empty");
SCAssert(context, @"Context is empty");
SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context);
}
@end

76
ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h

@ -0,0 +1,76 @@
//
// SCCaptureStateMachineContext.h
// Snapchat
//
// Created by Lin Jia on 10/18/17.
//
//
#import "SCCaptureCommon.h"
#import "SCManagedCaptureDevice.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <Foundation/Foundation.h>
/*
SCCaptureStateMachineContext is the central piece that glues all states together.
It will pass API calls to the current state.
The classic state machine design pattern:
https://en.wikipedia.org/wiki/State_pattern
It is also the delegate for the states it manages, so that those states can tell stateMachineContext to transit to next
state.
*/
@class SCCaptureResource;
@class SCCapturerToken;
@interface SCCaptureStateMachineContext : NSObject

- (instancetype)initWithResource:(SCCaptureResource *)resource;

// Sets up capture for the given camera position; the current state handles the call
// asynchronously on the capture queue, then invokes completionHandler.
- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;

// Starts the capture session and returns a token that must be handed back to stop running.
- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context;

// Same as above, but performs the stop after `delay` seconds.
- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
after:(NSTimeInterval)delay
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context;

// Prepares the audio session ahead of recording to reduce start latency.
- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
context:(NSString *)context;

// Starts video recording to fileURL with the given settings; completionHandler reports
// the outcome of starting the recording.
- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
audioConfiguration:(SCAudioConfiguration *)configuration
maxDuration:(NSTimeInterval)maxDuration
fileURL:(NSURL *)fileURL
captureSessionID:(NSString *)captureSessionID
completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
context:(NSString *)context;

// Finishes the in-flight recording, keeping the captured video.
- (void)stopRecordingWithContext:(NSString *)context;

// Aborts the in-flight recording, discarding the captured video.
- (void)cancelRecordingWithContext:(NSString *)context;

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
context:(NSString *)context;

#pragma mark - Scanning
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;
@end

301
ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m

@ -0,0 +1,301 @@
//
// SCCaptureStateMachineContext.m
// Snapchat
//
// Created by Lin Jia on 10/18/17.
//
//
#import "SCCaptureStateMachineContext.h"
#import "SCCaptureBaseState.h"
#import "SCCaptureImageState.h"
#import "SCCaptureImageWhileRecordingState.h"
#import "SCCaptureInitializedState.h"
#import "SCCaptureRecordingState.h"
#import "SCCaptureResource.h"
#import "SCCaptureRunningState.h"
#import "SCCaptureScanningState.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureUninitializedState.h"
#import "SCCaptureWorker.h"
#import "SCCapturerToken.h"
#import "SCStateTransitionPayload.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Performance.h>
@interface SCCaptureStateMachineContext () <SCCaptureStateDelegate> {
    SCQueuePerformer *_queuePerformer;

    // Lazily-populated cache of every state visited so far, keyed by boxed state id.
    NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> *_states;
    SCCaptureBaseState *_currentState;
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCCaptureResource *_captureResource;
}
@end

@implementation SCCaptureStateMachineContext

- (instancetype)initWithResource:(SCCaptureResource *)resource
{
    self = [super init];
    if (self) {
        SCAssert(resource, @"");
        SCAssert(resource.queuePerformer, @"");
        _captureResource = resource;
        _queuePerformer = resource.queuePerformer;
        _states = [[NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> alloc] init];
        _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init];
        // The machine always starts out uninitialized.
        [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext];
    }
    return self;
}

#pragma mark - State management

// Maps a state id to the concrete SCCaptureBaseState subclass implementing it.
// Returns Nil for ids that are not instantiable states (e.g. the abstract base id).
- (Class)_stateClassForStateId:(SCCaptureStateMachineStateId)stateId
{
    switch (stateId) {
    case SCCaptureUninitializedStateId:
        return [SCCaptureUninitializedState class];
    case SCCaptureInitializedStateId:
        return [SCCaptureInitializedState class];
    case SCCaptureRunningStateId:
        return [SCCaptureRunningState class];
    case SCCaptureImageStateId:
        return [SCCaptureImageState class];
    case SCCaptureImageWhileRecordingStateId:
        return [SCCaptureImageWhileRecordingState class];
    case SCCaptureScanningStateId:
        return [SCCaptureScanningState class];
    case SCCaptureRecordingStateId:
        return [SCCaptureRecordingState class];
    default:
        return Nil;
    }
}

// Makes stateId the current state (creating and caching the state object on first use)
// and notifies it that it became current. Every state subclass shares the same
// designated initializer, which lets them be instantiated generically here instead of
// through one duplicated branch per state.
- (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId
                 payload:(SCStateTransitionPayload *)payload
                 context:(NSString *)context
{
    Class stateClass = [self _stateClassForStateId:stateId];
    SCAssert(stateClass, @"illegal state Id");
    if (!stateClass) {
        // In release builds (asserts compiled out) ignore a bogus transition request
        // rather than corrupting the current state.
        return;
    }
    SCCaptureBaseState *state = [_states objectForKey:@(stateId)];
    if (!state) {
        state = [[stateClass alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
        [_states setObject:state forKey:@(stateId)];
    }
    _currentState = state;
    [_currentState didBecomeCurrentState:payload resource:_captureResource context:context];
}

#pragma mark - API forwarding
// Every public API hops onto the capture queue and forwards to the current state.
// NOTE: the blocks below reference ivars directly, which captures self strongly; this
// keeps the context alive until the queued work runs.

- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                                        completionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    [SCCaptureWorker setupCapturePreviewLayerController];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState initializeCaptureWithDevicePosition:devicePosition
                                                  resource:_captureResource
                                         completionHandler:completionHandler
                                                   context:context];
    }];
}

- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""];
    // The token is created synchronously so the caller can hold onto it right away.
    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRunningWithCapturerToken:token
                                            resource:_captureResource
                                   completionHandler:completionHandler
                                             context:context];
    }];
    return token;
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                               after:(NSTimeInterval)delay
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }
                       after:delay];
}

- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                                        context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState prepareForRecordingWithResource:_captureResource
                                    audioConfiguration:configuration
                                               context:context];
    }];
}

- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                      audioConfiguration:(SCAudioConfiguration *)configuration
                             maxDuration:(NSTimeInterval)maxDuration
                                 fileURL:(NSURL *)fileURL
                        captureSessionID:(NSString *)captureSessionID
                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                 context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRecordingWithResource:_captureResource
                               audioConfiguration:configuration
                                   outputSettings:outputSettings
                                      maxDuration:maxDuration
                                          fileURL:fileURL
                                 captureSessionID:captureSessionID
                                completionHandler:completionHandler
                                          context:context];
    }];
}

- (void)stopRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRecordingWithResource:_captureResource context:context];
    }];
}

- (void)cancelRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState cancelRecordingWithResource:_captureResource context:context];
    }];
}

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState captureStillImageWithResource:_captureResource
                                         aspectRatio:aspectRatio
                                    captureSessionID:captureSessionID
                                   completionHandler:completionHandler
                                             context:context];
    }];
}

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context];
    }];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context];
    }];
}

#pragma mark - SCCaptureStateDelegate

- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context
{
    SCAssertPerformer(_queuePerformer);
    SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]),
             SCCaptureStateName(newState), context);
    if (payload) {
        SCAssert(payload.fromState == [state stateId], @"From state id check");
        SCAssert(payload.toState == newState, @"To state id check");
    }
    // The asserts above are compiled out in release builds, so guard explicitly against
    // a stale (non-current) state requesting a transition.
    if (_currentState != state) {
        return;
    }
    [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context];
    [self _setCurrentState:newState payload:payload context:context];
}

@end

37
ManagedCapturer/StateMachine/SCCaptureStateUtil.h

@ -0,0 +1,37 @@
//
// SCCaptureStateUtil.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//
#import "SCLogger+Camera.h"
#import <SCBase/SCMacros.h>
#import <SCFoundation/SCLog.h>
#import <Foundation/Foundation.h>
// Logging macros that tag all state machine log lines with a common prefix.
#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)
#define SCLogCaptureStateMachineError(fmt, ...) SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)

// Dictionary key type used when caching states: a boxed SCCaptureStateMachineStateId.
typedef NSNumber SCCaptureStateKey;

// Identifier for every state the capture state machine can be in.
// SCCaptureStateMachineStateIdCount is a sentinel (total number of ids), not a state.
typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) {
SCCaptureBaseStateId = 0,
SCCaptureUninitializedStateId,
SCCaptureInitializedStateId,
SCCaptureImageStateId,
SCCaptureImageWhileRecordingStateId,
SCCaptureRunningStateId,
SCCaptureRecordingStateId,
SCCaptureScanningStateId,
SCCaptureStateMachineStateIdCount
};

SC_EXTERN_C_BEGIN
// Returns a human-readable name for the given state id (used for logging and metrics).
NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId);
SC_EXTERN_C_END

38
ManagedCapturer/StateMachine/SCCaptureStateUtil.m

@ -0,0 +1,38 @@
//
// SCCaptureStateUtil.m
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//
#import "SCCaptureStateUtil.h"
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
/*
 Returns a human-readable name for a capture state machine state id; used in logs and
 metrics. Unknown ids assert in debug builds and fall back to @"SCIllegalStateId".
 */
NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId)
{
    switch (stateId) {
    case SCCaptureBaseStateId:
        return @"SCCaptureBaseStateId";
    case SCCaptureUninitializedStateId:
        return @"SCCaptureUninitializedStateId";
    case SCCaptureInitializedStateId:
        return @"SCCaptureInitializedStateId";
    case SCCaptureImageStateId:
        return @"SCCaptureImageStateId";
    case SCCaptureImageWhileRecordingStateId:
        return @"SCCaptureImageWhileRecordingStateId";
    case SCCaptureRunningStateId:
        return @"SCCaptureRunningStateId";
    case SCCaptureRecordingStateId:
        return @"SCCaptureRecordingStateId";
    case SCCaptureScanningStateId:
        return @"SCCaptureScanningStateId";
    default:
        // Typo fix: the assert message previously read "illegate state id".
        SCCAssert(NO, @"illegal state id");
        break;
    }
    return @"SCIllegalStateId";
}

12
ManagedCapturer/StateMachine/SCManagedCapturerLogging.h

@ -0,0 +1,12 @@
//
// SCManagedCapturerLogging.h
// Snapchat
//
// Created by Lin Jia on 11/13/17.
//
#import <SCFoundation/SCLog.h>
#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)

22
ManagedCapturer/StateMachine/States/SCCaptureImageState.h

@ -0,0 +1,22 @@
//
// SCCaptureImageState.h
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
@class SCQueuePerformer;
// Transient state entered while a still image is being captured; it kicks off the
// capture and transitions back to the running state.
@interface SCCaptureImageState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

// Designated initializer; performer is the capture queue, delegate is the state
// machine context that owns this state.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate;
@end

65
ManagedCapturer/StateMachine/States/SCCaptureImageState.m

@ -0,0 +1,65 @@
//
// SCCaptureImageState.m
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//
#import "SCCaptureImageState.h"
#import "SCCaptureImageStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCStateTransitionPayload.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCCaptureImageState () {
// Weak back-reference to the state machine context, which owns this state.
__weak id<SCCaptureStateDelegate> _delegate;
SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureImageState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate
{
self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
if (self) {
_delegate = delegate;
_performer = performer;
}
return self;
}

// Entered when a still-image capture was requested: starts the capture via
// SCCaptureWorker and requests a transition back to the running state.
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
resource:(SCCaptureResource *)resource
context:(NSString *)context
{
SCAssertPerformer(_performer);
SCAssert(payload.toState == [self stateId], @"");
// A wrong payload type is a programming error; recover by bouncing back to the
// state we came from.
if (![payload isKindOfClass:[SCCaptureImageStateTransitionPayload class]]) {
SCAssertFail(@"wrong payload pass in");
[_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];
return;
}
SCCaptureImageStateTransitionPayload *captureImagePayload = (SCCaptureImageStateTransitionPayload *)payload;
[SCCaptureWorker
captureStillImageWithCaptureResource:resource
aspectRatio:captureImagePayload.aspectRatio
captureSessionID:captureImagePayload.captureSessionID
shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]
completionHandler:captureImagePayload.block
context:context];
// NOTE(review): the transition back to running is requested immediately, while the
// still-image capture completes asynchronously — confirm this ordering is intended.
[_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
}

- (SCCaptureStateMachineStateId)stateId
{
return SCCaptureImageStateId;
}
@end

29
ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h

@ -0,0 +1,29 @@
//
// SCCaptureImageStateTransitionPayload.h
// Snapchat
//
// Created by Lin Jia on 1/9/18.
//
#import "SCCaptureCommon.h"
#import "SCStateTransitionPayload.h"
#import <Foundation/Foundation.h>
// Payload carried into SCCaptureImageState: everything needed to perform one
// still-image capture.
@interface SCCaptureImageStateTransitionPayload : SCStateTransitionPayload

// Identifier of the capture session the still image belongs to.
// `copy` rather than `strong`: NSString has a mutable subclass, so copy keeps the
// value stable even if the caller passed an NSMutableString.
@property (nonatomic, readonly, copy) NSString *captureSessionID;

// Completion handler invoked with the captured still image (or an error).
@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;

// Aspect ratio requested for the still image.
@property (nonatomic, readonly, assign) CGFloat aspectRatio;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;
@end

27
ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m

@ -0,0 +1,27 @@
//
// SCCaptureImageStateTransitionPayload.m
// Snapchat
//
// Created by Lin Jia on 1/9/18.
//
#import "SCCaptureImageStateTransitionPayload.h"
@implementation SCCaptureImageStateTransitionPayload

// Designated initializer; stores the capture parameters for the pending still-image
// capture alongside the from/to state ids kept by the superclass.
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        // Defensive copy: the caller could hand us an NSMutableString.
        _captureSessionID = [captureSessionID copy];
        _aspectRatio = aspectRatio;
        // The property is declared `copy`; copy explicitly since we assign the ivar directly.
        _block = [block copy];
    }
    return self;
}
@end

22
ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h

@ -0,0 +1,22 @@
//
// SCCaptureImageWhileRecordingState.h
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
@class SCQueuePerformer;
// Transient state for taking a still image while a video recording is in flight:
// it captures the image, cancels the recording, then returns to the running state.
@interface SCCaptureImageWhileRecordingState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

// Designated initializer; performer is the capture queue, delegate is the state
// machine context that owns this state.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate;
@end

85
ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m

@ -0,0 +1,85 @@
//
// SCCaptureImageWhileRecordingState.m
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//
#import "SCCaptureImageWhileRecordingState.h"
#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCCaptureImageWhileRecordingState () {
    // Weak back-reference to the state machine context, which owns this state.
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureImageWhileRecordingState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureImageWhileRecordingStateId;
}

// Entered from the recording state: forwards the still image to the caller, then
// cancels the in-flight recording on the performer queue, and requests a transition
// back to the running state.
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCAssert(payload.fromState == SCCaptureRecordingStateId, @"");
    SCAssert(payload.toState == [self stateId], @"");
    SCAssert([payload isKindOfClass:[SCCaptureImageWhileRecordingStateTransitionPayload class]], @"");
    SCCaptureImageWhileRecordingStateTransitionPayload *captureImagePayload =
        (SCCaptureImageWhileRecordingStateTransitionPayload *)payload;
    // FIX: the previous implementation referenced the `_performer` ivar directly inside
    // the completion block, which captures `self` strongly and defeats @weakify; go
    // through the weak reference instead (the strongified self also provides access to
    // the ivar via self->_performer).
    @weakify(self);
    sc_managed_capturer_capture_still_image_completion_handler_t block =
        ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) {
            captureImagePayload.block(fullScreenImage, metadata, error, state);
            @strongify(self);
            if (!self) {
                return;
            }
            [self->_performer perform:^{
                [self _cancelRecordingWithContext:context resource:resource];
            }];
        };
    [SCCaptureWorker
        captureStillImageWithCaptureResource:resource
                                 aspectRatio:captureImagePayload.aspectRatio
                            captureSessionID:captureImagePayload.captureSessionID
                      shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]
                           completionHandler:block
                                     context:context];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
}

// Cancels the in-flight recording and records the API call with the book keeper.
- (void)_cancelRecordingWithContext:(NSString *)context resource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);
    [SCCaptureWorker cancelRecordingWithCaptureResource:resource];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}
@end

29
ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h

@ -0,0 +1,29 @@
//
// SCCaptureImageWhileRecordingStateTransitionPayload.h
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//
#import "SCCaptureCommon.h"
#import "SCStateTransitionPayload.h"
#import <Foundation/Foundation.h>
// Payload carried into SCCaptureImageWhileRecordingState: everything needed to take a
// still image while a recording is in flight.
@interface SCCaptureImageWhileRecordingStateTransitionPayload : SCStateTransitionPayload

// Identifier of the capture session the still image belongs to.
// `copy` rather than `strong`: NSString has a mutable subclass, so copy keeps the
// value stable even if the caller passed an NSMutableString.
@property (nonatomic, readonly, copy) NSString *captureSessionID;

// Completion handler invoked with the captured still image (or an error).
@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;

// Aspect ratio requested for the still image.
@property (nonatomic, readonly, assign) CGFloat aspectRatio;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;
@end

27
ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m

@ -0,0 +1,27 @@
//
// SCCaptureImageWhileRecordingStateTransitionPayload.m
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//
#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"
@implementation SCCaptureImageWhileRecordingStateTransitionPayload

// Designated initializer; stores the capture parameters for the pending still-image
// capture alongside the from/to state ids kept by the superclass.
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        // Defensive copy: the caller could hand us an NSMutableString.
        _captureSessionID = [captureSessionID copy];
        _aspectRatio = aspectRatio;
        // The property is declared `copy`; copy explicitly since we assign the ivar directly.
        _block = [block copy];
    }
    return self;
}
@end

22
ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h

@ -0,0 +1,22 @@
//
// SCCaptureInitializedState.h
// Snapchat
//
// Created by Jingtian Yang on 20/12/2017.
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
@class SCQueuePerformer;
// State after capture has been initialized but before the session is running.
@interface SCCaptureInitializedState : SCCaptureBaseState

// Consistency fix: use the same unavailability macro as every sibling state header
// (previously only -init was marked NS_UNAVAILABLE, leaving +new usable).
SC_INIT_AND_NEW_UNAVAILABLE

// Designated initializer; performer is the capture queue, delegate is the state
// machine context that owns this state.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
@end

68
ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m

@ -0,0 +1,68 @@
//
// SCCaptureInitializedState.m
// Snapchat
//
// Created by Jingtian Yang on 20/12/2017.
//
#import "SCCaptureInitializedState.h"
#import "SCCapturerToken.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCCaptureInitializedState () {
// Weak back-reference to the state machine context, which owns this state.
__weak id<SCCaptureStateDelegate> _delegate;
SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureInitializedState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate
{
self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
if (self) {
_delegate = delegate;
_performer = performer;
}
return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
resource:(SCCaptureResource *)resource
context:(NSString *)context
{
// No op.
}

- (SCCaptureStateMachineStateId)stateId
{
return SCCaptureInitializedStateId;
}

// The only API legal in this state: starts the session running via the worker and
// transitions to the running state, then books the API call.
- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
resource:(SCCaptureResource *)resource
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
SCAssertPerformer(_performer);
SCTraceODPCompatibleStart(2);
SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token);
[SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];
[_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
NSString *apiName =
[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
[self.bookKeeper logAPICalled:apiName context:context];
}
@end

22
ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h

@ -0,0 +1,22 @@
//
// SCCaptureRecordingState.h
// Snapchat
//
// Created by Jingtian Yang on 12/01/2018.
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
@class SCQueuePerformer;
// State while a video recording is in flight; supports stopping, canceling, and
// taking a still image mid-recording.
@interface SCCaptureRecordingState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

// Designated initializer; performer is the capture queue, delegate is the state
// machine context that owns this state.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate;
@end

114
ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m

@ -0,0 +1,114 @@
//
// SCCaptureRecordingState.m
// Snapchat
//
// Created by Jingtian Yang on 12/01/2018.
//
#import "SCCaptureRecordingState.h"
#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"
#import "SCCaptureRecordingStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCStateTransitionPayload.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCCaptureRecordingState () {
// Weak back-reference to the state machine context, which owns this state.
__weak id<SCCaptureStateDelegate> _delegate;
SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureRecordingState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
delegate:(id<SCCaptureStateDelegate>)delegate
{
self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
if (self) {
_delegate = delegate;
_performer = performer;
}
return self;
}

// Entered when a recording was requested: validates the payload and starts the
// recording via SCCaptureWorker.
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
resource:(SCCaptureResource *)resource
context:(NSString *)context
{
// NOTE(review): this asserts on resource.queuePerformer while the other methods
// assert on the _performer ivar — presumably the same queue; confirm.
SCAssertPerformer(resource.queuePerformer);
SCAssert(payload.toState == [self stateId], @"");
// A wrong payload type is a programming error; recover by bouncing back to the
// state we came from.
if (![payload isKindOfClass:[SCCaptureRecordingStateTransitionPayload class]]) {
SCAssertFail(@"wrong payload pass in");
[_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];
return;
}
SCCaptureRecordingStateTransitionPayload *recordingPayload = (SCCaptureRecordingStateTransitionPayload *)payload;
[SCCaptureWorker startRecordingWithCaptureResource:resource
outputSettings:recordingPayload.outputSettings
audioConfiguration:recordingPayload.configuration
maxDuration:recordingPayload.maxDuration
fileURL:recordingPayload.fileURL
captureSessionID:recordingPayload.captureSessionID
completionHandler:recordingPayload.block];
}

// Finishes the recording (keeping the video) and transitions back to running.
- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
SCTraceODPCompatibleStart(2);
SCAssertPerformer(_performer);
[SCCaptureWorker stopRecordingWithCaptureResource:resource];
[_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
NSString *apiName =
[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
[self.bookKeeper logAPICalled:apiName context:context];
}

// Aborts the recording (discarding the video) and transitions back to running.
- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
SCTraceODPCompatibleStart(2);
SCAssertPerformer(_performer);
[SCCaptureWorker cancelRecordingWithCaptureResource:resource];
[_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
NSString *apiName =
[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
[self.bookKeeper logAPICalled:apiName context:context];
}

- (SCCaptureStateMachineStateId)stateId
{
return SCCaptureRecordingStateId;
}

// Taking a still image while recording is handled by a dedicated state; package the
// request into a payload and ask the delegate to transition there.
- (void)captureStillImageWithResource:(SCCaptureResource *)resource
aspectRatio:(CGFloat)aspectRatio
captureSessionID:(NSString *)captureSessionID
completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
context:(NSString *)context
{
SCAssertPerformer(_performer);
SCCaptureImageWhileRecordingStateTransitionPayload *payload = [
[SCCaptureImageWhileRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRecordingStateId
toState:SCCaptureImageWhileRecordingStateId
captureSessionId:captureSessionID
aspectRatio:aspectRatio
completionHandler:completionHandler];
[_delegate currentState:self
requestToTransferToNewState:SCCaptureImageWhileRecordingStateId
payload:payload
context:context];
NSString *apiName =
[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
[self.bookKeeper logAPICalled:apiName context:context];
}
@end

41
ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h

@@ -0,0 +1,41 @@
//
// SCCaptureRecordingStateTransitionPayload.h
// Snapchat
//
// Created by Jingtian Yang on 12/01/2018.
//
#import "SCCaptureCommon.h"
#import "SCManagedVideoCapturerOutputSettings.h"
#import "SCStateTransitionPayload.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <Foundation/Foundation.h>
/**
 Payload handed to the capture state machine when transferring from the running
 state into the recording state; carries everything the recording state needs
 to start a video recording.
 */
@interface SCCaptureRecordingStateTransitionPayload : SCStateTransitionPayload
// Output settings (format, bitrate, etc.) for the video capturer.
@property (nonatomic, readonly, strong) SCManagedVideoCapturerOutputSettings *outputSettings;
// Audio configuration to use while recording.
@property (nonatomic, readonly, strong) SCAudioConfiguration *configuration;
// Maximum recording duration, in seconds.
@property (nonatomic, readonly, assign) NSTimeInterval maxDuration;
// Destination file URL for the recorded video.
@property (nonatomic, readonly, strong) NSURL *fileURL;
// Identifier of the capture session this recording belongs to.
@property (nonatomic, readonly, strong) NSString *captureSessionID;
// Invoked when recording starts (or fails to start).
@property (nonatomic, readonly, copy) sc_managed_capturer_start_recording_completion_handler_t block;
SC_INIT_AND_NEW_UNAVAILABLE
// Designated initializer; fromState/toState are validated by the superclass.
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
               audioConfiguration:(SCAudioConfiguration *)configuration
                      maxDuration:(NSTimeInterval)maxDuration
                          fileURL:(NSURL *)fileURL
                 captureSessionID:(NSString *)captureSessionID
                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block;
@end

33
ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m

@@ -0,0 +1,33 @@
//
// SCCaptureRecordingStateTransitionPayload.m
// Snapchat
//
// Created by Jingtian Yang on 12/01/2018.
//
#import "SCCaptureRecordingStateTransitionPayload.h"
@implementation SCCaptureRecordingStateTransitionPayload

/**
 Designated initializer. Stores the recording parameters after validating the
 state pair via the superclass initializer.
 */
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
               audioConfiguration:(SCAudioConfiguration *)configuration // was untyped (implicit id); now matches the header
                      maxDuration:(NSTimeInterval)maxDuration
                          fileURL:(NSURL *)fileURL
                 captureSessionID:(NSString *)captureSessionID
                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        _outputSettings = outputSettings;
        _configuration = configuration;
        _maxDuration = maxDuration;
        _fileURL = fileURL;
        _captureSessionID = captureSessionID;
        // The property is declared `copy`; copy explicitly since we assign the ivar directly.
        _block = [block copy];
    }
    return self;
}

@end

22
ManagedCapturer/StateMachine/States/SCCaptureRunningState.h

@@ -0,0 +1,22 @@
//
// SCCaptureRunningState.h
// Snapchat
//
// Created by Jingtian Yang on 08/01/2018.
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
@class SCQueuePerformer;
/**
 State representing an active, idle capture session (camera running, not
 recording or scanning). Handles requests that begin image capture, video
 recording, or scanning by asking the delegate to transfer state.
 */
@interface SCCaptureRunningState : SCCaptureBaseState
- (instancetype)init NS_UNAVAILABLE;
// Designated initializer; all work is expected to run on `performer`.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
@end

176
ManagedCapturer/StateMachine/States/SCCaptureRunningState.m

@@ -0,0 +1,176 @@
//
// SCCaptureRunningState.m
// Snapchat
//
// Created by Jingtian Yang on 08/01/2018.
//
#import "SCCaptureRunningState.h"
#import "SCCaptureImageStateTransitionPayload.h"
#import "SCCaptureRecordingStateTransitionPayload.h"
#import "SCCaptureWorker.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCScanConfiguration.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCCaptureRunningState () {
    // Held weakly: the delegate (the state machine) owns this state.
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureRunningState

/**
 Designated initializer.
 @param performer  queue performer every handler asserts it is running on.
 @param bookKeeper records each API call handled by this state.
 @param delegate   receives state-transfer requests.
 */
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

/**
 Requests a still-image capture by transferring to the capture-image state,
 packing the capture parameters into the transition payload.
 */
- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    SCTraceODPCompatibleStart(2); // Added: every sibling handler in this state starts a trace.
    SCAssertPerformer(_performer);
    SCCaptureImageStateTransitionPayload *payload =
        [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                                toState:SCCaptureImageStateId
                                                       captureSessionId:captureSessionID
                                                            aspectRatio:aspectRatio
                                                      completionHandler:completionHandler];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureImageStateId payload:payload context:context];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

// Identifies this state object within the capture state machine.
- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureRunningStateId;
}

/**
 Starts the capture pipeline for `token`. Stays in the running state.
 */
- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token);
    [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

/**
 Stops the capture pipeline for `token`. Only transfers to the initialized
 state when the shared capturer reports that running actually stopped.
 */
- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);
    SCLogCapturerInfo(@"Stop running asynchronously. token:%@", token);
    if ([[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
                                                        completionHandler:completionHandler
                                                                  context:context]) {
        [_delegate currentState:self
            requestToTransferToNewState:SCCaptureInitializedStateId
                                payload:nil
                                context:context];
    }
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

/**
 Begins a scan with `configuration` and transfers to the scanning state.
 */
- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Start scan on preview asynchronously. configuration:%@", configuration);
    SCAssertPerformer(_performer);
    [SCCaptureWorker startScanWithScanConfiguration:configuration resource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureScanningStateId payload:nil context:context];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

/**
 Warms up recording (audio session etc.) without leaving the running state.
 */
- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker prepareForRecordingWithAudioConfiguration:configuration resource:resource];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

/**
 Starts a video recording by transferring to the recording state, packing all
 recording parameters into the transition payload.
 */
- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);
    SCCaptureRecordingStateTransitionPayload *payload =
        [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                                    toState:SCCaptureRecordingStateId
                                                             outputSettings:outputSettings
                                                         audioConfiguration:configuration
                                                                maxDuration:maxDuration
                                                                    fileURL:fileURL
                                                           captureSessionID:captureSessionID
                                                          completionHandler:completionHandler];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRecordingStateId payload:payload context:context];
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    // Intentionally No Op, this will be removed once CCAM-13851 gets resolved.
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end

18
ManagedCapturer/StateMachine/States/SCCaptureScanningState.h

@@ -0,0 +1,18 @@
//
// SCCaptureScanningState.h
// Snapchat
//
// Created by Xiaokang Liu on 09/01/2018.
//
#import "SCCaptureBaseState.h"
@class SCQueuePerformer;
/**
 State active while a preview scan is in progress; handles stopping the scan
 and returning to the running state.
 */
@interface SCCaptureScanningState : SCCaptureBaseState
- (instancetype)init NS_UNAVAILABLE;
// Designated initializer; all work is expected to run on `performer`.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
@end

75
ManagedCapturer/StateMachine/States/SCCaptureScanningState.m

@@ -0,0 +1,75 @@
//
// SCCaptureScanningState.m
// Snapchat
//
// Created by Xiaokang Liu on 09/01/2018.
//
#import "SCCaptureScanningState.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCCaptureScanningState () {
    // Held weakly: the delegate (the state machine) owns this state.
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureScanningState

// Designated initializer; all collaborators are required.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        SCAssert(delegate, @"");
        SCAssert(performer, @"");
        SCAssert(bookKeeper, @"");
        _performer = performer;
        _delegate = delegate;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

// Identifies this state object within the capture state machine.
- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureScanningStateId;
}

// Ends the scan and transfers back to the running state.
- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"stop scan asynchronously.");
    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
    // Book-keep this API call as "<ClassName>/<selector>".
    [self.bookKeeper logAPICalled:[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]),
                                                                NSStringFromSelector(_cmd)]
                          context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    // Intentionally No Op, this will be removed once CCAM-13851 gets resolved.
    [self.bookKeeper logAPICalled:[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]),
                                                                NSStringFromSelector(_cmd)]
                          context:context];
}

@end

26
ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h

@@ -0,0 +1,26 @@
//
// SCCaptureUninitializedState.h
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//
#import "SCCaptureBaseState.h"
#import <Foundation/Foundation.h>
/*
State which handles capture initialialization, which should be used only once for every app life span.
*/
@class SCQueuePerformer;
@class SCQueuePerformer;
// State which handles capture initialization; expected to be entered only once
// per app life span, before the capture pipeline has been set up.
@interface SCCaptureUninitializedState : SCCaptureBaseState
- (instancetype)init NS_UNAVAILABLE;
// Designated initializer; all work is expected to run on `performer`.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
@end

70
ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m

@@ -0,0 +1,70 @@
//
// SCCaptureUninitializedState.m
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//
#import "SCCaptureUninitializedState.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCCaptureUninitializedState () {
    // Held weakly: the delegate (the state machine) owns this state.
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureUninitializedState

// Designated initializer.
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _performer = performer;
        _delegate = delegate;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

// Identifies this state object within the capture state machine.
- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureUninitializedStateId;
}

// Sets up the capture pipeline and transfers to the initialized state.
- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition);
    // TODO: we need to push completionHandler to a payload and let intializedState handle.
    [[SCManagedCapturerV1 sharedInstance] setupWithDevicePosition:devicePosition completionHandler:completionHandler];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context];
    // Book-keep this API call as "<ClassName>/<selector>".
    [self.bookKeeper logAPICalled:[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]),
                                                                NSStringFromSelector(_cmd)]
                          context:context];
}

@end

22
ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h

@@ -0,0 +1,22 @@
//
// SCStateTransitionPayload.h
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//
#import "SCCaptureStateUtil.h"
#import <Foundation/Foundation.h>
/**
 Base payload carried across a capture state-machine transition; records the
 source and destination state identifiers. Subclasses add transition-specific
 parameters.
 */
@interface SCStateTransitionPayload : NSObject
// State the machine is transferring from.
@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId fromState;
// State the machine is transferring to.
@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId toState;
SC_INIT_AND_NEW_UNAVAILABLE
// Designated initializer; both states must be distinct and valid.
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState;
@end

27
ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m

@@ -0,0 +1,27 @@
//
// SCStateTransitionPayload.m
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//
#import "SCStateTransitionPayload.h"
#import <SCFoundation/SCAssertWrapper.h>
@implementation SCStateTransitionPayload

// Designated initializer; validates that the transition moves between two
// distinct states inside the machine's valid id range.
- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState
{
    if (self = [super init]) {
        SCAssert(fromState != toState, @"");
        SCAssert(fromState > SCCaptureBaseStateId && fromState < SCCaptureStateMachineStateIdCount, @"");
        SCAssert(toState > SCCaptureBaseStateId && toState < SCCaptureStateMachineStateIdCount, @"");
        _fromState = fromState;
        _toState = toState;
    }
    return self;
}

@end
Loading…
Cancel
Save