diff --git a/ManagedCapturer/SCManagedCaptureSession.h b/ManagedCapturer/SCManagedCaptureSession.h new file mode 100644 index 0000000..9d5f1ee --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureSession.h @@ -0,0 +1,67 @@ +// +// SCManagedCaptureSession.h +// Snapchat +// +// Created by Derek Wang on 02/03/2018. +// + +#import + +#import +#import + +/** + `SCManagedCaptureSession` is a wrapper class around `AVCaptureSession`. Its purpose is to provide additional + functionality on top of `AVCaptureSession`. + For example, for black camera detection we need to monitor when certain methods are called. We can also treat it as a + more stable version of `AVCaptureSession` by moving some `AVCaptureSession` fixing logic into this class so that it + provides reliable interfaces to the outside; that is the next step. + It also mimics `AVCaptureSession` by implementing some of its methods. The original methods + on `AVCaptureSession` should no longer be called directly. + */ + +@class SCBlackCameraDetector; + +NS_ASSUME_NONNULL_BEGIN +@interface SCManagedCaptureSession : NSObject + +/** + Exposes the underlying AVCaptureSession. + */ +@property (nonatomic, strong, readonly) AVCaptureSession *avSession; + +/** + Exposes the underlying session's isRunning property for convenience. + */ +@property (nonatomic, readonly, assign) BOOL isRunning; + +/** + Wraps and monitors [AVCaptureSession startRunning]. [AVCaptureSession startRunning] should not be + called directly. + */ +- (void)startRunning; +/** + Wraps and monitors [AVCaptureSession stopRunning]. [AVCaptureSession stopRunning] should not be + called directly. + */ +- (void)stopRunning; + +/** + Wraps and monitors [AVCaptureSession beginConfiguration]. + */ +- (void)beginConfiguration; +/** + Wraps and monitors [AVCaptureSession commitConfiguration]. + */ +- (void)commitConfiguration; +/** + Configures the internal AVCaptureSession with a block. + @param block Configuration block; it is executed between beginConfiguration and commitConfiguration. + */ +- (void)performConfiguration:(void (^)(void))block; + +- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER; +SC_INIT_AND_NEW_UNAVAILABLE + +@end +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCManagedCaptureSession.m b/ManagedCapturer/SCManagedCaptureSession.m new file mode 100644 index 0000000..076c31b --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureSession.m @@ -0,0 +1,74 @@ +// +// SCManagedCaptureSession.m +// Snapchat +// +// Created by Derek Wang on 02/03/2018.
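A minimal usage sketch of the wrapper declared above (editor's illustration, not part of this patch). The `detector` and `cameraInput` objects are hypothetical stand-ins for whatever the caller already owns:

SCManagedCaptureSession *session = [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:detector];
[session performConfiguration:^{
    // Mutations of the underlying AVCaptureSession go inside the block, so the
    // beginConfiguration / commitConfiguration pair (and the black-camera monitoring) wrap them.
    if ([session.avSession canAddInput:cameraInput]) {
        [session.avSession addInput:cameraInput];
    }
}];
[session startRunning]; // always go through the wrapper, not [session.avSession startRunning]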
+// + +#import "SCManagedCaptureSession.h" + +#import "SCBlackCameraDetector.h" + +#import + +@interface SCManagedCaptureSession () { + SCBlackCameraDetector *_blackCameraDetector; +} + +@end + +@implementation SCManagedCaptureSession + +- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector +{ + self = [super init]; + if (self) { + _avSession = [[AVCaptureSession alloc] init]; + _blackCameraDetector = detector; + } + return self; +} + +- (void)startRunning +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCallStartRunning]; + [_avSession startRunning]; + [_blackCameraDetector sessionDidCallStartRunning]; +} + +- (void)stopRunning +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCallStopRunning]; + [_avSession stopRunning]; + [_blackCameraDetector sessionDidCallStopRunning]; +} + +- (void)performConfiguration:(nonnull void (^)(void))block +{ + SC_GUARD_ELSE_RETURN(block); + [self beginConfiguration]; + block(); + [self commitConfiguration]; +} + +- (void)beginConfiguration +{ + [_avSession beginConfiguration]; +} + +- (void)commitConfiguration +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCommitConfiguration]; + [_avSession commitConfiguration]; + [_blackCameraDetector sessionDidCommitConfiguration]; +} + +- (BOOL)isRunning +{ + return _avSession.isRunning; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturer.m b/ManagedCapturer/SCManagedCapturer.m new file mode 100644 index 0000000..d009045 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturer.m @@ -0,0 +1,26 @@ +// +// SCManagedCapturer.m +// Snapchat +// +// Created by Lin Jia on 9/28/17. +// + +#import "SCManagedCapturer.h" + +#import "SCCameraTweaks.h" +#import "SCCaptureCore.h" +#import "SCManagedCapturerV1.h" + +@implementation SCManagedCapturer + ++ (id)sharedInstance +{ + static dispatch_once_t onceToken; + static id managedCapturer; + dispatch_once(&onceToken, ^{ + managedCapturer = [[SCCaptureCore alloc] init]; + }); + return managedCapturer; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerARSessionHandler.h b/ManagedCapturer/SCManagedCapturerARSessionHandler.h new file mode 100644 index 0000000..fcf357c --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerARSessionHandler.h @@ -0,0 +1,26 @@ +// +// SCManagedCapturerARSessionHandler.h +// Snapchat +// +// Created by Xiaokang Liu on 16/03/2018. +// +// This class is used to handle the AVCaptureSession event when ARSession is enabled. +// The stopARSessionRunning will be blocked till the AVCaptureSessionDidStopRunningNotification event has been received +// successfully, +// after then we can restart AVCaptureSession gracefully. + +#import + +#import + +@class SCCaptureResource; + +@interface SCManagedCapturerARSessionHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER; + +- (void)stopObserving; + +- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0); +@end diff --git a/ManagedCapturer/SCManagedCapturerARSessionHandler.m b/ManagedCapturer/SCManagedCapturerARSessionHandler.m new file mode 100644 index 0000000..e262085 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerARSessionHandler.m @@ -0,0 +1,76 @@ +// +// SCManagedCapturerARSessionHandler.m +// Snapchat +// +// Created by Xiaokang Liu on 16/03/2018. 
+// + +#import "SCManagedCapturerARSessionHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureSession.h" + +#import +#import +#import + +@import ARKit; + +static CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2; + +@interface SCManagedCapturerARSessionHandler () { + SCCaptureResource *__weak _captureResource; + dispatch_semaphore_t _arSesssionShutdownSemaphore; +} + +@end + +@implementation SCManagedCapturerARSessionHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + _arSesssionShutdownSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +- (void)stopObserving +{ + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVCaptureSessionDidStopRunningNotification + object:nil]; +} + +- (void)stopARSessionRunning +{ + SCAssertPerformer(_captureResource.queuePerformer); + SCAssert(SC_AT_LEAST_IOS_11, @"Shoule be only call from iOS 11+"); + if (@available(iOS 11.0, *)) { + // ARSession stops its internal AVCaptureSession asynchronously. We listen for its callback and actually restart + // our own capture session once it's finished shutting down so the two ARSessions don't conflict. + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_completeARSessionShutdown:) + name:AVCaptureSessionDidStopRunningNotification + object:nil]; + [_captureResource.arSession pause]; + dispatch_semaphore_wait( + _arSesssionShutdownSemaphore, + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC))); + } +} + +- (void)_completeARSessionShutdown:(NSNotification *)note +{ + // This notification is only registered for IMMEDIATELY before arkit shutdown. + // Explicitly guard that the notification object IS NOT the main session's. + SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]); + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVCaptureSessionDidStopRunningNotification + object:nil]; + dispatch_semaphore_signal(_arSesssionShutdownSemaphore); +} +@end diff --git a/ManagedCapturer/SCManagedCapturerListener.h b/ManagedCapturer/SCManagedCapturerListener.h new file mode 100644 index 0000000..288c201 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListener.h @@ -0,0 +1,135 @@ +//#!announcer.rb +// +// SCManagedCaptuerListener +// Snapchat +// +// Created by Liu Liu on 4/23/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
+// + +#import "SCCapturer.h" +#import "SCManagedCaptureDevice.h" +#import "SCManagedRecordedVideo.h" +#import "SCVideoCaptureSessionInfo.h" + +#import + +#import +#import + +@class SCManagedCapturer; +@class SCManagedCapturerState; +@class LSAGLView; +@class SCManagedCapturerSampleMetadata; + +@protocol SCManagedCapturerListener + +@optional + +// All these calbacks are invoked on main queue + +// Start / stop / reset + +- (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state; + +// Change state methods + +- (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer + didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state; + +// The video preview layer is not maintained as a state, therefore, its change is not related to the state of +// the camera at all, listener show only manage the setup of the videoPreviewLayer. +// Since the AVCaptureVideoPreviewLayer can only attach to one AVCaptureSession per app, it is recommended you +// have a view and controller which manages the video preview layer, and for upper layer, only manage that view +// or view controller, which maintains the pointer consistency. The video preview layer is required to recreate +// every now and then because otherwise we will have cases that the old video preview layer may contain +// residual images. 
+ +- (void)managedCapturer:(id)managedCapturer + didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer; + +- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView; + +// Video recording-related methods + +- (void)managedCapturer:(id)managedCapturer + didBeginVideoRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didBeginAudioRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + willFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage; + +- (void)managedCapturer:(id)managedCapturer + didFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideo:(SCManagedRecordedVideo *)recordedVideo; + +- (void)managedCapturer:(id)managedCapturer + didFailRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + error:(NSError *)error; + +- (void)managedCapturer:(id)managedCapturer + didCancelRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; + +// Photo methods +- (void)managedCapturer:(id)managedCapturer + willCapturePhoto:(SCManagedCapturerState *)state + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; + +- (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state; + +- (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state; + +- (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state; + +// Face detection +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID; +- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint; +- (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint; +@end diff --git a/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h new file mode 100644 index 0000000..2dce0b4 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h @@ -0,0 +1,12 @@ +// Generated by the announcer.rb DO NOT EDIT!! + +#import "SCManagedCapturerListener.h" + +#import + +@interface SCManagedCapturerListenerAnnouncer : NSObject + +- (BOOL)addListener:(id)listener; +- (void)removeListener:(id)listener; + +@end diff --git a/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm new file mode 100644 index 0000000..d4eea38 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm @@ -0,0 +1,505 @@ +// Generated by the announcer.rb DO NOT EDIT!! 
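An illustrative listener conformance (editor's sketch, not part of the diff). Only the optional callbacks a client cares about need to be implemented, and they arrive on the main queue per the comment in SCManagedCapturerListener.h; the id<SCCapturer> parameter type and the registration through the announcer are assumptions:

@interface SCCameraStateObserver : NSObject <SCManagedCapturerListener>
@end

@implementation SCCameraStateObserver

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state
{
    // Delivered on the main queue; the state object is an immutable snapshot.
    NSLog(@"zoom=%f position=%d", state.zoomFactor, (int)state.devicePosition);
}

@end

// Registration, e.g.: [listenerAnnouncer addListener:observer];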
+ +#import "SCManagedCapturerListenerAnnouncer.h" + +#include +using std::lock_guard; +using std::mutex; +#include +using std::find; +using std::make_shared; +using std::shared_ptr; +using std::vector; + +@implementation SCManagedCapturerListenerAnnouncer { + mutex _mutex; + shared_ptr>> _listeners; +} + +- (NSString *)description +{ + auto listeners = atomic_load(&self->_listeners); + NSMutableString *desc = [NSMutableString string]; + [desc appendFormat:@": [", self]; + for (int i = 0; i < listeners->size(); ++i) { + [desc appendFormat:@"%@", (*listeners)[i]]; + if (i != listeners->size() - 1) { + [desc appendString:@", "]; + } + } + [desc appendString:@"]"]; + return desc; +} + +- (BOOL)addListener:(id)listener +{ + lock_guard lock(_mutex); + auto listeners = make_shared>>(); + if (_listeners != nil) { + // The listener we want to add already exists + if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { + return NO; + } + for (auto &one : *_listeners) { + if (one != nil) { + listeners->push_back(one); + } + } + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } else { + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } + return YES; +} + +- (void)removeListener:(id)listener +{ + lock_guard lock(_mutex); + if (_listeners == nil) { + return; + } + // If the only item in the listener list is the one we want to remove, store it back to nil again + if (_listeners->size() == 1 && (*_listeners)[0] == listener) { + atomic_store(&self->_listeners, shared_ptr>>()); + return; + } + auto listeners = make_shared>>(); + for (auto &one : *_listeners) { + if (one != nil && one != listener) { + listeners->push_back(one); + } + } + atomic_store(&self->_listeners, listeners); +} + +- (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) { + [listener managedCapturer:managedCapturer didStartRunning:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) { + [listener managedCapturer:managedCapturer didStopRunning:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) { + [listener managedCapturer:managedCapturer didResetFromRuntimeError:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { + [listener managedCapturer:managedCapturer didChangeState:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener 
respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { + [listener managedCapturer:managedCapturer didChangeNightModeActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) { + [listener managedCapturer:managedCapturer didChangePortraitModeActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { + [listener managedCapturer:managedCapturer didChangeFlashActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { + [listener managedCapturer:managedCapturer didChangeLensesActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) { + [listener managedCapturer:managedCapturer didChangeARSessionActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { + [listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { + [listener managedCapturer:managedCapturer didChangeZoomFactor:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { + [listener managedCapturer:managedCapturer didChangeLowLightCondition:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { + [listener managedCapturer:managedCapturer didChangeAdjustingExposure:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { 
+ for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { + [listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { + [listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { + [listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didBeginVideoRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) { + [listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didBeginAudioRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) { + [listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + willFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer: + willFinishRecording: + session: + recordedVideoFuture: + videoSize: + placeholderImage:)]) { + [listener managedCapturer:managedCapturer + willFinishRecording:state + session:session + recordedVideoFuture:recordedVideoFuture + videoSize:videoSize + placeholderImage:placeholderImage]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideo:(SCManagedRecordedVideo *)recordedVideo +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) { + [listener managedCapturer:managedCapturer + didFinishRecording:state + session:session + recordedVideo:recordedVideo]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didFailRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + error:(NSError *)error +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for 
(id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) { + [listener managedCapturer:managedCapturer didFailRecording:state session:session error:error]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didCancelRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) { + [listener managedCapturer:managedCapturer didCancelRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) { + [listener managedCapturer:managedCapturer didGetError:error forType:type session:session]; + } + } + } +} + +- (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) { + [listener managedCapturerDidCallLenseResume:managedCapturer session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) { + [listener managedCapturer:managedCapturer + didAppendVideoSampleBuffer:sampleBuffer + sampleMetadata:sampleMetadata]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + willCapturePhoto:(SCManagedCapturerState *)state + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) { + [listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) { + [listener managedCapturer:managedCapturer didCapturePhoto:state]; + } + } + } +} + +- (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) { + return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state]; + } + } + } + return NO; +} + +- (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : 
*listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) { + return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state]; + } + } + } + return NO; +} + +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) { + [listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) { + [listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) { + [listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint]; + } + } + } +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerSampleMetadata.h b/ManagedCapturer/SCManagedCapturerSampleMetadata.h new file mode 100644 index 0000000..50e9c6d --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerSampleMetadata.h @@ -0,0 +1,26 @@ +// +// SCRecordingMetadata.h +// Snapchat +// + +#import + +#import +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface SCManagedCapturerSampleMetadata : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp + fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER; + +@property (nonatomic, readonly) CMTime presentationTimestamp; + +@property (nonatomic, readonly) float fieldOfView; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCManagedCapturerSampleMetadata.m b/ManagedCapturer/SCManagedCapturerSampleMetadata.m new file mode 100644 index 0000000..8b08fc4 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerSampleMetadata.m @@ -0,0 +1,24 @@ +// +// SCRecordingMetadata.m +// Snapchat +// + +#import "SCManagedCapturerSampleMetadata.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation SCManagedCapturerSampleMetadata + +- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView +{ + self = [super init]; + if (self) { + _presentationTimestamp = presentationTimestamp; + _fieldOfView = fieldOfView; + } + return self; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCManagedCapturerState.h b/ManagedCapturer/SCManagedCapturerState.h new file mode 100644 index 0000000..439c0a1 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.h @@ -0,0 +1,93 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! 
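A small sketch of how the sample metadata value above is typically built from a frame (editor's illustration; taking fieldOfView from the active format's videoFieldOfView is an assumption):

CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
float fov = captureDevice.activeFormat.videoFieldOfView; // hypothetical AVCaptureDevice
SCManagedCapturerSampleMetadata *metadata =
    [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:pts fieldOfView:fov];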
+ +#import "SCManagedCaptureDevice.h" + +#import + +#import +#import + +@protocol SCManagedCapturerState + +@property (nonatomic, assign, readonly) BOOL isRunning; + +@property (nonatomic, assign, readonly) BOOL isNightModeActive; + +@property (nonatomic, assign, readonly) BOOL isPortraitModeActive; + +@property (nonatomic, assign, readonly) BOOL lowLightCondition; + +@property (nonatomic, assign, readonly) BOOL adjustingExposure; + +@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; + +@property (nonatomic, assign, readonly) CGFloat zoomFactor; + +@property (nonatomic, assign, readonly) BOOL flashSupported; + +@property (nonatomic, assign, readonly) BOOL torchSupported; + +@property (nonatomic, assign, readonly) BOOL flashActive; + +@property (nonatomic, assign, readonly) BOOL torchActive; + +@property (nonatomic, assign, readonly) BOOL lensesActive; + +@property (nonatomic, assign, readonly) BOOL arSessionActive; + +@property (nonatomic, assign, readonly) BOOL liveVideoStreaming; + +@property (nonatomic, assign, readonly) BOOL lensProcessorReady; + +@end + +@interface SCManagedCapturerState : NSObject + +@property (nonatomic, assign, readonly) BOOL isRunning; + +@property (nonatomic, assign, readonly) BOOL isNightModeActive; + +@property (nonatomic, assign, readonly) BOOL isPortraitModeActive; + +@property (nonatomic, assign, readonly) BOOL lowLightCondition; + +@property (nonatomic, assign, readonly) BOOL adjustingExposure; + +@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; + +@property (nonatomic, assign, readonly) CGFloat zoomFactor; + +@property (nonatomic, assign, readonly) BOOL flashSupported; + +@property (nonatomic, assign, readonly) BOOL torchSupported; + +@property (nonatomic, assign, readonly) BOOL flashActive; + +@property (nonatomic, assign, readonly) BOOL torchActive; + +@property (nonatomic, assign, readonly) BOOL lensesActive; + +@property (nonatomic, assign, readonly) BOOL arSessionActive; + +@property (nonatomic, assign, readonly) BOOL liveVideoStreaming; + +@property (nonatomic, assign, readonly) BOOL lensProcessorReady; + +- (instancetype)initWithIsRunning:(BOOL)isRunning + isNightModeActive:(BOOL)isNightModeActive + isPortraitModeActive:(BOOL)isPortraitModeActive + lowLightCondition:(BOOL)lowLightCondition + adjustingExposure:(BOOL)adjustingExposure + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + zoomFactor:(CGFloat)zoomFactor + flashSupported:(BOOL)flashSupported + torchSupported:(BOOL)torchSupported + flashActive:(BOOL)flashActive + torchActive:(BOOL)torchActive + lensesActive:(BOOL)lensesActive + arSessionActive:(BOOL)arSessionActive + liveVideoStreaming:(BOOL)liveVideoStreaming + lensProcessorReady:(BOOL)lensProcessorReady; + +@end diff --git a/ManagedCapturer/SCManagedCapturerState.m b/ManagedCapturer/SCManagedCapturerState.m new file mode 100644 index 0000000..d9b9454 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.m @@ -0,0 +1,359 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! 
+ +#import "SCManagedCapturerState.h" + +#import + +#import + +@implementation SCManagedCapturerState + +static ptrdiff_t sSCManagedCapturerStateOffsets[0]; +static BOOL sSCManagedCapturerStateHasOffsets; + +- (instancetype)initWithIsRunning:(BOOL)isRunning + isNightModeActive:(BOOL)isNightModeActive + isPortraitModeActive:(BOOL)isPortraitModeActive + lowLightCondition:(BOOL)lowLightCondition + adjustingExposure:(BOOL)adjustingExposure + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + zoomFactor:(CGFloat)zoomFactor + flashSupported:(BOOL)flashSupported + torchSupported:(BOOL)torchSupported + flashActive:(BOOL)flashActive + torchActive:(BOOL)torchActive + lensesActive:(BOOL)lensesActive + arSessionActive:(BOOL)arSessionActive + liveVideoStreaming:(BOOL)liveVideoStreaming + lensProcessorReady:(BOOL)lensProcessorReady +{ + self = [super init]; + if (self) { + _isRunning = isRunning; + _isNightModeActive = isNightModeActive; + _isPortraitModeActive = isPortraitModeActive; + _lowLightCondition = lowLightCondition; + _adjustingExposure = adjustingExposure; + _devicePosition = devicePosition; + _zoomFactor = zoomFactor; + _flashSupported = flashSupported; + _torchSupported = torchSupported; + _flashActive = flashActive; + _torchActive = torchActive; + _lensesActive = lensesActive; + _arSessionActive = arSessionActive; + _liveVideoStreaming = liveVideoStreaming; + _lensProcessorReady = lensProcessorReady; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _isRunning = [aDecoder decodeBoolForKey:@"isRunning"]; + _isNightModeActive = [aDecoder decodeBoolForKey:@"isNightModeActive"]; + _isPortraitModeActive = [aDecoder decodeBoolForKey:@"isPortraitModeActive"]; + _lowLightCondition = [aDecoder decodeBoolForKey:@"lowLightCondition"]; + _adjustingExposure = [aDecoder decodeBoolForKey:@"adjustingExposure"]; + _devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@"devicePosition"]; + _zoomFactor = [aDecoder decodeFloatForKey:@"zoomFactor"]; + _flashSupported = [aDecoder decodeBoolForKey:@"flashSupported"]; + _torchSupported = [aDecoder decodeBoolForKey:@"torchSupported"]; + _flashActive = [aDecoder decodeBoolForKey:@"flashActive"]; + _torchActive = [aDecoder decodeBoolForKey:@"torchActive"]; + _lensesActive = [aDecoder decodeBoolForKey:@"lensesActive"]; + _arSessionActive = [aDecoder decodeBoolForKey:@"arSessionActive"]; + _liveVideoStreaming = [aDecoder decodeBoolForKey:@"liveVideoStreaming"]; + _lensProcessorReady = [aDecoder decodeBoolForKey:@"lensProcessorReady"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeBool:_isRunning forKey:@"isRunning"]; + [aCoder encodeBool:_isNightModeActive forKey:@"isNightModeActive"]; + [aCoder encodeBool:_isPortraitModeActive forKey:@"isPortraitModeActive"]; + [aCoder encodeBool:_lowLightCondition forKey:@"lowLightCondition"]; + [aCoder encodeBool:_adjustingExposure forKey:@"adjustingExposure"]; + [aCoder encodeInteger:(NSInteger)_devicePosition forKey:@"devicePosition"]; + [aCoder encodeFloat:_zoomFactor forKey:@"zoomFactor"]; + [aCoder encodeBool:_flashSupported forKey:@"flashSupported"]; + [aCoder encodeBool:_torchSupported forKey:@"torchSupported"]; + [aCoder encodeBool:_flashActive forKey:@"flashActive"]; + [aCoder encodeBool:_torchActive forKey:@"torchActive"]; 
+ [aCoder encodeBool:_lensesActive forKey:@"lensesActive"]; + [aCoder encodeBool:_arSessionActive forKey:@"arSessionActive"]; + [aCoder encodeBool:_liveVideoStreaming forKey:@"liveVideoStreaming"]; + [aCoder encodeBool:_lensProcessorReady forKey:@"lensProcessorReady"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeBool:_adjustingExposure]; + [fasterCoder encodeBool:_arSessionActive]; + [fasterCoder encodeSInt32:_devicePosition]; + [fasterCoder encodeBool:_flashActive]; + [fasterCoder encodeBool:_flashSupported]; + [fasterCoder encodeBool:_isNightModeActive]; + [fasterCoder encodeBool:_isPortraitModeActive]; + [fasterCoder encodeBool:_isRunning]; + [fasterCoder encodeBool:_lensProcessorReady]; + [fasterCoder encodeBool:_lensesActive]; + [fasterCoder encodeBool:_liveVideoStreaming]; + [fasterCoder encodeBool:_lowLightCondition]; + [fasterCoder encodeBool:_torchActive]; + [fasterCoder encodeBool:_torchSupported]; + [fasterCoder encodeFloat64:_zoomFactor]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _adjustingExposure = (BOOL)[fasterDecoder decodeBool]; + _arSessionActive = (BOOL)[fasterDecoder decodeBool]; + _devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32]; + _flashActive = (BOOL)[fasterDecoder decodeBool]; + _flashSupported = (BOOL)[fasterDecoder decodeBool]; + _isNightModeActive = (BOOL)[fasterDecoder decodeBool]; + _isPortraitModeActive = (BOOL)[fasterDecoder decodeBool]; + _isRunning = (BOOL)[fasterDecoder decodeBool]; + _lensProcessorReady = (BOOL)[fasterDecoder decodeBool]; + _lensesActive = (BOOL)[fasterDecoder decodeBool]; + _liveVideoStreaming = (BOOL)[fasterDecoder decodeBool]; + _lowLightCondition = (BOOL)[fasterDecoder decodeBool]; + _torchActive = (BOOL)[fasterDecoder decodeBool]; + _torchSupported = (BOOL)[fasterDecoder decodeBool]; + _zoomFactor = (CGFloat)[fasterDecoder decodeFloat64]; +} + +- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 15633755733674300ULL: + _adjustingExposure = (BOOL)val; + break; + case 11461798188076803ULL: + _arSessionActive = (BOOL)val; + break; + case 12833337784991002ULL: + _flashActive = (BOOL)val; + break; + case 51252237764061994ULL: + _flashSupported = (BOOL)val; + break; + case 1498048848502287ULL: + _isNightModeActive = (BOOL)val; + break; + case 56151582267629469ULL: + _isPortraitModeActive = (BOOL)val; + break; + case 12346172623874083ULL: + _isRunning = (BOOL)val; + break; + case 67168377441917657ULL: + _lensProcessorReady = (BOOL)val; + break; + case 5791542045168142ULL: + _lensesActive = (BOOL)val; + break; + case 28486888710545224ULL: + _liveVideoStreaming = (BOOL)val; + break; + case 24071673583499455ULL: + _lowLightCondition = (BOOL)val; + break; + case 40774429934225315ULL: + _torchActive = (BOOL)val; + break; + case 41333098301057670ULL: + _torchSupported = (BOOL)val; + break; + } +} + +- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 66264093189780655ULL: + _devicePosition = (SCManagedCaptureDevicePosition)val; + break; + } +} + +- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 61340640993537628ULL: + _zoomFactor = (CGFloat)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 10319810232046341562ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 15 /* Total */, + FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool), + 
FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32), + FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) { + return NO; + } + SCManagedCapturerState *other = (SCManagedCapturerState *)object; + if (other->_isRunning != _isRunning) { + return NO; + } + + if (other->_isNightModeActive != _isNightModeActive) { + return NO; + } + + if (other->_isPortraitModeActive != _isPortraitModeActive) { + return NO; + } + + if (other->_lowLightCondition != _lowLightCondition) { + return NO; + } + + if (other->_adjustingExposure != _adjustingExposure) { + return NO; + } + + if (other->_devicePosition != _devicePosition) { + return NO; + } + + if (other->_zoomFactor != _zoomFactor) { + return NO; + } + + if (other->_flashSupported != _flashSupported) { + return NO; + } + + if (other->_torchSupported != _torchSupported) { + return NO; + } + + if (other->_flashActive != _flashActive) { + return NO; + } + + if (other->_torchActive != _torchActive) { + return NO; + } + + if (other->_lensesActive != _lensesActive) { + return NO; + } + + if (other->_arSessionActive != _arSessionActive) { + return NO; + } + + if (other->_liveVideoStreaming != _liveVideoStreaming) { + return NO; + } + + if (other->_lensProcessorReady != _lensProcessorReady) { + return NO; + } + + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = { + (NSUInteger)_isRunning, (NSUInteger)_isNightModeActive, (NSUInteger)_isPortraitModeActive, + (NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure, (NSUInteger)_devicePosition, + (NSUInteger)_zoomFactor, (NSUInteger)_flashSupported, (NSUInteger)_torchSupported, + (NSUInteger)_flashActive, (NSUInteger)_torchActive, (NSUInteger)_lensesActive, + (NSUInteger)_arSessionActive, (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 15; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\tisRunning:%@\n", [@(_isRunning) description]]; + [desc appendFormat:@"\tisNightModeActive:%@\n", [@(_isNightModeActive) description]]; + [desc appendFormat:@"\tisPortraitModeActive:%@\n", [@(_isPortraitModeActive) description]]; + [desc appendFormat:@"\tlowLightCondition:%@\n", 
[@(_lowLightCondition) description]]; + [desc appendFormat:@"\tadjustingExposure:%@\n", [@(_adjustingExposure) description]]; + [desc appendFormat:@"\tdevicePosition:%@\n", [@(_devicePosition) description]]; + [desc appendFormat:@"\tzoomFactor:%@\n", [@(_zoomFactor) description]]; + [desc appendFormat:@"\tflashSupported:%@\n", [@(_flashSupported) description]]; + [desc appendFormat:@"\ttorchSupported:%@\n", [@(_torchSupported) description]]; + [desc appendFormat:@"\tflashActive:%@\n", [@(_flashActive) description]]; + [desc appendFormat:@"\ttorchActive:%@\n", [@(_torchActive) description]]; + [desc appendFormat:@"\tlensesActive:%@\n", [@(_lensesActive) description]]; + [desc appendFormat:@"\tarSessionActive:%@\n", [@(_arSessionActive) description]]; + [desc appendFormat:@"\tliveVideoStreaming:%@\n", [@(_liveVideoStreaming) description]]; + [desc appendFormat:@"\tlensProcessorReady:%@\n", [@(_lensProcessorReady) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerState.value b/ManagedCapturer/SCManagedCapturerState.value new file mode 100644 index 0000000..1d49d3d --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.value @@ -0,0 +1,20 @@ +#import +#import "SCManagedCaptureDevice.h" + +interface SCManagedCapturerState + BOOL isRunning + BOOL isNightModeActive + BOOL isPortraitModeActive + BOOL lowLightCondition + BOOL adjustingExposure + enum SCManagedCaptureDevicePosition devicePosition + CGFloat zoomFactor + BOOL flashSupported + BOOL torchSupported + BOOL flashActive + BOOL torchActive + BOOL lensesActive + BOOL arSessionActive + BOOL liveVideoStreaming + BOOL lensProcessorReady +end diff --git a/ManagedCapturer/SCManagedCapturerStateBuilder.h b/ManagedCapturer/SCManagedCapturerStateBuilder.h new file mode 100644 index 0000000..7a9adb8 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerStateBuilder.h @@ -0,0 +1,46 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! + +#import "SCManagedCapturerState.h" + +#import + +#import + +@interface SCManagedCapturerStateBuilder : NSObject + ++ (instancetype)withManagedCapturerState:(id)managedCapturerState; + +- (SCManagedCapturerState *)build; + +- (instancetype)setIsRunning:(BOOL)isRunning; + +- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive; + +- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive; + +- (instancetype)setLowLightCondition:(BOOL)lowLightCondition; + +- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure; + +- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (instancetype)setZoomFactor:(CGFloat)zoomFactor; + +- (instancetype)setFlashSupported:(BOOL)flashSupported; + +- (instancetype)setTorchSupported:(BOOL)torchSupported; + +- (instancetype)setFlashActive:(BOOL)flashActive; + +- (instancetype)setTorchActive:(BOOL)torchActive; + +- (instancetype)setLensesActive:(BOOL)lensesActive; + +- (instancetype)setArSessionActive:(BOOL)arSessionActive; + +- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming; + +- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady; + +@end diff --git a/ManagedCapturer/SCManagedCapturerStateBuilder.m b/ManagedCapturer/SCManagedCapturerStateBuilder.m new file mode 100644 index 0000000..c468335 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerStateBuilder.m @@ -0,0 +1,158 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! 
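Because SCManagedCapturerState is immutable, an update is expressed by deriving a new value through the builder declared above. A sketch with a hypothetical `currentState` (editor's illustration):

SCManagedCapturerStateBuilder *builder = [SCManagedCapturerStateBuilder withManagedCapturerState:currentState];
[builder setZoomFactor:2.0];
[builder setFlashActive:YES];
SCManagedCapturerState *newState = [builder build]; // currentState itself is left untouched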
+ +#import "SCManagedCapturerStateBuilder.h" + +#import + +#import + +@implementation SCManagedCapturerStateBuilder { + BOOL _isRunning; + BOOL _isNightModeActive; + BOOL _isPortraitModeActive; + BOOL _lowLightCondition; + BOOL _adjustingExposure; + SCManagedCaptureDevicePosition _devicePosition; + CGFloat _zoomFactor; + BOOL _flashSupported; + BOOL _torchSupported; + BOOL _flashActive; + BOOL _torchActive; + BOOL _lensesActive; + BOOL _arSessionActive; + BOOL _liveVideoStreaming; + BOOL _lensProcessorReady; +} + ++ (instancetype)withManagedCapturerState:(id)managedCapturerState +{ + SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init]; + builder->_isRunning = managedCapturerState.isRunning; + builder->_isNightModeActive = managedCapturerState.isNightModeActive; + builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive; + builder->_lowLightCondition = managedCapturerState.lowLightCondition; + builder->_adjustingExposure = managedCapturerState.adjustingExposure; + builder->_devicePosition = managedCapturerState.devicePosition; + builder->_zoomFactor = managedCapturerState.zoomFactor; + builder->_flashSupported = managedCapturerState.flashSupported; + builder->_torchSupported = managedCapturerState.torchSupported; + builder->_flashActive = managedCapturerState.flashActive; + builder->_torchActive = managedCapturerState.torchActive; + builder->_lensesActive = managedCapturerState.lensesActive; + builder->_arSessionActive = managedCapturerState.arSessionActive; + builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming; + builder->_lensProcessorReady = managedCapturerState.lensProcessorReady; + return builder; +} + +- (SCManagedCapturerState *)build +{ + return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning + isNightModeActive:_isNightModeActive + isPortraitModeActive:_isPortraitModeActive + lowLightCondition:_lowLightCondition + adjustingExposure:_adjustingExposure + devicePosition:_devicePosition + zoomFactor:_zoomFactor + flashSupported:_flashSupported + torchSupported:_torchSupported + flashActive:_flashActive + torchActive:_torchActive + lensesActive:_lensesActive + arSessionActive:_arSessionActive + liveVideoStreaming:_liveVideoStreaming + lensProcessorReady:_lensProcessorReady]; +} + +- (instancetype)setIsRunning:(BOOL)isRunning +{ + _isRunning = isRunning; + return self; +} + +- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive +{ + _isNightModeActive = isNightModeActive; + return self; +} + +- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive +{ + _isPortraitModeActive = isPortraitModeActive; + return self; +} + +- (instancetype)setLowLightCondition:(BOOL)lowLightCondition +{ + _lowLightCondition = lowLightCondition; + return self; +} + +- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure +{ + _adjustingExposure = adjustingExposure; + return self; +} + +- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; + return self; +} + +- (instancetype)setZoomFactor:(CGFloat)zoomFactor +{ + _zoomFactor = zoomFactor; + return self; +} + +- (instancetype)setFlashSupported:(BOOL)flashSupported +{ + _flashSupported = flashSupported; + return self; +} + +- (instancetype)setTorchSupported:(BOOL)torchSupported +{ + _torchSupported = torchSupported; + return self; +} + +- (instancetype)setFlashActive:(BOOL)flashActive +{ + _flashActive = flashActive; + return self; +} + +- (instancetype)setTorchActive:(BOOL)torchActive +{ + 
_torchActive = torchActive; + return self; +} + +- (instancetype)setLensesActive:(BOOL)lensesActive +{ + _lensesActive = lensesActive; + return self; +} + +- (instancetype)setArSessionActive:(BOOL)arSessionActive +{ + _arSessionActive = arSessionActive; + return self; +} + +- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming +{ + _liveVideoStreaming = liveVideoStreaming; + return self; +} + +- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady +{ + _lensProcessorReady = lensProcessorReady; + return self; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerUtils.h b/ManagedCapturer/SCManagedCapturerUtils.h new file mode 100644 index 0000000..2a0f0f8 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerUtils.h @@ -0,0 +1,36 @@ +// +// SCManagedCapturerUtils.h +// Snapchat +// +// Created by Chao Pang on 10/4/17. +// + +#import + +#import +#import + +SC_EXTERN_C_BEGIN + +extern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio; + +extern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void); + +extern CGSize SCManagedCapturerAllScreenSize(void); + +extern CGSize SCAsyncImageCapturePlaceholderViewSize(void); + +extern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio); + +extern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio); + +extern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, + CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight); + +extern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio); + +extern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight); + +extern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, + CGFloat aspectRatio); +SC_EXTERN_C_END diff --git a/ManagedCapturer/SCManagedCapturerUtils.m b/ManagedCapturer/SCManagedCapturerUtils.m new file mode 100644 index 0000000..1e7662c --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerUtils.m @@ -0,0 +1,153 @@ +// +// SCManagedCapturerUtils.m +// Snapchat +// +// Created by Chao Pang on 10/4/17. +// + +#import "SCManagedCapturerUtils.h" + +#import "SCCaptureCommon.h" + +#import +#import +#import +#import + +// This is to calculate the crop ratio for generating the image shown in Preview page +// Check https://snapchat.quip.com/lU3kAoDxaAFG for our design. +const CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0); + +CGFloat SCManagedCapturedImageAndVideoAspectRatio(void) +{ + static dispatch_once_t onceToken; + static CGFloat aspectRatio; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + aspectRatio = SCSizeGetAspectRatio( + CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)); + }); + return aspectRatio; +} + +CGSize SCManagedCapturerAllScreenSize(void) +{ + static CGSize size; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + // This logic is complicated because we need to handle iPhone X properly. + // See https://snapchat.quip.com/lU3kAoDxaAFG for our design. 
+ UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; + // This really is just some coordinate computations: + // We know in preview, our size is (screenWidth, screenHeight - topInset - bottomInset) + // We know that when the preview image is in the camera screen, the height is screenHeight - visualTopInset, + // thus, we need to figure out in camera screen, what's the bleed-over width should be + // (screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset) + size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) / + (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), + screenSize.height); + }); + return size; +} + +CGSize SCAsyncImageCapturePlaceholderViewSize(void) +{ + static CGSize size; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; + size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width / + (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), + screenSize.height - visualSafeInsets.top); + }); + return size; +} + +CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio) +{ + SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @""); + switch (orientation) { + case UIImageOrientationLeft: + case UIImageOrientationRight: + case UIImageOrientationLeftMirrored: + case UIImageOrientationRightMirrored: + return 1.0 / aspectRatio; + default: + return aspectRatio; + } +} + +UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio) +{ + if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) { + CGImageRef croppedImageRef = + SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio); + UIImage *croppedImage = + [UIImage imageWithCGImage:croppedImageRef scale:image.scale orientation:image.imageOrientation]; + CGImageRelease(croppedImageRef); + return croppedImage; + } else { + return image; + } +} + +void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, + CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight) +{ + SCCAssert(outputWidth != NULL && outputHeight != NULL, @""); + aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); + if (inputWidth > roundf(inputHeight * aspectRatio)) { + *outputHeight = inputHeight; + *outputWidth = roundf(*outputHeight * aspectRatio); + } else { + *outputWidth = inputWidth; + *outputHeight = roundf(*outputWidth / aspectRatio); + } +} + +BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio) +{ + if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) { + return NO; + } + aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); + size_t width = CGImageGetWidth(image); + size_t height = CGImageGetHeight(image); + return (width != roundf(height * aspectRatio)); +} + +CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight) +{ + if ([SCDeviceName isIphoneX]) { + // X is pushed all the way over to crop out top section but none of bottom + CGFloat x = (imageWidth - croppedWidth); + // Crop y symmetrically. 
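// [Editor's worked example, not part of the original patch] For SCCropImageSizeToAspectRatio above,
// with a hypothetical 1080x1920 input, UIImageOrientationUp and aspectRatio = 0.5:
//   roundf(inputHeight * aspectRatio) = 960, and inputWidth (1080) > 960,
//   so outputHeight = 1920 and outputWidth = roundf(1920 * 0.5) = 960.
// The image is therefore narrowed from 1080x1920 to 960x1920; the other branch instead
// shortens the height when the input is already narrower than the target ratio.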
+        CGFloat y = roundf((imageHeight - croppedHeight) / 2.0);
+
+        return CGRectMake(x, y, croppedWidth, croppedHeight);
+    }
+    return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight);
+}
+
+CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
+{
+    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
+    size_t width = CGImageGetWidth(image);
+    size_t height = CGImageGetHeight(image);
+    size_t croppedWidth, croppedHeight;
+    if ([SCDeviceName isIphoneX]) {
+        size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio);
+        size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio);
+        SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth,
+                                     &croppedHeight);
+    } else {
+        SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight);
+    }
+    CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight);
+    return CGImageCreateWithImageInRect(image, cropRect);
+}
diff --git a/ManagedCapturer/SCManagedCapturerV1.h b/ManagedCapturer/SCManagedCapturerV1.h
new file mode 100644
index 0000000..be8fe65
--- /dev/null
+++ b/ManagedCapturer/SCManagedCapturerV1.h
@@ -0,0 +1,57 @@
+//
+//  SCManagedCapturer.h
+//  Snapchat
+//
+//  Created by Liu Liu on 4/20/15.
+//  Copyright (c) 2015 Liu Liu. All rights reserved.
+//
+
+#import "SCCaptureCommon.h"
+#import "SCCapturer.h"
+
+#import
+
+#import
+#import
+
+/**
+ * Manage AVCaptureSession with SCManagedCapturerV1
+ *
+ * In Phantom, AVCaptureSession is used in many places. However, since only one session can run at a time per app,
+ * we need some kind of management for the capture session.
+ *
+ * SCManagedCapturerV1 manages the state of the capture session in the following ways:
+ *
+ * All operations in SCManagedCapturerV1 are handled on a serial queue to ensure their ordering. All callbacks (either
+ * on the listener or the completion handler) are on the main thread. The state of SCManagedCapturerV1 is conveniently
+ * maintained in a SCManagedCapturerState object, which is immutable and can be passed across threads; it maintains a
+ * consistent, if slightly delayed, view of the capture session (thus, the state delivered on the main thread may
+ * still report the back camera as the active device while, on the serial queue, the active device has already
+ * switched to the front camera. This is OK because state.devicePosition was the back camera, with all its setup, at
+ * that time. Note that it is impossible to have an on-time view of the state across threads without them blocking
+ * each other).
+ *
+ * For the main use cases, you set up the capturer, add the preview layer, and then capture still images or record
+ * video, and SCManagedCapturerV1 will do the rest (make sure it actually captures the image / video, recover from
+ * errors, or set up our more advanced image / video post-processing).
+ *
+ * The key classes that drive the recording flow are SCManagedVideoStreamer and SCManagedVideoFileStreamer, which
+ * conform to SCManagedVideoDataSource. They stream images to consumers conforming to
+ * SCManagedVideoDataSourceListener, such as SCManagedLensesProcessor, SCManagedDeviceCapacityAnalyzer,
+ * SCManagedVideoScanner and ultimately SCManagedVideoCapturer and SCManagedStillImageCapturer, which record the
+ * final output.
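+ *
+ * Illustrative usage (a sketch only; the context string and device position below are placeholders, not
+ * canonical values):
+ *
+ *   SCManagedCapturerV1 *capturer = [SCManagedCapturerV1 sharedInstance];
+ *   [capturer setupWithDevicePositionAsynchronously:SCManagedCaptureDevicePositionFront
+ *                                 completionHandler:nil
+ *                                           context:@"ExampleContext"];
+ *   SCCapturerToken *token =
+ *       [capturer startRunningAsynchronouslyWithCompletionHandler:nil context:@"ExampleContext"];
+ *   // ... capture still images / record video ...
+ *   [capturer stopRunningAsynchronously:token completionHandler:nil context:@"ExampleContext"];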
+ * + */ +@class SCCaptureResource; + +extern NSString *const kSCLensesTweaksDidChangeFileInput; + +@interface SCManagedCapturerV1 : NSObject + ++ (SCManagedCapturerV1 *)sharedInstance; + +/* + The following APIs are reserved to be only used for SCCaptureCore aka managedCapturerV2. + */ +- (instancetype)initWithResource:(SCCaptureResource *)resource; + +@end diff --git a/ManagedCapturer/SCManagedCapturerV1.m b/ManagedCapturer/SCManagedCapturerV1.m new file mode 100644 index 0000000..ba3e579 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerV1.m @@ -0,0 +1,2165 @@ +// +// SCManagedCapturer.m +// Snapchat +// +// Created by Liu Liu on 4/20/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedCapturerV1.h" +#import "SCManagedCapturerV1_Private.h" + +#import "ARConfiguration+SCConfiguration.h" +#import "NSURL+Asset.h" +#import "SCBlackCameraDetector.h" +#import "SCBlackCameraNoOutputDetector.h" +#import "SCCameraTweaks.h" +#import "SCCaptureResource.h" +#import "SCCaptureSessionFixer.h" +#import "SCCaptureUninitializedState.h" +#import "SCCaptureWorker.h" +#import "SCCapturerToken.h" +#import "SCManagedAudioStreamer.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" +#import "SCManagedCaptureDeviceHandler.h" +#import "SCManagedCaptureDeviceSubjectAreaHandler.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCManagedCaptureSession.h" +#import "SCManagedCapturerARImageCaptureProvider.h" +#import "SCManagedCapturerGLViewManagerAPI.h" +#import "SCManagedCapturerLSAComponentTrackerAPI.h" +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedCapturerListenerAnnouncer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" +#import "SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedDroppedFramesReporter.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedStillImageCapturer.h" +#import "SCManagedStillImageCapturerHandler.h" +#import "SCManagedVideoARDataSource.h" +#import "SCManagedVideoCapturer.h" +#import "SCManagedVideoFileStreamer.h" +#import "SCManagedVideoFrameSampler.h" +#import "SCManagedVideoScanner.h" +#import "SCManagedVideoStreamReporter.h" +#import "SCManagedVideoStreamer.h" +#import "SCMetalUtils.h" +#import "SCProcessingPipeline.h" +#import "SCProcessingPipelineBuilder.h" +#import "SCScanConfiguration.h" +#import "SCSingleFrameStreamCapturer.h" +#import "SCSnapCreationTriggers.h" +#import "SCTimedTask.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +@import ARKit; + +static NSUInteger const kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession = 22; +static CGFloat const kSCManagedCapturerFixInconsistencyARSessionDelayThreshold = 2; +static CGFloat const kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold = 5; + +static NSTimeInterval const kMinFixAVSessionRunningInterval = 1; // Interval to run _fixAVSessionIfNecessary +static NSTimeInterval const kMinFixSessionRuntimeErrorInterval = + 1; // Min interval that RuntimeError calls _startNewSession + +static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain"; + +NSString *const kSCLensesTweaksDidChangeFileInput = @"kSCLensesTweaksDidChangeFileInput"; + +@implementation 
SCManagedCapturerV1 { + // No ivars for CapturerV1 please, they should be in resource. + SCCaptureResource *_captureResource; +} + ++ (SCManagedCapturerV1 *)sharedInstance +{ + static dispatch_once_t onceToken; + static SCManagedCapturerV1 *managedCapturerV1; + dispatch_once(&onceToken, ^{ + managedCapturerV1 = [[SCManagedCapturerV1 alloc] init]; + }); + return managedCapturerV1; +} + +- (instancetype)init +{ + SCTraceStart(); + SCAssertMainThread(); + SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource]; + return [self initWithResource:resource]; +} + +- (instancetype)initWithResource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + self = [super init]; + if (self) { + // Assuming I am not in background. I can be more defensive here and fetch the app state. + // But to avoid potential problems, won't do that until later. + SCLogCapturerInfo(@"======================= cool startup ======================="); + // Initialization of capture resource should be done in worker to be shared between V1 and V2. + _captureResource = resource; + _captureResource.handleAVSessionStatusChange = @selector(_handleAVSessionStatusChange:); + _captureResource.sessionRuntimeError = @selector(_sessionRuntimeError:); + _captureResource.livenessConsistency = @selector(_livenessConsistency:); + _captureResource.deviceSubjectAreaHandler = + [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:_captureResource]; + _captureResource.snapCreationTriggers = [SCSnapCreationTriggers new]; + if (SCIsMasterBuild()) { + // We call _sessionRuntimeError to reset _captureResource.videoDataSource if input changes + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_sessionRuntimeError:) + name:kSCLensesTweaksDidChangeFileInput + object:nil]; + } + } + return self; +} + +- (SCBlackCameraDetector *)blackCameraDetector +{ + return _captureResource.blackCameraDetector; +} + +- (void)recreateAVCaptureSession +{ + SCTraceODPCompatibleStart(2); + [self _startRunningWithNewCaptureSessionIfNecessary]; +} + +- (void)_handleAVSessionStatusChange:(NSDictionary *)change +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground); + BOOL wasRunning = [change[NSKeyValueChangeOldKey] boolValue]; + BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue]; + SCLogCapturerInfo(@"avSession running status changed: %@ -> %@", wasRunning ? @"running" : @"stopped", + isRunning ? 
@"running" : @"stopped"); + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:isRunning]; + + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCapturerInfo(@"Scheduled AVCaptureSession recreation, return"); + return; + } + + if (wasRunning != isRunning) { + runOnMainThreadAsynchronously(^{ + if (isRunning) { + [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state]; + } else { + [_captureResource.announcer managedCapturer:self didStopRunning:_captureResource.state]; + } + }); + } + + if (!isRunning) { + [_captureResource.queuePerformer perform:^{ + [self _fixAVSessionIfNecessary]; + }]; + } else { + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + } +} + +- (void)_fixAVSessionIfNecessary +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground); + SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning); + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"startConsistencyCheckAndFix"]; + + NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate]; + if (timeNow - _captureResource.lastFixSessionTimestamp < kMinFixAVSessionRunningInterval) { + SCLogCoreCameraInfo(@"Fixing session in less than %f, skip", kMinFixAVSessionRunningInterval); + return; + } + _captureResource.lastFixSessionTimestamp = timeNow; + + if (!_captureResource.managedSession.isRunning) { + SCTraceStartSection("Fix AVSession") + { + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession++; + SCGhostToSnappableSignalCameraFixInconsistency(); + if (_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession <= + kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) { + SCLogCapturerInfo(@"Fixing AVSession"); + [_captureResource.managedSession startRunning]; + SCLogCapturerInfo(@"Fixed AVSession, success : %@", @(_captureResource.managedSession.isRunning)); + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"finishCaptureSessionFix"]; + } else { + // start running with new capture session if the inconsistency fixing not succeeds + SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***"); + [self _startRunningWithNewCaptureSessionIfNecessary]; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"finishNewCaptureSessionCreation"]; + } + } + SCTraceEndSection(); + [[SCLogger sharedInstance] + logTimedEventEnd:kSCCameraFixAVCaptureSession + uniqueId:@"" + parameters:@{ + @"success" : @(_captureResource.managedSession.isRunning), + @"count" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession) + }]; + } else { + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraFixAVCaptureSession uniqueId:@""]; + } + if (_captureResource.managedSession.isRunning) { + // If it is fixed, we signal received the first frame. 
+ SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + + // For non-metal preview render, we need to make sure preview is not hidden + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state]; + // To approximate this did render timer, it is not accurate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + }); + } else { + [_captureResource.queuePerformer perform:^{ + [self _fixAVSessionIfNecessary]; + } + after:1]; + } + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning]; +} + +- (void)_fixNonMetalSessionPreviewInconsistency +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning); + if ((!_captureResource.videoPreviewLayer.hidden) != _captureResource.managedSession.isRunning) { + SCTraceStartSection("Fix non-Metal VideoPreviewLayer"); + { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } + SCTraceEndSection(); + } +} + +- (SCCaptureResource *)captureResource +{ + SCTraceODPCompatibleStart(2); + return _captureResource; +} + +- (id)lensProcessingCore +{ + SCTraceODPCompatibleStart(2); + @weakify(self); + return (id)[[SCLazyLoadingProxy alloc] initWithInitializationBlock:^id { + @strongify(self); + SCReportErrorIf(self.captureResource.state.lensProcessorReady, @"[Lenses] Lens processing core is not ready"); + return self.captureResource.lensProcessingCore; + }]; +} + +- (SCVideoCaptureSessionInfo)activeSession +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker activeSession:_captureResource]; +} + +- (BOOL)isLensApplied +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker isLensApplied:_captureResource]; +} + +- (BOOL)isVideoMirrored +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker isVideoMirrored:_captureResource]; +} + +#pragma mark - Setup, Start & Stop + +- (void)_updateHRSIEnabled +{ + SCTraceODPCompatibleStart(2); + // Since night mode is low-res, we set high resolution still image output when night mode is enabled + // SoftwareZoom requires higher resolution image to get better zooming result too. + // We also want a higher resolution on newer devices + BOOL is1080pSupported = [SCManagedCaptureDevice is1080pSupported]; + BOOL shouldHRSIEnabled = + (_captureResource.device.isNightModeActive || _captureResource.device.softwareZoom || is1080pSupported); + SCLogCapturerInfo(@"Setting HRSIEnabled to: %d. 
isNightModeActive:%d softwareZoom:%d is1080pSupported:%d", + shouldHRSIEnabled, _captureResource.device.isNightModeActive, + _captureResource.device.softwareZoom, is1080pSupported); + [_captureResource.stillImageCapturer setHighResolutionStillImageOutputEnabled:shouldHRSIEnabled]; +} + +- (void)_updateStillImageStabilizationEnabled +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Enabling still image stabilization"); + [_captureResource.stillImageCapturer enableStillImageStabilization]; +} + +- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition); + SCTraceResumeToken token = SCTraceCapture(); + [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer]; + [_captureResource.queuePerformer perform:^{ + SCTraceResume(token); + [self setupWithDevicePosition:devicePosition completionHandler:completionHandler]; + }]; +} + +- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_captureResource.queuePerformer); + [SCCaptureWorker setupWithCaptureResource:_captureResource devicePosition:devicePosition]; + + [self addListener:_captureResource.stillImageCapturer]; + [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [self addListener:_captureResource.lensProcessingCore]; + + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + + [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; + + if (!SCDeviceSupportsMetal()) { + [SCCaptureWorker makeVideoPreviewLayer:_captureResource]; + } + + // I need to do this setup now. Thus, it is off the main thread. This also means my preview layer controller is + // entangled with the capturer. 
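+    // Setup order matters here: the capture resource, listeners and device-dependent toggles (HRSI, stabilization,
+    // lens field-of-view tracking) are configured above, and only then is the shared preview layer controller wired
+    // to this capturer before the status flips to SCManagedCapturerStatusReady.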
+ [[SCManagedCapturePreviewLayerController sharedInstance] setupRenderPipeline]; + [[SCManagedCapturePreviewLayerController sharedInstance] setManagedCapturer:self]; + _captureResource.status = SCManagedCapturerStatusReady; + + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + runOnMainThreadAsynchronously(^{ + SCLogCapturerInfo(@"Did setup with devicePosition:%lu", (unsigned long)devicePosition); + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; + if (!SCDeviceSupportsMetal()) { + [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + if (completionHandler) { + completionHandler(); + } + }); +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + _captureResource.sampleBufferDisplayController = sampleBufferDisplayController; + [_captureResource.videoDataSource addSampleBufferDisplayController:sampleBufferDisplayController]; + }]; +} + +- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCTraceResumeToken resumeToken = SCTraceCapture(); + [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; + SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; + SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); + [_captureResource.queuePerformer perform:^{ + SCTraceResume(resumeToken); + [SCCaptureWorker startRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + // After startRunning, we need to make sure _fixAVSessionIfNecessary start running. + // The problem: with the new KVO fix strategy, it may happen that AVCaptureSession is in stopped state, thus no + // KVO callback is triggered. + // And calling startRunningAsynchronouslyWithCompletionHandler has no effect because SCManagedCapturerStatus is + // in SCManagedCapturerStatusRunning state + [self _fixAVSessionIfNecessary]; + }]; + return token; +} + +- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_captureResource.queuePerformer); + SCLogCapturerInfo(@"Stop running. token:%@ context:%@", token, context); + return [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; +} + +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Stop running asynchronously. 
token:%@ context:%@", token, context); + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_captureResource.queuePerformer perform:^{ + SCTraceResume(resumeToken); + [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + }]; +} + +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + after:(NSTimeInterval)delay + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Stop running asynchronously. token:%@ delay:%f", token, delay); + NSTimeInterval startTime = CACurrentMediaTime(); + [_captureResource.queuePerformer perform:^{ + NSTimeInterval elapsedTime = CACurrentMediaTime() - startTime; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // If we haven't started a new running sequence yet, stop running now + [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + } + after:MAX(delay - elapsedTime, 0)]; + }]; +} + +- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Start streaming asynchronously"); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker startStreaming:_captureResource]; + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +#pragma mark - Recording / Capture + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker captureStillImageWithCaptureResource:_captureResource + aspectRatio:aspectRatio + captureSessionID:captureSessionID + shouldCaptureFromVideo:[self _shouldCaptureImageFromVideo] + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: + (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCLogCapturerInfo(@"Start capturing single video frame"); + _captureResource.frameCap = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^void(UIImage *image) { + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoDataSource removeListener:_captureResource.frameCap]; + _captureResource.frameCap = nil; + }]; + runOnMainThreadAsynchronously(^{ + [_captureResource.device setTorchActive:NO]; + SCLogCapturerInfo(@"End capturing single video frame"); + completionHandler(image); + }); + }]; + + BOOL waitForTorch = NO; + if (!_captureResource.state.torchActive) { + if (_captureResource.state.flashActive) { + waitForTorch = YES; + [_captureResource.device setTorchActive:YES]; + } + } + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoDataSource addListener:_captureResource.frameCap]; + [SCCaptureWorker startStreaming:_captureResource]; + } + after:(waitForTorch ? 
0.5 : 0)]; + + }]; +} + +- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context + audioConfiguration:(SCAudioConfiguration *)configuration +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCLogCapturerInfo(@"prepare for recording"); + [_captureResource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration]; + }]; +} + +- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker startRecordingWithCaptureResource:_captureResource + outputSettings:outputSettings + audioConfiguration:configuration + maxDuration:maxDuration + fileURL:fileURL + captureSessionID:captureSessionID + completionHandler:completionHandler]; + }]; +} + +- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker stopRecordingWithCaptureResource:_captureResource]; + }]; +} + +- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker cancelRecordingWithCaptureResource:_captureResource]; + }]; +} + +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker startScanWithScanConfiguration:configuration resource:_captureResource]; + }]; +} + +- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:_captureResource]; + }]; +} + +- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + // Previously _captureResource.videoFrameSampler was conditionally created when setting up, but if this method is + // called it is a + // safe assumption the client wants it to run instead of failing silently, so always create + // _captureResource.videoFrameSampler + if (!_captureResource.videoFrameSampler) { + _captureResource.videoFrameSampler = [SCManagedVideoFrameSampler new]; + [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; + } + SCLogCapturerInfo(@"Sampling next frame"); + [_captureResource.videoFrameSampler sampleNextFrame:completionHandler]; +} + +- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Adding timed task:%@", task); + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoCapturer addTimedTask:task]; + }]; +} + +- (void)clearTimedTasksWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoCapturer clearTimedTasks]; + }]; +} + +#pragma mark - Utilities + +- 
(void)convertViewCoordinates:(CGPoint)viewCoordinates + completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssert(completionHandler, @"completionHandler shouldn't be nil"); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (SCDeviceSupportsMetal()) { + CGSize viewSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + CGPoint pointOfInterest = + [_captureResource.device convertViewCoordinates:viewCoordinates + viewSize:viewSize + videoGravity:AVLayerVideoGravityResizeAspectFill]; + runOnMainThreadAsynchronously(^{ + completionHandler(pointOfInterest); + }); + } else { + CGSize viewSize = _captureResource.videoPreviewLayer.bounds.size; + CGPoint pointOfInterest = + [_captureResource.device convertViewCoordinates:viewCoordinates + viewSize:viewSize + videoGravity:_captureResource.videoPreviewLayer.videoGravity]; + runOnMainThreadAsynchronously(^{ + completionHandler(pointOfInterest); + }); + } + }]; +} + +- (void)detectLensCategoryOnNextFrame:(CGPoint)point + lenses:(NSArray *)lenses + completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion + context:(NSString *)context + +{ + SCTraceODPCompatibleStart(2); + SCAssert(completion, @"completionHandler shouldn't be nil"); + SCAssertMainThread(); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCLogCapturerInfo(@"Detecting lens category on next frame. point:%@, lenses:%@", NSStringFromCGPoint(point), + [lenses valueForKey:NSStringFromSelector(@selector(lensId))]); + [_captureResource.lensProcessingCore + detectLensCategoryOnNextFrame:point + videoOrientation:_captureResource.videoDataSource.videoOrientation + lenses:lenses + completion:^(SCLensCategory *_Nullable category, NSInteger categoriesCount) { + runOnMainThreadAsynchronously(^{ + if (completion) { + completion(category, categoriesCount); + } + }); + }]; + }]; +} + +#pragma mark - Configurations + +- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting device position asynchronously to: %lu", (unsigned long)devicePosition); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL devicePositionChanged = NO; + BOOL nightModeChanged = NO; + BOOL portraitModeChanged = NO; + BOOL zoomFactorChanged = NO; + BOOL flashSupportedOrTorchSupportedChanged = NO; + SCManagedCapturerState *state = [_captureResource.state copy]; + if (_captureResource.state.devicePosition != devicePosition) { + SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; + if (device) { + if (!device.delegate) { + device.delegate = _captureResource.captureDeviceHandler; + } + + SCManagedCaptureDevice *prevDevice = _captureResource.device; + [SCCaptureWorker turnARSessionOff:_captureResource]; + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (!SCDeviceSupportsMetal()) { + if (isStreaming) { + [_captureResource.videoDataSource stopStreaming]; + } + } + SCLogCapturerInfo(@"Set device position beginConfiguration"); + [_captureResource.videoDataSource beginConfiguration]; + [_captureResource.managedSession beginConfiguration]; + // Turn off flash for the current device in case it is active + [_captureResource.device setTorchActive:NO]; + if (_captureResource.state.devicePosition == 
SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = NO; + } + [_captureResource.deviceCapacityAnalyzer removeFocusListener]; + [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; + _captureResource.device = device; + BOOL deviceSet = [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession]; + // If we are toggling while recording, set the night mode back to not + // active + if (_captureResource.videoRecording) { + [self _setNightModeActive:NO]; + } + // Sync night mode, torch and flash state with the current device + devicePositionChanged = (_captureResource.state.devicePosition != devicePosition); + nightModeChanged = + (_captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive); + portraitModeChanged = + devicePositionChanged && + (devicePosition == SCManagedCaptureDevicePositionBackDualCamera || + _captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera); + zoomFactorChanged = (_captureResource.state.zoomFactor != _captureResource.device.zoomFactor); + if (zoomFactorChanged && _captureResource.device.softwareZoom) { + [SCCaptureWorker softwareZoomWithDevice:_captureResource.device resource:_captureResource]; + } + if (_captureResource.state.flashActive != _captureResource.device.flashActive) { + // preserve flashActive across devices + _captureResource.device.flashActive = _captureResource.state.flashActive; + } + if (_captureResource.state.liveVideoStreaming != device.liveVideoStreamingActive) { + // preserve liveVideoStreaming state across devices + [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming + session:_captureResource.managedSession.avSession]; + } + if (devicePosition == SCManagedCaptureDevicePositionBackDualCamera && + _captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive) { + // preserve nightMode when switching from back camera to back dual camera + [self _setNightModeActive:_captureResource.state.isNightModeActive]; + } + + flashSupportedOrTorchSupportedChanged = + (_captureResource.state.flashSupported != _captureResource.device.isFlashSupported || + _captureResource.state.torchSupported != _captureResource.device.isTorchSupported); + SCLogCapturerInfo(@"Set device position: %lu -> %lu, night mode: %d -> %d, zoom " + @"factor: %f -> %f, flash supported: %d -> %d, torch supported: %d -> %d", + (unsigned long)_captureResource.state.devicePosition, (unsigned long)devicePosition, + _captureResource.state.isNightModeActive, _captureResource.device.isNightModeActive, + _captureResource.state.zoomFactor, _captureResource.device.zoomFactor, + _captureResource.state.flashSupported, _captureResource.device.isFlashSupported, + _captureResource.state.torchSupported, _captureResource.device.isTorchSupported); + _captureResource.state = [[[[[[[[SCManagedCapturerStateBuilder + withManagedCapturerState:_captureResource.state] setDevicePosition:devicePosition] + setIsNightModeActive:_captureResource.device.isNightModeActive] + setZoomFactor:_captureResource.device.zoomFactor] + setFlashSupported:_captureResource.device.isFlashSupported] + setTorchSupported:_captureResource.device.isTorchSupported] + setIsPortraitModeActive:devicePosition == SCManagedCaptureDevicePositionBackDualCamera] build]; + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + // This needs to be done after we have finished configure everything + // for session 
otherwise we + // may set it up without hooking up the video input yet, and will set + // wrong parameter for the + // output. + [_captureResource.videoDataSource setDevicePosition:devicePosition]; + if (@available(ios 11.0, *)) { + if (portraitModeChanged) { + [_captureResource.videoDataSource + setDepthCaptureEnabled:_captureResource.state.isPortraitModeActive]; + [_captureResource.device setCaptureDepthData:_captureResource.state.isPortraitModeActive + session:_captureResource.managedSession.avSession]; + [_captureResource.stillImageCapturer + setPortraitModeCaptureEnabled:_captureResource.state.isPortraitModeActive]; + if (_captureResource.state.isPortraitModeActive) { + SCProcessingPipelineBuilder *processingPipelineBuilder = + [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.portraitModeEnabled = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } else { + [_captureResource.videoDataSource removeProcessingPipeline]; + } + } + } + [_captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:_captureResource.device]; + + [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; + [_captureResource.managedSession commitConfiguration]; + [_captureResource.videoDataSource commitConfiguration]; + + // Checks if the flash is activated and if so switches the flash along + // with the camera view. Setting device's torch mode has to be called after -[AVCaptureSession + // commitConfiguration], otherwise flash may be not working, especially for iPhone 8/8 Plus. + if (_captureResource.state.torchActive || + (_captureResource.state.flashActive && _captureResource.videoRecording)) { + [_captureResource.device setTorchActive:YES]; + if (devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = YES; + } + } + + SCLogCapturerInfo(@"Set device position commitConfiguration"); + [_captureResource.droppedFramesReporter didChangeCaptureDevicePosition]; + if (!SCDeviceSupportsMetal()) { + if (isStreaming) { + [SCCaptureWorker startStreaming:_captureResource]; + } + } + NSArray *inputs = _captureResource.managedSession.avSession.inputs; + if (!deviceSet) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"setDeviceForInput failed"]; + } else if (inputs.count == 0) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"no input"]; + } else if (inputs.count > 1) { + [self + _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:[NSString sc_stringWithFormat:@"multiple inputs: %@", inputs]]; + } else { + AVCaptureDeviceInput *input = [inputs firstObject]; + AVCaptureDevice *resultDevice = input.device; + if (resultDevice == prevDevice.device) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"stayed on previous device"]; + } else if (resultDevice != _captureResource.device.device) { + [self + _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:[NSString sc_stringWithFormat:@"unknown input device: %@", + resultDevice]]; + } + } + } else { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"no device"]; + } + } else { + 
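+            // Requested position matches the current state; only sanity-check that the device and state still agree.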
SCLogCapturerInfo(@"Device position did not change"); + if (_captureResource.device.position != _captureResource.state.devicePosition) { + [self _logFailureSetDevicePositionFrom:state.devicePosition + to:devicePosition + reason:@"state position set incorrectly"]; + } + } + BOOL stateChanged = ![_captureResource.state isEqual:state]; + state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (stateChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + } + if (devicePositionChanged) { + [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; + } + if (nightModeChanged) { + [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; + } + if (portraitModeChanged) { + [_captureResource.announcer managedCapturer:self didChangePortraitModeActive:state]; + } + if (zoomFactorChanged) { + [_captureResource.announcer managedCapturer:self didChangeZoomFactor:state]; + } + if (flashSupportedOrTorchSupportedChanged) { + [_captureResource.announcer managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)_logFailureSetDevicePositionFrom:(SCManagedCaptureDevicePosition)start + to:(SCManagedCaptureDevicePosition)end + reason:(NSString *)reason +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Device position change failed: %@", reason); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsCameraFlipFailure + parameters:@{ + @"start" : @(start), + @"end" : @(end), + @"reason" : reason, + }]; +} + +- (void)setFlashActive:(BOOL)flashActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL flashActiveOrFrontFlashEnabledChanged = NO; + if (_captureResource.state.flashActive != flashActive) { + [_captureResource.device setFlashActive:flashActive]; + SCLogCapturerInfo(@"Set flash active: %d -> %d", _captureResource.state.flashActive, flashActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setFlashActive:flashActive] build]; + flashActiveOrFrontFlashEnabledChanged = YES; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (flashActiveOrFrontFlashEnabledChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeFlashActive:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)setLensesActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [self _setLensesActive:lensesActive + liveVideoStreaming:NO + filterFactory:nil + completionHandler:completionHandler + context:context]; +} + +- (void)setLensesActive:(BOOL)lensesActive + filterFactory:(SCLookseryFilterFactory *)filterFactory + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [self _setLensesActive:lensesActive + liveVideoStreaming:NO + filterFactory:filterFactory + completionHandler:completionHandler + context:context]; +} + +- (void)setLensesInTalkActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + // Talk requires liveVideoStreaming to be turned on + BOOL 
liveVideoStreaming = lensesActive; + + dispatch_block_t activationBlock = ^{ + [self _setLensesActive:lensesActive + liveVideoStreaming:liveVideoStreaming + filterFactory:nil + completionHandler:completionHandler + context:context]; + }; + + @weakify(self); + [_captureResource.queuePerformer perform:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // If lenses are enabled in TV3 and it was enabled not from TV3. We have to turn off lenses off at first. + BOOL shouldTurnOffBeforeActivation = liveVideoStreaming && !self->_captureResource.state.liveVideoStreaming && + self->_captureResource.state.lensesActive; + if (shouldTurnOffBeforeActivation) { + [self _setLensesActive:NO + liveVideoStreaming:NO + filterFactory:nil + completionHandler:activationBlock + context:context]; + } else { + activationBlock(); + } + }]; +} + +- (void)_setLensesActive:(BOOL)lensesActive + liveVideoStreaming:(BOOL)liveVideoStreaming + filterFactory:(SCLookseryFilterFactory *)filterFactory + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting lenses active to: %d", lensesActive); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL lensesActiveChanged = NO; + if (_captureResource.state.lensesActive != lensesActive) { + SCLogCapturerInfo(@"Set lenses active: %d -> %d", _captureResource.state.lensesActive, lensesActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLensesActive:lensesActive] build]; + + // Update capturer settings(orientation and resolution) after changing state, because + // _setLiveVideoStreaming logic is depends on it + [self _setLiveVideoStreaming:liveVideoStreaming]; + + [SCCaptureWorker turnARSessionOff:_captureResource]; + + // Only enable sample buffer display when lenses is not active. + [_captureResource.videoDataSource setSampleBufferDisplayEnabled:!lensesActive]; + [_captureResource.debugInfoDict setObject:!lensesActive ? 
@"True" : @"False" + forKey:@"sampleBufferDisplayEnabled"]; + + lensesActiveChanged = YES; + [_captureResource.lensProcessingCore setAspectRatio:_captureResource.state.liveVideoStreaming]; + [_captureResource.lensProcessingCore setLensesActive:_captureResource.state.lensesActive + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:filterFactory]; + BOOL modifySource = _captureResource.state.liveVideoStreaming || _captureResource.videoRecording; + [_captureResource.lensProcessingCore setModifySource:modifySource]; + [_captureResource.lensProcessingCore setShouldMuteAllSounds:_captureResource.state.liveVideoStreaming]; + if (_captureResource.fileInputDecider.shouldProcessFileInput) { + [_captureResource.lensProcessingCore setLensesActive:YES + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:filterFactory]; + } + [_captureResource.videoDataSource + setVideoStabilizationEnabledIfSupported:!_captureResource.state.lensesActive]; + + if (SCIsMasterBuild()) { + // Check that connection configuration is correct + if (_captureResource.state.lensesActive && + _captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + for (AVCaptureOutput *output in _captureResource.managedSession.avSession.outputs) { + if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) { + AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo]; + SCAssert(connection.videoMirrored && + connection.videoOrientation == !_captureResource.state.liveVideoStreaming + ? AVCaptureVideoOrientationLandscapeRight + : AVCaptureVideoOrientationPortrait, + @"Connection configuration is not correct"); + } + } + } + } + } + dispatch_block_t viewChangeHandler = ^{ + SCManagedCapturerState *state = [_captureResource.state copy]; // update to latest state always + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeLensesActive:state]; + [_captureResource.videoPreviewGLViewManager setLensesActive:state.lensesActive]; + if (completionHandler) { + completionHandler(); + } + }); + }; + if (lensesActiveChanged && !lensesActive && SCDeviceSupportsMetal()) { + // If we are turning off lenses and have sample buffer display on. + // We need to wait until new frame presented in sample buffer before + // dismiss the Lenses' OpenGL view. 
+ [_captureResource.videoDataSource waitUntilSampleBufferDisplayed:_captureResource.queuePerformer.queue + completionHandler:viewChangeHandler]; + } else { + viewChangeHandler(); + } + }]; +} + +- (void)_setLiveVideoStreaming:(BOOL)liveVideoStreaming +{ + SCAssertPerformer(_captureResource.queuePerformer); + BOOL enableLiveVideoStreaming = liveVideoStreaming; + if (!_captureResource.state.lensesActive && liveVideoStreaming) { + SCLogLensesError(@"LiveVideoStreaming is not allowed when lenses are turned off"); + enableLiveVideoStreaming = NO; + } + SC_GUARD_ELSE_RETURN(enableLiveVideoStreaming != _captureResource.state.liveVideoStreaming); + + // We will disable blackCameraNoOutputDetector if in live video streaming + // In case there is some black camera when doing video call, will consider re-enable it + [self _setBlackCameraNoOutputDetectorEnabled:!liveVideoStreaming]; + + if (!_captureResource.device.isConnected) { + SCLogCapturerError(@"Can't perform configuration for live video streaming"); + } + SCLogCapturerInfo(@"Set live video streaming: %d -> %d", _captureResource.state.liveVideoStreaming, + enableLiveVideoStreaming); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLiveVideoStreaming:enableLiveVideoStreaming] build]; + + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (isStreaming) { + [_captureResource.videoDataSource stopStreaming]; + } + + SCLogCapturerInfo(@"Set live video streaming beginConfiguration"); + [_captureResource.managedSession performConfiguration:^{ + [_captureResource.videoDataSource beginConfiguration]; + + // If video chat is active we should use portrait orientation, otherwise landscape right + [_captureResource.videoDataSource setVideoOrientation:_captureResource.state.liveVideoStreaming + ? 
AVCaptureVideoOrientationPortrait + : AVCaptureVideoOrientationLandscapeRight]; + + [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming + session:_captureResource.managedSession.avSession]; + + [_captureResource.videoDataSource commitConfiguration]; + }]; + + SCLogCapturerInfo(@"Set live video streaming commitConfiguration"); + + if (isStreaming) { + [_captureResource.videoDataSource startStreaming]; + } +} + +- (void)_setBlackCameraNoOutputDetectorEnabled:(BOOL)enabled +{ + if (enabled) { + [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } else { + [self removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [_captureResource.videoDataSource + removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } +} + +- (void)setTorchActiveAsynchronously:(BOOL)torchActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting torch active asynchronously to: %d", torchActive); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL torchActiveChanged = NO; + if (_captureResource.state.torchActive != torchActive) { + [_captureResource.device setTorchActive:torchActive]; + if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = torchActive; + } + SCLogCapturerInfo(@"Set torch active: %d -> %d", _captureResource.state.torchActive, torchActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setTorchActive:torchActive] build]; + torchActiveChanged = YES; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (torchActiveChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)setNightModeActiveAsynchronously:(BOOL)active + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Only do the configuration if current device is connected + if (_captureResource.device.isConnected) { + SCLogCapturerInfo(@"Set night mode beginConfiguration"); + [_captureResource.managedSession performConfiguration:^{ + [self _setNightModeActive:active]; + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + }]; + SCLogCapturerInfo(@"Set night mode commitConfiguration"); + } + BOOL nightModeChanged = (_captureResource.state.isNightModeActive != active); + if (nightModeChanged) { + SCLogCapturerInfo(@"Set night mode active: %d -> %d", _captureResource.state.isNightModeActive, active); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setIsNightModeActive:active] build]; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (nightModeChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- 
(void)_setNightModeActive:(BOOL)active +{ + SCTraceODPCompatibleStart(2); + [_captureResource.device setNightModeActive:active session:_captureResource.managedSession.avSession]; + if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) { + [self _toggleSoftwareNightmode:active]; + } +} + +- (void)_toggleSoftwareNightmode:(BOOL)active +{ + SCTraceODPCompatibleStart(2); + if (active) { + SCLogCapturerInfo(@"Set enhanced night mode active"); + SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.enhancedNightMode = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } else { + SCLogCapturerInfo(@"Removing processing pipeline"); + [_captureResource.videoDataSource removeProcessingPipeline]; + } +} + +- (BOOL)_shouldCaptureImageFromVideo +{ + SCTraceODPCompatibleStart(2); + BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer]; + return isIphone5Series && !_captureResource.state.flashActive && ![self isLensApplied]; +} + +- (void)lockZoomWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SCLogCapturerInfo(@"Lock zoom"); + _captureResource.allowsZoom = NO; +} + +- (void)unlockZoomWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SCLogCapturerInfo(@"Unlock zoom"); + // Don't let anyone unlock the zoom while ARKit is active. When ARKit shuts down, it'll unlock it. + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + _captureResource.allowsZoom = YES; +} + +- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); + SCLogCapturerInfo(@"Setting zoom factor to: %f", zoomFactor); + [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:_captureResource.device immediately:NO]; +} + +- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); + SCLogCapturerInfo(@"Setting zoom factor to: %f devicePosition:%lu", zoomFactor, (unsigned long)devicePosition); + SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; + [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:device immediately:YES]; +} + +- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest + fromUser:(BOOL)fromUser + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + CGPoint exposurePoint; + if ([self isVideoMirrored]) { + exposurePoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); + } else { + exposurePoint = pointOfInterest; + } + if (_captureResource.device.softwareZoom) { + // Fix for the zooming factor + [_captureResource.device + setExposurePointOfInterest:CGPointMake( + (exposurePoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, + (exposurePoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5) + fromUser:fromUser]; + } else { 
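+                // No software zoom: pass the (possibly mirrored) point through unchanged. In the zoomed branch
+                // above, a factor of 2.0 would, for example, map a tap at (0.75, 0.5) to (0.625, 0.5).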
+ [_captureResource.device setExposurePointOfInterest:exposurePoint fromUser:fromUser]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + CGPoint focusPoint; + if ([self isVideoMirrored]) { + focusPoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); + } else { + focusPoint = pointOfInterest; + } + if (_captureResource.device.softwareZoom) { + // Fix for the zooming factor + [_captureResource.device + setAutofocusPointOfInterest:CGPointMake( + (focusPoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, + (focusPoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)]; + } else { + [_captureResource.device setAutofocusPointOfInterest:focusPoint]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:pointOfInterest + completionHandler:completionHandler + resource:_captureResource]; +} + +- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + [_captureResource.device continuousAutofocus]; + [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; + if (SCCameraTweaksEnablePortraitModeAutofocus()) { + [self setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5) + completionHandler:nil + context:context]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +#pragma mark - Add / Remove Listener + +- (void)addListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + // Only do the make sure thing if I added it to announcer fresh. 
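+    // In other words: if the listener was already registered, bail out; a freshly added listener is asynchronously
+    // replayed the current state (on the main thread) so it does not need to query initial values itself.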
+ SC_GUARD_ELSE_RETURN([_captureResource.announcer addListener:listener]); + // After adding the listener, make sure we call all of these methods with their + // initial values + [_captureResource.queuePerformer perform:^{ + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + LSAGLView *videoPreviewGLView = _captureResource.videoPreviewGLViewManager.view; + runOnMainThreadAsynchronously(^{ + SCTraceStart(); + if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { + [listener managedCapturer:self didChangeState:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { + [listener managedCapturer:self didChangeCaptureDevicePosition:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { + [listener managedCapturer:self didChangeNightModeActive:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { + [listener managedCapturer:self didChangeFlashActive:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { + [listener managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { + [listener managedCapturer:self didChangeZoomFactor:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { + [listener managedCapturer:self didChangeLowLightCondition:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { + [listener managedCapturer:self didChangeAdjustingExposure:state]; + } + if (!SCDeviceSupportsMetal()) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { + [listener managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + } + if (videoPreviewGLView && + [listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { + [listener managedCapturer:self didChangeVideoPreviewGLView:videoPreviewGLView]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { + [listener managedCapturer:self didChangeLensesActive:state]; + } + }); + }]; +} + +- (void)removeListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.announcer removeListener:listener]; +} + +- (void)addVideoDataSourceListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.videoDataSource addListener:listener]; +} + +- (void)removeVideoDataSourceListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.videoDataSource removeListener:listener]; +} + +- (void)addDeviceCapacityAnalyzerListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.deviceCapacityAnalyzer addListener:listener]; +} + +- (void)removeDeviceCapacityAnalyzerListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.deviceCapacityAnalyzer removeListener:listener]; +} + +#pragma mark - Debug + +- (NSString *)debugInfo +{ + SCTraceODPCompatibleStart(2); + NSMutableString *info = [NSMutableString new]; + [info appendString:@"==== SCManagedCapturer tokens ====\n"]; + [_captureResource.tokenSet enumerateObjectsUsingBlock:^(SCCapturerToken *_Nonnull token, BOOL *_Nonnull stop) { + [info appendFormat:@"%@\n", token.debugDescription]; + }]; + return
info.copy; +} + +- (NSString *)description +{ + return [self debugDescription]; +} + +- (NSString *)debugDescription +{ + return [NSString sc_stringWithFormat:@"SCManagedCapturer state:\n%@\nVideo streamer info:\n%@", + _captureResource.state.debugDescription, + _captureResource.videoDataSource.description]; +} + +- (CMTime)firstWrittenAudioBufferDelay +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker firstWrittenAudioBufferDelay:_captureResource]; +} + +- (BOOL)audioQueueStarted +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker audioQueueStarted:_captureResource]; +} + +#pragma mark - SCTimeProfilable + ++ (SCTimeProfilerContext)context +{ + return SCTimeProfilerContextCamera; +} + +// We disable and re-enable the liveness timer when the app enters the background and returns to the foreground + +- (void)applicationDidEnterBackground +{ + SCTraceODPCompatibleStart(2); + [SCCaptureWorker destroyLivenessConsistencyTimer:_captureResource]; + // Hide the view when in background. + if (!SCDeviceSupportsMetal()) { + [_captureResource.queuePerformer perform:^{ + _captureResource.appInBackground = YES; + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + _captureResource.videoPreviewLayer.hidden = YES; + [CATransaction commit]; + }]; + } else { + [_captureResource.queuePerformer perform:^{ + _captureResource.appInBackground = YES; + // If it is running, stop the streaming. + if (_captureResource.status == SCManagedCapturerStatusRunning) { + [_captureResource.videoDataSource stopStreaming]; + } + }]; + } + [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidEnterBackground]; +} + +- (void)applicationWillEnterForeground +{ + SCTraceODPCompatibleStart(2); + if (!SCDeviceSupportsMetal()) { + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + _captureResource.appInBackground = NO; + + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + + // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but that still needs to be verified. + if (SC_AT_LEAST_IOS_10) { + [self _runningConsistencyCheckAndFix]; + // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground. + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + }]; + } else { + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + _captureResource.appInBackground = NO; + if (_captureResource.status == SCManagedCapturerStatusRunning) { + [_captureResource.videoDataSource startStreaming]; + } + // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but that still needs to be verified. + if (SC_AT_LEAST_IOS_10) { + [self _runningConsistencyCheckAndFix]; + // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground.
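+ // The retry counter is reset below so that _fixAVSessionIfNecessary starts from a clean retry count for this foreground transition.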
+ _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + }]; + } + [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillEnterForeground]; +} + +- (void)applicationWillResignActive +{ + SCTraceODPCompatibleStart(2); + [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillResignActive]; + [_captureResource.queuePerformer perform:^{ + [self _pauseCaptureSessionKVOCheck]; + }]; +} + +- (void)applicationDidBecomeActive +{ + SCTraceODPCompatibleStart(2); + [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidBecomeActive]; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Since we foreground it, do the running consistency check immediately. + // Reset number of retries for fixing status inconsistency + _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; + [self _runningConsistencyCheckAndFix]; + if (!SC_AT_LEAST_IOS_10) { + // For OS version < iOS 10, try to fix AVCaptureSession after app becomes active. + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + [self _resumeCaptureSessionKVOCheck]; + if (_captureResource.status == SCManagedCapturerStatusRunning) { + // Reschedule the timer if we don't have it already + runOnMainThreadAsynchronously(^{ + SCTraceStart(); + [SCCaptureWorker setupLivenessConsistencyTimerIfForeground:_captureResource]; + }); + } + }]; +} + +- (void)_runningConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + // Don't enforce consistency on the simulator, as it'll constantly false-positive and restart the session. + SC_GUARD_ELSE_RETURN(![SCDeviceName isSimulator]); + if (_captureResource.state.arSessionActive) { + [self _runningARSessionConsistencyCheckAndFix]; + } else { + [self _runningAVCaptureSessionConsistencyCheckAndFix]; + } +} + +- (void)_runningARSessionConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCAssert(_captureResource.state.arSessionActive, @""); + if (@available(iOS 11.0, *)) { + // Occasionally the capture session will get into a weird "stuck" state. + // If this happens, we'll see that the timestamp for the most recent frame is behind the current time. + // Pausing the session for a moment and restarting it attempts to jog it loose.
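+ // The check below compares the most recent ARFrame timestamp against the current time; the threshold that + // applies depends on whether the ARSession camera is still initializing or has finished initializing, and in + // both cases the fix is a full ARSession off/on cycle via SCCaptureWorker.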
+ NSTimeInterval timeSinceLastFrame = CACurrentMediaTime() - _captureResource.arSession.currentFrame.timestamp; + BOOL reset = NO; + if (_captureResource.arSession.currentFrame.camera.trackingStateReason == ARTrackingStateReasonInitializing) { + if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold) { + SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (possible hung init), fix now ***"); + reset = YES; + } + } else if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) { + SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (init complete), fix now ***"); + reset = YES; + } + if (reset) { + [SCCaptureWorker turnARSessionOff:_captureResource]; + [SCCaptureWorker turnARSessionOn:_captureResource]; + } + } +} + +- (void)_runningAVCaptureSessionConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCAssert(!_captureResource.state.arSessionActive, @""); + [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + stepName:@"startConsistencyCheckAndFix"]; + // If the video preview layer's hidden status is out of sync with the + // session's running status, + // fix that now. Also, we don't care that much if the status is not running. + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + // Skip the liveness consistency check if we are in background + if (_captureResource.appInBackground) { + SCLogCapturerInfo(@"*** Skipped liveness consistency check, as we are in the background ***"); + return; + } + if (_captureResource.status == SCManagedCapturerStatusRunning && !_captureResource.managedSession.isRunning) { + SCGhostToSnappableSignalCameraFixInconsistency(); + SCLogCapturerInfo(@"*** Found status inconsistency for running, fix now ***"); + _captureResource.numRetriesFixInconsistencyWithCurrentSession++; + if (_captureResource.numRetriesFixInconsistencyWithCurrentSession <= + kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) { + SCTraceStartSection("Fix non-running session") + { + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [_captureResource.managedSession startRunning]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + [_captureResource.managedSession startRunning]; + } + } + SCTraceEndSection(); + } else { + SCTraceStartSection("Create new capturer session") + { + // start running with new capture session if the inconsistency fixing not succeeds + // after kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession retries + SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***"); + [self _startRunningWithNewCaptureSession]; + } + SCTraceEndSection(); + } + BOOL sessionIsRunning = _captureResource.managedSession.isRunning; + if (sessionIsRunning && !SCDeviceSupportsMetal()) { + // If it is fixed, we signal received the first frame. + SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + runOnMainThreadAsynchronously(^{ + // To approximate this did render timer, it is not accurate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + }); + } + SCLogCapturerInfo(@"*** Applied inconsistency fix, running state : %@ ***", sessionIsRunning ? 
@"YES" : @"NO"); + if (_captureResource.managedSession.isRunning) { + [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + stepName:@"finishConsistencyCheckAndFix"]; + [[SCLogger sharedInstance] + logTimedEventEnd:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + parameters:@{ + @"count" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession) + }]; + } + } else { + [[SCLogger sharedInstance] cancelLogTimedEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@""]; + // Reset number of retries for fixing status inconsistency + _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; + } + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning]; +} + +- (void)mediaServicesWereReset +{ + SCTraceODPCompatibleStart(2); + [self mediaServicesWereLost]; + [_captureResource.queuePerformer perform:^{ + /* If the current state requires the ARSession, restart it. + Explicitly flip the arSessionActive flag so that `turnSessionOn` thinks it can reset itself. + */ + if (_captureResource.state.arSessionActive) { + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setArSessionActive:NO] build]; + [SCCaptureWorker turnARSessionOn:_captureResource]; + } + }]; +} + +- (void)mediaServicesWereLost +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + if (!_captureResource.state.arSessionActive && !_captureResource.managedSession.isRunning) { + /* + If the session is running we will trigger + _sessionRuntimeError: so nothing else is + needed here. + */ + [_captureResource.videoCapturer.outputURL reloadAssetKeys]; + } + }]; +} + +- (void)_livenessConsistency:(NSTimer *)timer +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + // We can directly check the application state because this timer is scheduled + // on the main thread. 
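+ // When the app is not active we skip the check here; applicationDidBecomeActive schedules another + // consistency check as soon as we return to the foreground.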
+ if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) { + [_captureResource.queuePerformer perform:^{ + [self _runningConsistencyCheckAndFix]; + }]; + } +} + +- (void)_sessionRuntimeError:(NSNotification *)notification +{ + SCTraceODPCompatibleStart(2); + NSError *sessionError = notification.userInfo[AVCaptureSessionErrorKey]; + SCLogCapturerError(@"Encountered runtime error for capture session %@", sessionError); + + NSString *errorString = + [sessionError.description stringByReplacingOccurrencesOfString:@" " withString:@"_"].uppercaseString + ?: @"UNKNOWN_ERROR"; + [[SCUserTraceLogger shared] + logUserTraceEvent:[NSString sc_stringWithFormat:@"AVCAPTURESESSION_RUNTIME_ERROR_%@", errorString]]; + + if (sessionError.code == AVErrorMediaServicesWereReset) { + // If it is an AVErrorMediaServicesWereReset error, we can just call startRunning, which is much more lightweight + [_captureResource.queuePerformer perform:^{ + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [_captureResource.managedSession startRunning]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + [_captureResource.managedSession startRunning]; + } + }]; + } else { + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCoreCameraInfo(@"Fixing session runtime error is scheduled, skip"); + return; + } + + _captureResource.isRecreateSessionFixScheduled = YES; + NSTimeInterval delay = 0; + NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate]; + if (timeNow - _captureResource.lastSessionRuntimeErrorTime < kMinFixSessionRuntimeErrorInterval) { + SCLogCoreCameraInfo(@"Fixing runtime error session in less than %f, delay", + kMinFixSessionRuntimeErrorInterval); + delay = kMinFixSessionRuntimeErrorInterval; + } + _captureResource.lastSessionRuntimeErrorTime = timeNow; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Occasionally _captureResource.avSession will throw an error when shutting down. If this happens while + // ARKit is starting up, + // _startRunningWithNewCaptureSession will throw a wrench in ARSession startup and freeze the image. + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + // Need to reset the flag before _startRunningWithNewCaptureSession + _captureResource.isRecreateSessionFixScheduled = NO; + [self _startRunningWithNewCaptureSession]; + [self _fixAVSessionIfNecessary]; + } + after:delay]; + } + + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsRuntimeError + parameters:@{ + @"error" : sessionError == nil ? @"Unknown error" : sessionError.description, + } + secretParameters:nil + metrics:nil]; +} + +- (void)_startRunningWithNewCaptureSessionIfNecessary +{ + SCTraceODPCompatibleStart(2); + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCapturerInfo(@"Session recreation is scheduled, return"); + return; + } + _captureResource.isRecreateSessionFixScheduled = YES; + [_captureResource.queuePerformer perform:^{ + // Need to reset the flag before _startRunningWithNewCaptureSession + _captureResource.isRecreateSessionFixScheduled = NO; + [self _startRunningWithNewCaptureSession]; + }]; +} + +- (void)_startRunningWithNewCaptureSession +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCLogCapturerInfo(@"Start running with new capture session.
isRecording:%d isStreaming:%d status:%lu", + _captureResource.videoRecording, _captureResource.videoDataSource.isStreaming, + (unsigned long)_captureResource.status); + + // Mark the start of recreating session + [_captureResource.blackCameraDetector sessionWillRecreate]; + + // Light weight fix gating + BOOL lightWeightFix = SCCameraTweaksSessionLightWeightFixEnabled() || SCCameraTweaksBlackCameraRecoveryEnabled(); + + if (!lightWeightFix) { + [_captureResource.deviceCapacityAnalyzer removeListener:_captureResource.stillImageCapturer]; + [self removeListener:_captureResource.stillImageCapturer]; + [_captureResource.videoDataSource removeListener:_captureResource.lensProcessingCore.capturerListener]; + + [_captureResource.videoDataSource removeListener:_captureResource.deviceCapacityAnalyzer]; + [_captureResource.videoDataSource removeListener:_captureResource.stillImageCapturer]; + + if (SCIsMasterBuild()) { + [_captureResource.videoDataSource removeListener:_captureResource.videoStreamReporter]; + } + [_captureResource.videoDataSource removeListener:_captureResource.videoScanner]; + [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; + [_captureResource.videoDataSource + removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } + + [_captureResource.videoCapturer.outputURL reloadAssetKeys]; + + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (_captureResource.videoRecording) { + // Stop video recording prematurely + [self stopRecordingAsynchronouslyWithContext:SCCapturerContext]; + NSError *error = [NSError + errorWithDomain:kSCManagedCapturerErrorDomain + description: + [NSString + sc_stringWithFormat:@"Interrupt video recording to start new session. %@", + @{ + @"isAVSessionRunning" : @(_captureResource.managedSession.isRunning), + @"numRetriesFixInconsistency" : + @(_captureResource.numRetriesFixInconsistencyWithCurrentSession), + @"numRetriesFixAVCaptureSession" : + @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession), + @"lastSessionRuntimeErrorTime" : + @(_captureResource.lastSessionRuntimeErrorTime), + }] + code:-1]; + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoRecordingInterrupted + parameters:@{ + @"error" : error.description + } + secretParameters:nil + metrics:nil]; + } + @try { + if (@available(iOS 11.0, *)) { + [_captureResource.arSession pause]; + if (!lightWeightFix) { + [_captureResource.videoDataSource removeListener:_captureResource.arImageCapturer]; + } + } + [_captureResource.managedSession stopRunning]; + [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; + } @catch (NSException *exception) { + SCLogCapturerError(@"Encountered Exception %@", exception); + } @finally { + // Nil out device inputs from both devices + [[SCManagedCaptureDevice front] resetDeviceAsInput]; + [[SCManagedCaptureDevice back] resetDeviceAsInput]; + } + + if (!SCDeviceSupportsMetal()) { + // Redo the video preview to mitigate https://ph.sc-corp.net/T42584 + [SCCaptureWorker redoVideoPreviewLayer:_captureResource]; + } + +#if !TARGET_IPHONE_SIMULATOR + if (@available(iOS 11.0, *)) { + _captureResource.arSession = [[ARSession alloc] init]; + _captureResource.arImageCapturer = + [_captureResource.arImageCaptureProvider arImageCapturerWith:_captureResource.queuePerformer + lensProcessingCore:_captureResource.lensProcessingCore]; + } + [self _resetAVCaptureSession]; +#endif + [_captureResource.managedSession.avSession 
setAutomaticallyConfiguresApplicationAudioSession:NO]; + [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession]; + + if (_captureResource.fileInputDecider.shouldProcessFileInput) { + // Keep the same logic, always create new VideoDataSource + [self _setupNewVideoFileDataSource]; + } else { + if (!lightWeightFix) { + [self _setupNewVideoDataSource]; + } else { + [self _setupVideoDataSourceWithNewSession]; + } + } + + if (_captureResource.status == SCManagedCapturerStatusRunning) { + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + // Set the session to be the new session before start running. + _captureResource.videoPreviewLayer.session = _captureResource.managedSession.avSession; + if (!_captureResource.appInBackground) { + [_captureResource.managedSession startRunning]; + } + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + if (!_captureResource.appInBackground) { + [_captureResource.managedSession startRunning]; + } + } + } + // Since this start and stop happens in one block, we don't have to worry + // about streamingSequence issues + if (isStreaming) { + [_captureResource.videoDataSource startStreaming]; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didResetFromRuntimeError:state]; + if (!SCDeviceSupportsMetal()) { + [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + }); + + // Mark the end of recreating session + [_captureResource.blackCameraDetector sessionDidRecreate]; +} + +/** + * Heavy-weight session fixing approach: recreating everything + */ +- (void)_setupNewVideoDataSource +{ + if (@available(iOS 11.0, *)) { + _captureResource.videoDataSource = + [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession + arSession:_captureResource.arSession + devicePosition:_captureResource.state.devicePosition]; + [_captureResource.videoDataSource addListener:_captureResource.arImageCapturer]; + if (_captureResource.state.isPortraitModeActive) { + [_captureResource.videoDataSource setDepthCaptureEnabled:YES]; + + SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.portraitModeEnabled = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } + } else { + _captureResource.videoDataSource = + [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + } + + [self _setupVideoDataSourceListeners]; +} + +- (void)_setupNewVideoFileDataSource +{ + _captureResource.videoDataSource = + [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:_captureResource.fileInputDecider.fileURL]; + [_captureResource.lensProcessingCore setLensesActive:YES + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:nil]; + runOnMainThreadAsynchronously(^{ + [_captureResource.videoPreviewGLViewManager prepareViewIfNecessary]; + }); + [self _setupVideoDataSourceListeners]; +} + +/** + * Light-weight session fixing approach: recreating AVCaptureSession / AVCaptureOutput, and bind it to the new session + */ +- 
(void)_setupVideoDataSourceWithNewSession +{ + if (@available(iOS 11.0, *)) { + SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; + [streamer setupWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + [streamer setupWithARSession:_captureResource.arSession]; + } else { + SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; + [streamer setupWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + } + [_captureResource.stillImageCapturer setupWithSession:_captureResource.managedSession.avSession]; +} + +- (void)_setupVideoDataSourceListeners +{ + if (_captureResource.videoFrameSampler) { + [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; + } + + [_captureResource.videoDataSource addSampleBufferDisplayController:_captureResource.sampleBufferDisplayController]; + [_captureResource.videoDataSource addListener:_captureResource.lensProcessingCore.capturerListener]; + [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; + if (SCIsMasterBuild()) { + [_captureResource.videoDataSource addListener:_captureResource.videoStreamReporter]; + } + [_captureResource.videoDataSource addListener:_captureResource.videoScanner]; + [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + _captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:_captureResource]; + [_captureResource.deviceCapacityAnalyzer addListener:_captureResource.stillImageCapturer]; + [_captureResource.videoDataSource addListener:_captureResource.stillImageCapturer]; + + [self addListener:_captureResource.stillImageCapturer]; +} + +- (void)_resetAVCaptureSession +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + // lazily initialize _captureResource.kvoController on background thread + if (!_captureResource.kvoController) { + _captureResource.kvoController = [[FBKVOController alloc] initWithObserver:self]; + } + [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; + _captureResource.managedSession = + [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:_captureResource.blackCameraDetector]; + [_captureResource.kvoController observe:_captureResource.managedSession.avSession + keyPath:@keypath(_captureResource.managedSession.avSession, running) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + action:_captureResource.handleAVSessionStatusChange]; +} + +- (void)_pauseCaptureSessionKVOCheck +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; +} + +- (void)_resumeCaptureSessionKVOCheck +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + [_captureResource.kvoController observe:_captureResource.managedSession.avSession + keyPath:@keypath(_captureResource.managedSession.avSession, running) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + action:_captureResource.handleAVSessionStatusChange]; +} + +- (id)currentVideoDataSource +{ + SCTraceODPCompatibleStart(2); + return 
_captureResource.videoDataSource; +} + +- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + // Front and back should be available if user has no restriction on camera. + BOOL front = [[SCManagedCaptureDevice front] isAvailable]; + BOOL back = [[SCManagedCaptureDevice back] isAvailable]; + AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; + runOnMainThreadAsynchronously(^{ + callback(front, back, status); + }); + }]; +} + +- (SCSnapCreationTriggers *)snapCreationTriggers +{ + return _captureResource.snapCreationTriggers; +} + +- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector + deviceMotionProvider:(id)deviceMotionProvider + fileInputDecider:(id)fileInputDecider + arImageCaptureProvider:(id)arImageCaptureProvider + glviewManager:(id)glViewManager + lensAPIProvider:(id)lensAPIProvider + lsaComponentTracker:(id)lsaComponentTracker + managedCapturerPreviewLayerControllerDelegate: + (id)previewLayerControllerDelegate +{ + _captureResource.blackCameraDetector = blackCameraDetector; + _captureResource.deviceMotionProvider = deviceMotionProvider; + _captureResource.fileInputDecider = fileInputDecider; + _captureResource.arImageCaptureProvider = arImageCaptureProvider; + _captureResource.videoPreviewGLViewManager = glViewManager; + [_captureResource.videoPreviewGLViewManager configureWithCaptureResource:_captureResource]; + _captureResource.lensAPIProvider = lensAPIProvider; + _captureResource.lsaTrackingComponentHandler = lsaComponentTracker; + [_captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_captureResource]; + _captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate; + [SCManagedCapturePreviewLayerController sharedInstance].delegate = previewLayerControllerDelegate; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerV1_Private.h b/ManagedCapturer/SCManagedCapturerV1_Private.h new file mode 100644 index 0000000..9826155 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerV1_Private.h @@ -0,0 +1,20 @@ +// +// SCManagedCapturerV1_Private.h +// Snapchat +// +// Created by Jingtian Yang on 20/12/2017. +// + +#import "SCManagedCapturerV1.h" + +@interface SCManagedCapturerV1 () + +- (SCCaptureResource *)captureResource; + +- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler; + +- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h new file mode 100644 index 0000000..07aca7f --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h @@ -0,0 +1,32 @@ +// +// SCManagedDeviceCapacityAnalyzer.h +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
+// + +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +#import + +@class SCManagedCaptureDevice; +@protocol SCPerforming; + +extern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh; + +@interface SCManagedDeviceCapacityAnalyzer : NSObject + +@property (nonatomic, assign) BOOL lowLightConditionEnabled; + +- (instancetype)initWithPerformer:(id)performer; + +- (void)addListener:(id)listener; +- (void)removeListener:(id)listener; + +- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice; +- (void)removeFocusListener; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m new file mode 100644 index 0000000..94f2804 --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m @@ -0,0 +1,294 @@ +// +// SCManagedDeviceCapacityAnalyzer.m +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedDeviceCapacityAnalyzer.h" + +#import "SCCameraSettingUtils.h" +#import "SCCameraTweaks.h" +#import "SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedCaptureDevice.h" +#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h" + +#import +#import +#import +#import +#import + +#import + +@import ImageIO; +@import QuartzCore; + +NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500; + +NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800; + +NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640; + +NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800; + +// After this many frames without a change in exposure time or ISO, we assume that the exposure adjustment has ended. +static NSInteger const kExposureUnchangedHighWatermark = 5; +// If the deadline is reached and we still haven't hit the high watermark, we consult the low watermark and at least +// give the system a chance to take not-so-great pictures. +static NSInteger const kExposureUnchangedLowWatermark = 1; +static NSTimeInterval const kExposureUnchangedDeadline = 0.2; + +// It seems that between ISO 500 and 640, the brightness value is always somewhere around -0.4 to -0.5. +// Therefore, this threshold will probably work fine. +static float const kBrightnessValueThreshold = -2.25; +// Give some margin between being recognized as bright enough and as not having enough light. +// If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval, +// we count the frame as a low-light frame. Only if the brightness is higher than +// kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval do we consider that we +// have enough light, and reset the low-light frame count to 0. 0.5 was chosen because in a dark +// environment the brightness value changes by +-0.3 with minor orientation changes.
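+// For example, with the values used here (threshold -2.25, confidence interval 0.5), a frame with brightness +// below -2.75 counts toward the low-light watermark, and only a brightness of -1.75 or above resets the +// low-light frame count.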
+static float const kBrightnessValueThresholdConfidenceInterval = 0.5; +// If we have been in a good light condition for this many consecutive frames, we are ready to change back +static NSInteger const kLowLightBoostUnchangedLowWatermark = 7; +// Requires we are at low light condition for ~2 seconds (assuming 20~30fps) +static NSInteger const kLowLightBoostUnchangedHighWatermark = 25; + +static NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second +static float const kSCLightingConditionNormalThreshold = 0; +static float const kSCLightingConditionDarkThreshold = -3; + +@implementation SCManagedDeviceCapacityAnalyzer { + float _lastExposureTime; + int _lastISOSpeedRating; + NSTimeInterval _lastAdjustingExposureStartTime; + + NSInteger _lowLightBoostLowLightCount; + NSInteger _lowLightBoostEnoughLightCount; + NSInteger _exposureUnchangedCount; + NSInteger _maxISOPresetHigh; + + NSInteger _normalLightingConditionCount; + NSInteger _darkLightingConditionCount; + NSInteger _extremeDarkLightingConditionCount; + SCCapturerLightingConditionType _lightingCondition; + + BOOL _lowLightCondition; + BOOL _adjustingExposure; + + SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer; + FBKVOController *_observeController; + id _performer; + + float + _lastBrightnessToLog; // Remember the last logged brightness; only log again if it changes by more than a threshold +} + +- (instancetype)initWithPerformer:(id)performer +{ + SCTraceStart(); + self = [super init]; + if (self) { + _performer = performer; + _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI; + if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) { + _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8; + } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) { + _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7; + } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) { + // The iPhone 6S supports a higher ISO rate for video recording; accommodating that.
+ _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S; + } + _announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init]; + _observeController = [[FBKVOController alloc] initWithObserver:self]; + } + return self; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + [_announcer removeListener:listener]; +} + +- (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled +{ + SCTraceStart(); + if (_lowLightConditionEnabled != lowLightConditionEnabled) { + _lowLightConditionEnabled = lowLightConditionEnabled; + if (!lowLightConditionEnabled) { + _lowLightBoostLowLightCount = 0; + _lowLightBoostEnoughLightCount = 0; + _lowLightCondition = NO; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + } + } +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + SampleBufferMetadata metadata = { + .isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime, + }; + retrieveSampleBufferMetadata(sampleBuffer, &metadata); + if ((SCIsDebugBuild() || SCIsMasterBuild()) + // Enable this on internal build only (excluding alpha) + && fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) { + // Log only when brightness change is greater than 0.5 + _lastBrightnessToLog = metadata.brightness; + SCLogCoreCameraInfo(@"ExposureTime: %f, ISO: %ld, Brightness: %f", metadata.exposureTime, + (long)metadata.isoSpeedRating, metadata.brightness); + } + [self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating]; + _lastExposureTime = metadata.exposureTime; + _lastISOSpeedRating = metadata.isoSpeedRating; + if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh && + _lowLightConditionEnabled) { // If we are not recording, we are not at ISO higher than we needed + [self _automaticallyDetectLowLightCondition:metadata.brightness]; + } + [self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness]; + [_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness]; +} + +- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice +{ + SCTraceStart(); + [_observeController observe:captureDevice.device + keyPath:@keypath(captureDevice.device, adjustingFocus) + options:NSKeyValueObservingOptionNew + action:@selector(_adjustingFocusingChanged:)]; +} + +- (void)removeFocusListener +{ + SCTraceStart(); + [_observeController unobserveAll]; +} + +#pragma mark - Private methods + +- (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating +{ + SCTraceStart(); + if (currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) { + _exposureUnchangedCount = 0; + } else { + ++_exposureUnchangedCount; + } + NSTimeInterval currentTime = CACurrentMediaTime(); + if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark || + (currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline && + _exposureUnchangedCount >= kExposureUnchangedLowWatermark)) { + // The exposure values haven't changed for kExposureUnchangedHighWatermark times, considering the adjustment + // as done. 
Otherwise, if we have waited long enough and the exposure-unchanged count has at least reached the low + // watermark, we call it done and give it a shot. + if (_adjustingExposure) { + _adjustingExposure = NO; + SCLogGeneralInfo(@"Adjusting exposure is done, unchanged count: %zd", _exposureUnchangedCount); + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; + } + } else { + // Otherwise signal that we have adjustments on exposure + if (!_adjustingExposure) { + _adjustingExposure = YES; + _lastAdjustingExposureStartTime = currentTime; + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; + } + } +} + +- (void)_automaticallyDetectLowLightCondition:(float)brightness +{ + SCTraceStart(); + if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) { + // If we are at the stage where we need a higher ISO (because the current ISO is maxed out) + // and the brightness is lower than the threshold + if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) { + // Either count how many consecutive frames like this we have encountered, + // or, if the watermark has been reached, change the low light boost mode + if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) { + _lowLightCondition = YES; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + } else { + ++_lowLightBoostLowLightCount; + } + } else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { + // If the brightness is consistently better, reset the low light boost unchanged count to 0 + _lowLightBoostLowLightCount = 0; + } + } else if (_lowLightCondition) { + // Check the current ISO to see if we can disable low light boost + if (_lastISOSpeedRating <= _maxISOPresetHigh && + brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { + if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) { + _lowLightCondition = NO; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + _lowLightBoostEnoughLightCount = 0; + } else { + ++_lowLightBoostEnoughLightCount; + } + } + } +} + +- (void)_adjustingFocusingChanged:(NSDictionary *)change +{ + SCTraceStart(); + BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue]; + [_performer perform:^{ + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus]; + }]; +} + +- (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness +{ + if (brightness >= kSCLightingConditionNormalThreshold) { + if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeNormal) { + _lightingCondition = SCCapturerLightingConditionTypeNormal; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeNormal]; + } + } else { + _normalLightingConditionCount++; + } + _darkLightingConditionCount = 0; + _extremeDarkLightingConditionCount = 0; + } else if (brightness >= kSCLightingConditionDarkThreshold) { + if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeDark) { + _lightingCondition = SCCapturerLightingConditionTypeDark; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeDark]; + } + } else { +
_darkLightingConditionCount++; + } + _normalLightingConditionCount = 0; + _extremeDarkLightingConditionCount = 0; + } else { + if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) { + _lightingCondition = SCCapturerLightingConditionTypeExtremeDark; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark]; + } + } else { + _extremeDarkLightingConditionCount++; + } + _normalLightingConditionCount = 0; + _darkLightingConditionCount = 0; + } +} + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h new file mode 100644 index 0000000..537f517 --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h @@ -0,0 +1,20 @@ +// +// SCManagedDeviceCapacityAnalyzerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +@class SCCaptureResource; + +@interface SCManagedDeviceCapacityAnalyzerHandler : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m new file mode 100644 index 0000000..d0f660c --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m @@ -0,0 +1,72 @@ +// +// SCManagedDeviceCapacityAnalyzerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedDeviceCapacityAnalyzerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" + +#import +#import +#import +#import + +@interface SCManagedDeviceCapacityAnalyzerHandler () { + __weak SCCaptureResource *_captureResource; +} +@end + +@implementation SCManagedDeviceCapacityAnalyzerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Change Low Light Condition %d", lowLightCondition); + [_captureResource.queuePerformer perform:^{ + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLowLightCondition:lowLightCondition] build]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeLowLightCondition:state]; + }); + }]; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Capacity Analyzer Changes adjustExposure %d", adjustingExposure); + [_captureResource.queuePerformer perform:^{ + _captureResource.state = [[[SCManagedCapturerStateBuilder 
withManagedCapturerState:_captureResource.state] + setAdjustingExposure:adjustingExposure] build]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeAdjustingExposure:state]; + }); + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h new file mode 100644 index 0000000..21f9beb --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h @@ -0,0 +1,35 @@ +//#!announcer.rb +// SCManagedDeviceCapacityAnalyzerListener.h +// Snapchat +// +// Created by Liu Liu on 5/4/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCCapturerDefines.h" + +#import + +@class SCManagedDeviceCapacityAnalyzer; + +@protocol SCManagedDeviceCapacityAnalyzerListener + +@optional + +// These callbacks happen on a internal queue +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:(BOOL)adjustingFocus; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeBrightness:(float)adjustingBrightness; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h new file mode 100644 index 0000000..933e0fa --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h @@ -0,0 +1,12 @@ +// Generated by the announcer.rb DO NOT EDIT!! + +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +@interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject + +- (void)addListener:(id)listener; +- (void)removeListener:(id)listener; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm new file mode 100644 index 0000000..85581ff --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm @@ -0,0 +1,146 @@ +// Generated by the announcer.rb DO NOT EDIT!! 
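+// Note: the announcer keeps its listeners in an immutable vector held by a shared_ptr; addListener/removeListener +// build a new vector under a mutex and swap it in atomically, so the announce methods can iterate a loaded snapshot +// without taking the lock.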
+ +#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h" + +#include +using std::lock_guard; +using std::mutex; +#include +using std::find; +using std::make_shared; +using std::shared_ptr; +using std::vector; + +@implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer { + mutex _mutex; + shared_ptr>> _listeners; +} + +- (NSString *)description +{ + auto listeners = atomic_load(&self->_listeners); + NSMutableString *desc = [NSMutableString string]; + [desc appendFormat:@": [", self]; + for (int i = 0; i < listeners->size(); ++i) { + [desc appendFormat:@"%@", (*listeners)[i]]; + if (i != listeners->size() - 1) { + [desc appendString:@", "]; + } + } + [desc appendString:@"]"]; + return desc; +} + +- (void)addListener:(id)listener +{ + lock_guard lock(_mutex); + auto listeners = make_shared>>(); + if (_listeners != nil) { + // The listener we want to add already exists + if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { + return; + } + for (auto &one : *_listeners) { + if (one != nil) { + listeners->push_back(one); + } + } + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } else { + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } +} + +- (void)removeListener:(id)listener +{ + lock_guard lock(_mutex); + if (_listeners == nil) { + return; + } + // If the only item in the listener list is the one we want to remove, store it back to nil again + if (_listeners->size() == 1 && (*_listeners)[0] == listener) { + atomic_store(&self->_listeners, shared_ptr>>()); + return; + } + auto listeners = make_shared>>(); + for (auto &one : *_listeners) { + if (one != nil && one != listener) { + listeners->push_back(one); + } + } + atomic_store(&self->_listeners, listeners); +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeLowLightCondition:lowLightCondition]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:adjustingExposure]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:(BOOL)adjustingFocus +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:adjustingFocus]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeBrightness:(float)adjustingBrightness +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id 
listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeBrightness:adjustingBrightness]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeLightingCondition:lightingCondition]; + } + } + } +} + +@end diff --git a/ManagedCapturer/SCManagedDroppedFramesReporter.h b/ManagedCapturer/SCManagedDroppedFramesReporter.h new file mode 100644 index 0000000..e7404fb --- /dev/null +++ b/ManagedCapturer/SCManagedDroppedFramesReporter.h @@ -0,0 +1,25 @@ +// +// SCManagedDroppedFramesReporter.h +// Snapchat +// +// Created by Michel Loenngren on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturerListener.h" + +#import + +#import + +/* + Conforms to SCManagedVideoDataSourceListener and records frame rate statistics + during recording. + */ +@interface SCManagedDroppedFramesReporter : NSObject + +- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied; + +- (void)didChangeCaptureDevicePosition; + +@end diff --git a/ManagedCapturer/SCManagedDroppedFramesReporter.m b/ManagedCapturer/SCManagedDroppedFramesReporter.m new file mode 100644 index 0000000..b64a679 --- /dev/null +++ b/ManagedCapturer/SCManagedDroppedFramesReporter.m @@ -0,0 +1,86 @@ +// +// SCManagedDroppedFramesReporter.m +// Snapchat +// +// Created by Michel Loenngren on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedDroppedFramesReporter.h" + +#import "SCCameraTweaks.h" +#import "SCManagedCapturerState.h" + +#import +#import +#import +#import +#import +#import + +CGFloat const kSCCaptureTargetFramerate = 30; + +@interface SCManagedDroppedFramesReporter () + +@property (nonatomic) SCVideoFrameDropCounter *frameDropCounter; + +@end + +@implementation SCManagedDroppedFramesReporter { + SCVideoFrameDropCounter *_frameDropCounter; + NSUInteger _droppedFrames; +} + +- (SCVideoFrameDropCounter *)frameDropCounter +{ + if (_frameDropCounter == nil) { + _frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate]; + _droppedFrames = 0; + } + return _frameDropCounter; +} + +- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied +{ + if (_frameDropCounter == nil) { + return; + } + + NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy]; + eventDict[@"total_frame_drop_measured"] = @(_droppedFrames); + eventDict[@"keep_late_frames"] = @(keepLateFrames); + // Even if the user selects none of the lenses while the lens scroll view is active, we still enable keepLateFrames + eventDict[@"lenses_applied"] = @(lensesApplied); + + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict]; + + // Reset + _frameDropCounter = nil; + _droppedFrames = 0; +} + +- (void)didChangeCaptureDevicePosition +{ + [_frameDropCounter didChangeCaptureDevicePosition]; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + [self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _droppedFrames += 1; + NSDictionary *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport(); + SCLogCoreCameraInfo(@"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@", + backgroundTaskScreenshot); +} + +@end diff --git a/ManagedCapturer/SCManagedFrameHealthChecker.h b/ManagedCapturer/SCManagedFrameHealthChecker.h new file mode 100644 index 0000000..6b783b1 --- /dev/null +++ b/ManagedCapturer/SCManagedFrameHealthChecker.h @@ -0,0 +1,57 @@ +// +// SCManagedFrameHealthChecker.h +// Snapchat +// +// Created by Pinlin Chen on 30/08/2017. +// + +#import +#import + +#import +#import + +@interface SCManagedFrameHealthChecker : NSObject + ++ (SCManagedFrameHealthChecker *)sharedInstance; +/*! @abstract Use sharedInstance instead.
*/ +SC_INIT_AND_NEW_UNAVAILABLE; + +/* Utility method */ +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo; +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID; +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID; +- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset; + +/* Image snap */ +- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image + captureSettings:(NSDictionary *)captureSettings + captureSessionID:(NSString *)captureSessionID; +- (void)checkImageHealthForPreTranscoding:(UIImage *)image + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkImageHealthForPostTranscoding:(NSData *)imageData + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; + +/* Video snap */ +- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkVideoHealthForOverlayImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image + metedata:(NSDictionary *)metadata + properties:(NSDictionary *)properties + captureSessionID:(NSString *)captureSessionID; + +- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID; + +@end diff --git a/ManagedCapturer/SCManagedFrameHealthChecker.m b/ManagedCapturer/SCManagedFrameHealthChecker.m new file mode 100644 index 0000000..b261a0d --- /dev/null +++ b/ManagedCapturer/SCManagedFrameHealthChecker.m @@ -0,0 +1,709 @@ +// +// SCManagedFrameHealthChecker.m +// Snapchat +// +// Created by Pinlin Chen on 30/08/2017. 
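+//
+// How the checker works (summary of the implementation below):
+//   - Check tasks are queued per captureSessionID on a serial QOS_CLASS_UTILITY
+//     SCQueuePerformer; only the most recent task of each type is kept per session.
+//   - Before analysis the image is scaled so its longest edge is at most
+//     kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength (300).
+//   - At most kSCManagedFrameHealthCheckerMaxSamples (2304) pixels are sampled with a fixed
+//     byte stride, and per-channel sums are computed with vDSP (vDSP_vfltu8 to widen the
+//     strided bytes to float, vDSP_sve to sum). Worked example for a 300x300 RGBA image:
+//     90000 pixels, stripLength = 90000 / 2304 * 4 = 156 bytes, i.e. every 39th pixel,
+//     2304 samples in total.
+//   - A frame is flagged "possibly black" when the average alpha is above 250 and the
+//     average R, G and B are each below kSCManagedFrameHealthCheckerPossibleBlackThreshold
+//     (20), and "totally black" when those averages are exactly 0.
+//   - reportFrameHealthCheckForCaptureSessionID: flushes the session's queue, logging one
+//     kSCCameraMetricsFrameHealthCheckIndex event per task; the whole queue is dropped if
+//     free memory is below kSCManagedFrameHealthCheckerMinFreeMemMB (72 MB).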
+// + +#import "SCManagedFrameHealthChecker.h" + +#import "SCCameraSettingUtils.h" +#import "SCCameraTweaks.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import +@import Accelerate; + +static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker"; +static const int kSCManagedFrameHealthCheckerMaxSamples = 2304; +static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0; +static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0; +static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0; +// assume we could process at most of 2 RGBA images which are 2304*4096 RGBA image +static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0; + +typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) { + SCManagedFrameHealthCheck_ImageCapture = 0, + SCManagedFrameHealthCheck_ImagePreTranscoding, + SCManagedFrameHealthCheck_ImagePostTranscoding, + SCManagedFrameHealthCheck_VideoCapture, + SCManagedFrameHealthCheck_VideoOverlayImage, + SCManagedFrameHealthCheck_VideoPostTranscoding, +}; + +typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) { + SCManagedFrameHealthCheckError_None = 0, + SCManagedFrameHealthCheckError_Invalid_Bitmap, + SCManagedFrameHealthCheckError_Frame_Possibly_Black, + SCManagedFrameHealthCheckError_Frame_Totally_Black, + SCManagedFrameHealthCheckError_Execution_Error, +}; + +typedef struct { + float R; + float G; + float B; + float A; +} FloatRGBA; + +@class SCManagedFrameHealthCheckerTask; +typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task); + +float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength) +{ + float sum = 0; + float colorArray[bufferLength]; + // Convert to float for DSP registerator + vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength); + // Calculate sum of color element + vDSP_sve(colorArray, 1, &sum, bufferLength); + return sum; +} + +@interface SCManagedFrameHealthCheckerTask : NSObject + +@property (nonatomic, assign) SCManagedFrameHealthCheckType type; +@property (nonatomic, strong) id targetObject; +@property (nonatomic, assign) CGSize sourceImageSize; +@property (nonatomic, strong) UIImage *unifiedImage; +@property (nonatomic, strong) NSDictionary *metadata; +@property (nonatomic, strong) NSDictionary *videoProperties; +@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType; + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata + videoProperties:(NSDictionary *)videoProperties; + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata; + +@end + +@implementation SCManagedFrameHealthCheckerTask + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata +{ + return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil]; +} + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata + videoProperties:(NSDictionary *)videoProperties +{ + SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init]; + task.type = type; + task.targetObject = targetObject; 
+ task.metadata = metadata; + task.videoProperties = videoProperties; + return task; +} + +- (NSString *)textForSnapType +{ + switch (self.type) { + case SCManagedFrameHealthCheck_ImageCapture: + case SCManagedFrameHealthCheck_ImagePreTranscoding: + case SCManagedFrameHealthCheck_ImagePostTranscoding: + return @"IMAGE"; + case SCManagedFrameHealthCheck_VideoCapture: + case SCManagedFrameHealthCheck_VideoOverlayImage: + case SCManagedFrameHealthCheck_VideoPostTranscoding: + return @"VIDEO"; + } +} + +- (NSString *)textForSource +{ + switch (self.type) { + case SCManagedFrameHealthCheck_ImageCapture: + return @"CAPTURE"; + case SCManagedFrameHealthCheck_ImagePreTranscoding: + return @"PRE_TRANSCODING"; + case SCManagedFrameHealthCheck_ImagePostTranscoding: + return @"POST_TRANSCODING"; + case SCManagedFrameHealthCheck_VideoCapture: + return @"CAPTURE"; + case SCManagedFrameHealthCheck_VideoOverlayImage: + return @"OVERLAY_IMAGE"; + case SCManagedFrameHealthCheck_VideoPostTranscoding: + return @"POST_TRANSCODING"; + } +} + +- (NSString *)textForErrorType +{ + switch (self.errorType) { + case SCManagedFrameHealthCheckError_None: + return nil; + case SCManagedFrameHealthCheckError_Invalid_Bitmap: + return @"Invalid_Bitmap"; + case SCManagedFrameHealthCheckError_Frame_Possibly_Black: + return @"Frame_Possibly_Black"; + case SCManagedFrameHealthCheckError_Frame_Totally_Black: + return @"Frame_Totally_Black"; + case SCManagedFrameHealthCheckError_Execution_Error: + return @"Execution_Error"; + } +} + +@end + +@interface SCManagedFrameHealthChecker () { + id _performer; + // Dictionary structure + // Key - NSString, captureSessionID + // Value - NSMutableArray + NSMutableDictionary *_frameCheckTasks; +} + +@end + +@implementation SCManagedFrameHealthChecker + ++ (SCManagedFrameHealthChecker *)sharedInstance +{ + SCTraceODPCompatibleStart(2); + static SCManagedFrameHealthChecker *checker; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + checker = [[SCManagedFrameHealthChecker alloc] _init]; + }); + return checker; +} + +- (instancetype)_init +{ + SCTraceODPCompatibleStart(2); + if (self = [super init]) { + // Use the lowest QoS level + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel + qualityOfService:QOS_CLASS_UTILITY + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _frameCheckTasks = [NSMutableDictionary dictionary]; + } + return self; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCTraceODPCompatibleStart(2); + // add exposure, ISO, brightness + NSMutableDictionary *metadata = [NSMutableDictionary dictionary]; + if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) { + return metadata; + } + CFDictionaryRef exifAttachments = + (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL); + NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); + if (exposureTimeNum) { + metadata[@"exposure"] = exposureTimeNum; + } + NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); + if (isoSpeedRatingNum) { + metadata[@"iso"] = isoSpeedRatingNum; + } + NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); + if (brightnessNum) { + float brightness = [brightnessNum floatValue]; + metadata[@"brightness"] = isfinite(brightness) ? 
@(brightness) : @(0); + } + + return metadata; +} + +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata +{ + SCTraceODPCompatibleStart(2); + // add exposure, ISO, brightness + NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary]; + CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata; + NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); + if (exposureTimeNum) { + newMetadata[@"exposure"] = exposureTimeNum; + } + NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); + if (isoSpeedRatingNum) { + newMetadata[@"iso"] = isoSpeedRatingNum; + } + NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); + if (brightnessNum) { + float brightness = [brightnessNum floatValue]; + newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0); + } + + return newMetadata; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; + [metadata addEntriesFromDictionary:extraInfo]; + return metadata; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; + metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); + + metadata[@"lens_enabled"] = @(lensesEnabled); + if (lensesEnabled) { + metadata[@"lens_id"] = lensID ?: @""; + } + + return metadata; +} + +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata]; + newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); + + newMetadata[@"lens_enabled"] = @(lensesEnabled); + if (lensesEnabled) { + newMetadata[@"lens_id"] = lensID ?: @""; + } + + return newMetadata; +} + +- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil); + NSMutableDictionary *properties = [NSMutableDictionary dictionary]; + // file size + properties[@"file_size"] = @([asset fileSize]); + // duration + properties[@"duration"] = @(CMTimeGetSeconds(asset.duration)); + // video track count + NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; + properties[@"video_track_count"] = @(videoTracks.count); + if (videoTracks.count > 0) { + // video bitrate + properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]); + // frame rate + properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]); + } + // audio track count + NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + properties[@"audio_track_count"] = @(audioTracks.count); + if (audioTracks.count > 0) { + // audio bitrate + properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]); + } + // playable + properties[@"playable"] = @(asset.isPlayable); + return properties; +} + +#pragma mark - Image snap + +- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image + captureSettings:(NSDictionary *)captureSettings + 
captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture + targetObject:image + metadata:captureSettings]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkImageHealthForPreTranscoding:(UIImage *)image + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkImageHealthForPostTranscoding:(NSData *)imageData + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding + targetObject:imageData + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +#pragma mark - Video snap +- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkVideoHealthForOverlayImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty"); + return; + } + // Overlay image could be nil + if (!image) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil."); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image + metedata:(NSDictionary *)metadata + properties:(NSDictionary *)properties + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding + targetObject:image + 
metadata:metadata + videoProperties:properties]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +#pragma mark - Task management +- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (!captureSessionID) { + SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil"); + return; + } + [self _asynchronouslyCheckForCaptureSessionID:captureSessionID]; +} + +#pragma mark - Private functions + +/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength. +- (UIImage *)_unifyImage:(UIImage *)sourceImage +{ + CGFloat sourceWidth = sourceImage.size.width; + CGFloat sourceHeight = sourceImage.size.height; + + if (sourceWidth == 0.0 || sourceHeight == 0.0) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size"); + return sourceImage; + } + + CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength; + + CGFloat widthScalingFactor = maxEdgeLength / sourceWidth; + CGFloat heightScalingFactor = maxEdgeLength / sourceHeight; + + CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor); + + if (scalingFactor >= 1) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image."); + return sourceImage; + } + + CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor); + + SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size), + NSStringFromCGSize(targetSize)); + return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale]; +} + +- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + return; + } + [_performer perform:^{ + SCTraceODPCompatibleStart(2); + + CFTimeInterval beforeScaling = CACurrentMediaTime(); + if (newTask.targetObject) { + if ([newTask.targetObject isKindOfClass:[UIImage class]]) { + UIImage *sourceImage = (UIImage *)newTask.targetObject; + newTask.unifiedImage = [self _unifyImage:sourceImage]; + newTask.sourceImageSize = sourceImage.size; + } else if ([newTask.targetObject isKindOfClass:[NSData class]]) { + UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject]; + CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime(); + SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f", + betweenDecodingAndScaling - beforeScaling); + beforeScaling = betweenDecodingAndScaling; + newTask.unifiedImage = [self _unifyImage:sourceImage]; + newTask.sourceImageSize = sourceImage.size; + } else { + SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@", + NSStringFromClass([newTask.targetObject class])); + } + newTask.targetObject = nil; + } + SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling); + + NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID]; + if (!taskQueue) { + taskQueue = [NSMutableArray array]; + _frameCheckTasks[captureSessionID] = taskQueue; + } + // Remove previous same type task, avoid meaningless task, + // for example repeat click "Send Button" and then "Back button" + // will produce a lot of PRE_TRANSCODING and POST_TRANSCODING + for (SCManagedFrameHealthCheckerTask *task in taskQueue) { + if (task.type == newTask.type) { + [taskQueue removeObject:task]; + break; + } + } + + 
[taskQueue addObject:newTask]; + }]; +} + +- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + [_performer perform:^{ + SCTraceODPCompatibleStart(2); + NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID]; + if (!tasksQueue) { + return; + } + + // Check the free memory, if it is too low, drop these tasks + double memFree = [SCLogger memoryFreeMB]; + if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) { + SCLogCoreCameraWarning( + @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree, + captureSessionID); + [_frameCheckTasks removeObjectForKey:captureSessionID]; + return; + } + + __block NSMutableArray *frameHealthInfoArray = [NSMutableArray array]; + // Execute all tasks and wait for complete + [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { + SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj; + NSMutableDictionary *frameHealthInfo; + UIImage *image = task.unifiedImage; + + if (image) { + // Get frame health info + frameHealthInfo = [self _getFrameHealthInfoForImage:image + source:[task textForSource] + snapType:[task textForSnapType] + metadata:task.metadata + sourceImageSize:task.sourceImageSize + captureSessionID:captureSessionID]; + NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"]; + NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"]; + NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"]; + if ([isTotallyBlackNum boolValue]) { + task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black; + } else if ([isPossibleBlackNum boolValue]) { + task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black; + } else if ([hasExecutionError boolValue]) { + task.errorType = SCManagedFrameHealthCheckError_Execution_Error; + } + } else { + frameHealthInfo = [NSMutableDictionary dictionary]; + task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap; + } + + if (frameHealthInfo) { + frameHealthInfo[@"frame_source"] = [task textForSource]; + frameHealthInfo[@"snap_type"] = [task textForSnapType]; + frameHealthInfo[@"error_type"] = [task textForErrorType]; + frameHealthInfo[@"capture_session_id"] = captureSessionID; + frameHealthInfo[@"metadata"] = task.metadata; + if (task.videoProperties.count > 0) { + [frameHealthInfo addEntriesFromDictionary:task.videoProperties]; + } + [frameHealthInfoArray addObject:frameHealthInfo]; + } + + // Release the image as soon as possible to mitigate the memory pressure + task.unifiedImage = nil; + }]; + + for (NSDictionary *frameHealthInfo in frameHealthInfoArray) { + if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) { + // // TODO: Zi Kai Chen - add this back. Normally we use id for + // this but as this is a shared instance we cannot easily inject it. The work would + // involve making this not a shared instance. 
+ // SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap, + // JSONStringSerializeObjectForLogging(frameHealthInfo)); + } + + [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex + parameters:frameHealthInfo + secretParameters:nil + metrics:nil]; + } + + [_frameCheckTasks removeObjectForKey:captureSessionID]; + }]; +} + +- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image + source:(NSString *)source + snapType:(NSString *)snapType + metadata:(NSDictionary *)metadata + sourceImageSize:(CGSize)sourceImageSize + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *parameters = [NSMutableDictionary dictionary]; + size_t samplesCount = 0; + CFTimeInterval start = CACurrentMediaTime(); + CGImageRef imageRef = image.CGImage; + size_t imageWidth = CGImageGetWidth(imageRef); + size_t imageHeight = CGImageGetHeight(imageRef); + CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef)); + CFTimeInterval getImageDataTime = CACurrentMediaTime(); + if (pixelData) { + const Byte *imageData = CFDataGetBytePtr(pixelData); + NSInteger stripLength = 0; + NSInteger bufferLength = 0; + NSInteger imagePixels = imageWidth * imageHeight; + // Limit the max sampled frames + if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) { + stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4; + bufferLength = kSCManagedFrameHealthCheckerMaxSamples; + } else { + stripLength = 4; + bufferLength = imagePixels; + } + samplesCount = bufferLength; + + // Avoid dividing by zero + if (samplesCount != 0) { + FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData + stripLength:stripLength + bufferLength:bufferLength + bitmapInfo:CGImageGetBitmapInfo(imageRef)]; + float averageR = sumRGBA.R / samplesCount; + float averageG = sumRGBA.G / samplesCount; + float averageB = sumRGBA.B / samplesCount; + float averageA = sumRGBA.A / samplesCount; + parameters[@"average_sampled_rgba_r"] = @(averageR); + parameters[@"average_sampled_rgba_g"] = @(averageG); + parameters[@"average_sampled_rgba_b"] = @(averageB); + parameters[@"average_sampled_rgba_a"] = @(averageA); + parameters[@"origin_frame_width"] = @(sourceImageSize.width); + parameters[@"origin_frame_height"] = @(sourceImageSize.height); + // Also report possible black to identify the intentional black snap by covering camera. + // Normally, the averageA very near 255, but for video overlay image, it is very small. + // So we use averageA > 250 to avoid considing video overlay image as possible black. + if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold && + averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold && + averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) { + parameters[@"is_possible_black"] = @(YES); + // Use this parameters for BigQuery conditions in Grafana + if (averageR == 0 && averageG == 0 && averageB == 0) { + parameters[@"is_total_black"] = @(YES); + } + } + } else { + SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType, + source, captureSessionID); + parameters[@"execution_error"] = @(YES); + } + CFRelease(pixelData); + } else { + SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! 
captureSessionID:%@", snapType, source, + captureSessionID); + parameters[@"execution_error"] = @(YES); + } + parameters[@"sample_size"] = @(samplesCount); + + CFTimeInterval end = CACurrentMediaTime(); + SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f", + snapType, source, getImageDataTime - start, end - getImageDataTime, end - start); + return parameters; +} + +- (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData + stripLength:(NSInteger)stripLength + bufferLength:(NSInteger)bufferLength + bitmapInfo:(CGBitmapInfo)bitmapInfo +{ + SCTraceODPCompatibleStart(2); + FloatRGBA sumRGBA; + if ((bitmapInfo & kCGImageAlphaPremultipliedFirst) && (bitmapInfo & kCGImageByteOrder32Little)) { + // BGRA + sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength); + sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength); + sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength); + sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength); + } else { + // TODO. support other types beside RGBA + sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength); + sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength); + sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength); + sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength); + } + return sumRGBA; +} + +@end diff --git a/ManagedCapturer/SCManagedFrontFlashController.h b/ManagedCapturer/SCManagedFrontFlashController.h new file mode 100644 index 0000000..c5bef55 --- /dev/null +++ b/ManagedCapturer/SCManagedFrontFlashController.h @@ -0,0 +1,18 @@ +// +// SCManagedFrontFlashController.h +// Snapchat +// +// Created by Liu Liu on 5/4/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import + +// This object is only access on SCManagedCapturer thread +@interface SCManagedFrontFlashController : NSObject + +@property (nonatomic, assign) BOOL flashActive; + +@property (nonatomic, assign) BOOL torchActive; + +@end diff --git a/ManagedCapturer/SCManagedFrontFlashController.m b/ManagedCapturer/SCManagedFrontFlashController.m new file mode 100644 index 0000000..61b4fac --- /dev/null +++ b/ManagedCapturer/SCManagedFrontFlashController.m @@ -0,0 +1,105 @@ +// +// SCManagedFrontFlashController.m +// Snapchat +// +// Created by Liu Liu on 5/4/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
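+//
+// "Front flash" for the front camera is simulated on screen: a full-screen white UIView is
+// added to the key window (fully opaque for flash, 75% alpha for torch) and the screen
+// brightness is forced to 1.0, re-applied every half second while active; the previous
+// brightness is restored when both flash and torch are turned off. Rough usage sketch from
+// the SCManagedCapturer thread:
+//
+//   SCManagedFrontFlashController *frontFlash = [[SCManagedFrontFlashController alloc] init];
+//   frontFlash.torchActive = YES;   // brighten the screen for the whole recording
+//   ...
+//   frontFlash.torchActive = NO;    // restore the previous screen brightness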
+// + +#import "SCManagedFrontFlashController.h" + +#import +#import +#import +#import + +@import UIKit; + +@implementation SCManagedFrontFlashController { + BOOL _active; + UIView *_brightView; + CGFloat _brightnessWhenFlashAndTorchOff; +} + +- (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive +{ + SCTraceStart(); + SCAssertMainThread(); + BOOL wasActive = _active; + _active = flashActive || torchActive; + if (!wasActive && _active) { + [self _activateFlash:flashActive]; + } else if (wasActive && !_active) { + [self _deactivateFlash]; + } +} + +- (void)_activateFlash:(BOOL)flashActive +{ + UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow]; + if (!_brightView) { + CGRect frame = [mainWindow bounds]; + CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame)); + frame.size = CGSizeMake(maxLength, maxLength); + // Using the max length on either side to be compatible with different orientations + _brightView = [[UIView alloc] initWithFrame:frame]; + _brightView.userInteractionEnabled = NO; + _brightView.backgroundColor = [UIColor whiteColor]; + } + _brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness; + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0", + _brightnessWhenFlashAndTorchOff); + [self _brightenLoop]; + _brightView.alpha = flashActive ? 1.0 : 0.75; + [mainWindow addSubview:_brightView]; +} + +- (void)_deactivateFlash +{ + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f", + [UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff); + [UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff; + if (_brightView) { + [_brightView removeFromSuperview]; + } +} + +- (void)_brightenLoop +{ + if (_active) { + SCLogGeneralInfo(@"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0", + [UIScreen mainScreen].brightness); + [UIScreen mainScreen].brightness = 1.0; + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) { + [self _brightenLoop]; + }); + } else { + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Recording is done, brighten loop ends"); + } +} + +- (void)setFlashActive:(BOOL)flashActive +{ + SCTraceStart(); + if (_flashActive != flashActive) { + _flashActive = flashActive; + BOOL torchActive = _torchActive; + runOnMainThreadAsynchronously(^{ + [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; + }); + } +} + +- (void)setTorchActive:(BOOL)torchActive +{ + SCTraceStart(); + if (_torchActive != torchActive) { + _torchActive = torchActive; + BOOL flashActive = _flashActive; + runOnMainThreadAsynchronously(^{ + [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; + }); + } +} + +@end diff --git a/ManagedCapturer/SCManagedLegacyStillImageCapturer.h b/ManagedCapturer/SCManagedLegacyStillImageCapturer.h new file mode 100644 index 0000000..7c2919f --- /dev/null +++ b/ManagedCapturer/SCManagedLegacyStillImageCapturer.h @@ -0,0 +1,13 @@ +// +// SCManagedLegacyStillImageCapturer.h +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
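+//
+// Still image capturer built on the deprecated AVCaptureStillImageOutput API (the
+// deprecation warnings are silenced with clang diagnostic pragmas in the implementation).
+// It is only used on OS versions noted in the implementation as lacking depth data
+// support; SCManagedPhotoCapturer is the AVCapturePhotoOutput-based counterpart.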
+// + +#import "SCManagedStillImageCapturer.h" + +@interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer + +@end diff --git a/ManagedCapturer/SCManagedLegacyStillImageCapturer.m b/ManagedCapturer/SCManagedLegacyStillImageCapturer.m new file mode 100644 index 0000000..9e1e9d1 --- /dev/null +++ b/ManagedCapturer/SCManagedLegacyStillImageCapturer.m @@ -0,0 +1,460 @@ +// +// SCManagedLegacyStillImageCapturer.m +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedLegacyStillImageCapturer.h" + +#import "AVCaptureConnection+InputDevice.h" +#import "SCCameraTweaks.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturer.h" +#import "SCManagedStillImageCapturer_Protected.h" +#import "SCStillImageCaptureVideoInputMethod.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +@import ImageIO; + +static NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain = + @"kSCLegacyStillImageCaptureDefaultMethodErrorDomain"; +static NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain = + @"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain"; + +static NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000; +static NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001; + +@implementation SCManagedLegacyStillImageCapturer { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + AVCaptureStillImageOutput *_stillImageOutput; +#pragma clang diagnostic pop + + BOOL _shouldCapture; + NSUInteger _retries; + + SCStillImageCaptureVideoInputMethod *_videoFileMethod; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensProcessingCore + delegate:(id)delegate +{ + SCTraceStart(); + self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate]; + if (self) { + [self setupWithSession:session]; + } + return self; +} + +- (void)setupWithSession:(AVCaptureSession *)session +{ + SCTraceStart(); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + _stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; +#pragma clang diagnostic pop + _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; + [self setAsOutput:session]; +} + +- (void)setAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + if ([session canAddOutput:_stillImageOutput]) { + [session addOutput:_stillImageOutput]; + } +} + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled +{ + SCTraceStart(); + if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) { + _stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled; + } +} + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled +{ + // Legacy capturer only used on devices running versions under 10.2, which don't support depth data + // so this function is never called and does not need to be implemented +} + +- (void)enableStillImageStabilization +{ + SCTraceStart(); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) { + _stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES; + } 
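+    // Note: lens stabilization enabled here only takes effect on the bracketed capture path
+    // (captureStillImageBracketAsynchronouslyFromConnection:), which is selected below when
+    // the output supports it and flash is not active.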
+#pragma clang diagnostic pop +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + [session removeOutput:_stillImageOutput]; +} + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler +{ + SCTraceStart(); + SCAssert(completionHandler, @"completionHandler shouldn't be nil"); + _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds + _aspectRatio = aspectRatio; + _zoomFactor = zoomFactor; + _fieldOfView = fieldOfView; + _state = state; + _captureSessionID = captureSessionID; + _shouldCaptureFromVideo = shouldCaptureFromVideo; + SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying " + @"current completion handler."); + _completionHandler = [completionHandler copy]; + [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart]; + if (!_adjustingExposureManualDetect) { + SCLogCoreCameraInfo(@"Capturing still image now"); + [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; + _shouldCapture = NO; + } else { + SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image"); + _shouldCapture = YES; + [self _deadlineCaptureStillImage]; + } +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + self->_adjustingExposureManualDetect = adjustingExposure; + [self _didChangeAdjustingExposure:adjustingExposure + withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect]; + }]; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + self->_lightingConditionType = lightingCondition; + }]; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. 
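+        // Three strategies can release a pending capture: ManualDetect (device capacity
+        // analyzer callback), KVO (this capturer state callback) and Deadline (forced after
+        // SCCameraTweaksExposureDeadline()); whichever fires first wins, since
+        // _shouldCapture is cleared as soon as the capture is kicked off.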
+ [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO]; + }]; +} + +#pragma mark - Private methods + +- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy +{ + if (!adjustingExposure && self->_shouldCapture) { + SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy); + [self _captureStillImageWithExposureAdjustmentStrategy:strategy]; + self->_shouldCapture = NO; + } +} + +- (void)_deadlineCaptureStillImage +{ + SCTraceStart(); + // Use the SCManagedCapturer's private queue. + [_performer perform:^{ + if (_shouldCapture) { + [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline]; + _shouldCapture = NO; + } + } + after:SCCameraTweaksExposureDeadline()]; +} + +- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy +{ + SCTraceStart(); + [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy]; + if (_shouldCaptureFromVideo) { + [self captureStillImageFromVideoBuffer]; + return; + } + SCAssert(_stillImageOutput, @"stillImageOutput shouldn't be nil"); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput; +#pragma clang diagnostic pop + AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput]; + SCManagedCapturerState *state = [_state copy]; + dispatch_block_t legacyStillImageCaptureBlock = ^{ + SCCAssertMainThread(); + // If the application is not in background, and we have still image connection, do thecapture. Otherwise fail. + if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) { + [_performer performImmediatelyIfCurrentPerformer:^{ + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = + _completionHandler; + _completionHandler = nil; + completionHandler(nil, nil, + [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain + code:kSCManagedStillImageCapturerApplicationStateBackground + userInfo:nil]); + }]; + return; + } +#if !TARGET_IPHONE_SIMULATOR + if (!captureConnection) { + [_performer performImmediatelyIfCurrentPerformer:^{ + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = + _completionHandler; + _completionHandler = nil; + completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain + code:kSCManagedStillImageCapturerNoStillImageConnection + userInfo:nil]); + }]; + return; + } +#endif + // Select appropriate image capture method + if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) { + if (!_videoFileMethod) { + _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init]; + } + [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCapture"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCapture"]; + [_videoFileMethod captureStillImageWithCapturerState:state + successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) { + [self _legacyStillImageCaptureDidSucceedWithImageData:imageData + sampleBuffer:nil + cameraInfo:cameraInfo + error:error]; + } + failureBlock:^(NSError *error) { + [self _legacyStillImageCaptureDidFailWithError:error]; + }]; + } else { +#pragma clang diagnostic push +#pragma clang diagnostic ignored 
"-Wdeprecated-declarations" + if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) { + [self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput + captureConnection:captureConnection + capturerState:state]; + } else { + [self _captureStillImageWithStillImageOutput:stillImageOutput + captureConnection:captureConnection + capturerState:state]; + } +#pragma clang diagnostic pop + } + }; + // We need to call this on main thread and blocking. + [[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock]; +} + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +- (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput + captureConnection:(AVCaptureConnection *)captureConnection + capturerState:(SCManagedCapturerState *)state +{ + [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; + @try { + [stillImageOutput + captureStillImageAsynchronouslyFromConnection:captureConnection + completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { + if (imageDataSampleBuffer) { + NSData *imageData = [AVCaptureStillImageOutput + jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; + [self + _legacyStillImageCaptureDidSucceedWithImageData:imageData + sampleBuffer: + imageDataSampleBuffer + cameraInfo: + cameraInfoForBuffer( + imageDataSampleBuffer) + error:error]; + } else { + if (error.domain == AVFoundationErrorDomain && error.code == -11800) { + // iOS 7 "unknown error"; works if we retry + [self _legacyStillImageCaptureWillRetryWithError:error]; + } else { + [self _legacyStillImageCaptureDidFailWithError:error]; + } + } + }]; + } @catch (NSException *e) { + [SCCrashLogger logHandledException:e]; + [self _legacyStillImageCaptureDidFailWithError: + [NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain + code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException + userInfo:@{ + @"exception" : e + }]]; + } +} + +- (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput + captureConnection:(AVCaptureConnection *)captureConnection + capturerState:(SCManagedCapturerState *)state +{ + [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; + NSArray *bracketArray = [self _bracketSettingsArray:captureConnection]; + @try { + [stillImageOutput + captureStillImageBracketAsynchronouslyFromConnection:captureConnection + withSettingsArray:bracketArray + completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, + AVCaptureBracketedStillImageSettings *settings, + NSError *err) { + if (!imageDataSampleBuffer) { + [self _legacyStillImageCaptureDidFailWithError:err]; + return; + } + NSData *jpegData = [AVCaptureStillImageOutput + jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; + [self + _legacyStillImageCaptureDidSucceedWithImageData:jpegData + sampleBuffer: + imageDataSampleBuffer + cameraInfo: + cameraInfoForBuffer( + imageDataSampleBuffer) + error:nil]; + }]; + } @catch (NSException *e) { + [SCCrashLogger logHandledException:e]; + [self _legacyStillImageCaptureDidFailWithError: + [NSError 
errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain + code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException + userInfo:@{ + @"exception" : e + }]]; + } +} +#pragma clang diagnostic pop + +- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection +{ + NSInteger const stillCount = 1; + NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount]; + AVCaptureDevice *device = [stillImageConnection inputDevice]; + AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings + manualExposureSettingsWithExposureDuration:device.exposureDuration + ISO:AVCaptureISOCurrent]; + for (NSInteger i = 0; i < stillCount; i++) { + [bracketSettingsArray addObject:settings]; + } + return [bracketSettingsArray copy]; +} + +- (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData + sampleBuffer:(CMSampleBufferRef)sampleBuffer + cameraInfo:(NSDictionary *)cameraInfo + error:(NSError *)error +{ + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + if (sampleBuffer) { + CFRetain(sampleBuffer); + } + [_performer performImmediatelyIfCurrentPerformer:^{ + UIImage *fullScreenImage = [self imageFromData:imageData + currentZoomFactor:_zoomFactor + targetAspectRatio:_aspectRatio + fieldOfView:_fieldOfView + state:_state + sampleBuffer:sampleBuffer]; + + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + completionHandler(fullScreenImage, cameraInfo, error); + if (sampleBuffer) { + CFRelease(sampleBuffer); + } + }]; +} + +- (void)_legacyStillImageCaptureDidFailWithError:(NSError *)error +{ + [_performer performImmediatelyIfCurrentPerformer:^{ + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + completionHandler(nil, nil, error); + }]; +} + +- (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error +{ + if (_retries-- > 0) { + [_performer perform:^{ + [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; + } + after:kSCCameraRetryInterval]; + } else { + [self _legacyStillImageCaptureDidFailWithError:error]; + } +} + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +- (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput +#pragma clang diagnostic pop +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + NSArray *connections = [stillImageOutput.connections copy]; + for (AVCaptureConnection *connection in connections) { + for (AVCaptureInputPort *port in [connection inputPorts]) { + if ([[port mediaType] isEqual:AVMediaTypeVideo]) { + return connection; + } + } + } + return nil; +} + +@end diff --git a/ManagedCapturer/SCManagedPhotoCapturer.h b/ManagedCapturer/SCManagedPhotoCapturer.h new file mode 100644 index 0000000..5e1da9b --- /dev/null +++ b/ManagedCapturer/SCManagedPhotoCapturer.h @@ -0,0 +1,13 @@ +// +// SCManagedPhotoCapturer.h +// Snapchat +// +// Created by Chao Pang on 10/5/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
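+//
+// Still image capturer built on AVCapturePhotoOutput, the replacement for the deprecated
+// AVCaptureStillImageOutput used by SCManagedLegacyStillImageCapturer; on iOS 11+ it also
+// supports portrait mode depth blur. Rough capture sketch (parameter values are
+// illustrative only):
+//
+//   [capturer captureStillImageWithAspectRatio:9.0 / 16.0
+//                                 atZoomFactor:1
+//                                  fieldOfView:fieldOfView
+//                                        state:state
+//                             captureSessionID:captureSessionID
+//                       shouldCaptureFromVideo:NO
+//                            completionHandler:^(UIImage *image, NSDictionary *cameraInfo, NSError *error) {
+//                                // invoked on the capturer's performer queue
+//                            }];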
+// + +#import "SCManagedStillImageCapturer.h" + +@interface SCManagedPhotoCapturer : SCManagedStillImageCapturer + +@end diff --git a/ManagedCapturer/SCManagedPhotoCapturer.m b/ManagedCapturer/SCManagedPhotoCapturer.m new file mode 100644 index 0000000..07b441e --- /dev/null +++ b/ManagedCapturer/SCManagedPhotoCapturer.m @@ -0,0 +1,667 @@ +// +// SCManagedPhotoCapturer.m +// Snapchat +// +// Created by Chao Pang on 10/5/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedPhotoCapturer.h" + +#import "AVCaptureConnection+InputDevice.h" +#import "SCCameraTweaks.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturer.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedStillImageCapturer_Protected.h" +#import "SCStillImageCaptureVideoInputMethod.h" +#import "SCStillImageDepthBlurFilter.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +@import ImageIO; + +static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain"; + +static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000; +static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001; + +typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) { + SCManagedPhotoCapturerStatusPrepareToCapture, + SCManagedPhotoCapturerStatusWillCapture, + SCManagedPhotoCapturerStatusDidFinishProcess, +}; + +@interface SCManagedPhotoCapturer () +@end + +@implementation SCManagedPhotoCapturer { + AVCapturePhotoOutput *_photoOutput; + + BOOL _shouldCapture; + BOOL _shouldEnableHRSI; + BOOL _portraitModeCaptureEnabled; + NSUInteger _retries; + + CGPoint _portraitModePointOfInterest; + SCStillImageDepthBlurFilter *_depthBlurFilter; + sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock; + + SCStillImageCaptureVideoInputMethod *_videoFileMethod; + + SCManagedPhotoCapturerStatus _status; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensProcessingCore + delegate:(id)delegate +{ + SCTraceStart(); + self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate]; + if (self) { + [self setupWithSession:session]; + _portraitModePointOfInterest = CGPointMake(0.5, 0.5); + } + return self; +} + +- (void)setupWithSession:(AVCaptureSession *)session +{ + SCTraceStart(); + _photoOutput = [[AVCapturePhotoOutput alloc] init]; + _photoOutput.highResolutionCaptureEnabled = YES; + [self setAsOutput:session]; +} + +- (void)setAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + if ([session canAddOutput:_photoOutput]) { + [session addOutput:_photoOutput]; + } +} + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + // Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause + // black frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings. 
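+    // Illustrative sketch of what the per-capture flag amounts to (the real settings are
+    // assembled in _photoSettingsWithPhotoOutput:captureConnection:captureState:):
+    //
+    //   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+    //   settings.highResolutionPhotoEnabled = _shouldEnableHRSI;
+    //   [photoOutput capturePhotoWithSettings:settings delegate:self];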
+ // https://ph.sc-corp.net/T96228 + _shouldEnableHRSI = highResolutionStillImageOutputEnabled; +} + +- (void)enableStillImageStabilization +{ + // The lens stabilization is enabled when configure AVCapturePhotoSettings + // instead of AVCapturePhotoOutput + SCTraceStart(); +} + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled +{ + _portraitModeCaptureEnabled = enabled; + if (@available(ios 11.0, *)) { + _photoOutput.depthDataDeliveryEnabled = enabled; + } + if (enabled && _depthBlurFilter == nil) { + _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init]; + } +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + _portraitModePointOfInterest = pointOfInterest; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + [session removeOutput:_photoOutput]; +} + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler +{ + SCTraceStart(); + SCAssert(completionHandler, @"completionHandler shouldn't be nil"); + SCAssert([_performer isCurrentPerformer], @""); + _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds + _aspectRatio = aspectRatio; + _zoomFactor = zoomFactor; + _fieldOfView = fieldOfView; + _state = state; + _captureSessionID = captureSessionID; + _shouldCaptureFromVideo = shouldCaptureFromVideo; + SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying " + @"current completion handler."); + + // The purpose of these lines is to attach a strong reference to self to the completion handler. + // This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a completion + // handler. + // If self is deallocated during the call to _photoOuptut capturePhotoWithSettings:delegate:, which may happen if + // any AVFoundationError occurs, + // then it's callback method, captureOutput:didFinish..., will not be called, and the completion handler will be + // forgotten. + // This comes with a risk of a memory leak. If for whatever reason the completion handler field is never used and + // then unset, + // then we have a permanent retain cycle. 
+ _callbackBlock = completionHandler; + __typeof(self) strongSelf = self; + _completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) { + strongSelf->_callbackBlock(fullScreenImage, metadata, error); + strongSelf->_callbackBlock = nil; + }; + [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart]; + + if (!_adjustingExposureManualDetect) { + SCLogCoreCameraInfo(@"Capturing still image now"); + [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; + _shouldCapture = NO; + } else { + SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image"); + _shouldCapture = YES; + [self _deadlineCapturePhoto]; + } +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. + self->_adjustingExposureManualDetect = adjustingExposure; + [self _didChangeAdjustingExposure:adjustingExposure + withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect]; + }]; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + self->_lightingConditionType = lightingCondition; + }]; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. 
+ [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO]; + }]; +} + +#pragma mark - AVCapturePhotoCaptureDelegate + +- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput + didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer + previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer + resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings + bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings + error:(NSError *)error +{ + SCTraceStart(); + if (photoSampleBuffer) { + CFRetain(photoSampleBuffer); + } + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]); + if (self->_status == SCManagedPhotoCapturerStatusWillCapture) { + NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer + previewPhotoSampleBuffer:nil]; + + [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay + uniqueId:@"IMAGE" + splitPoint:@"DID_FINISH_PROCESSING"]; + [self _capturePhotoFinishedWithImageData:imageData + sampleBuffer:photoSampleBuffer + cameraInfo:cameraInfoForBuffer(photoSampleBuffer) + error:error]; + + } else { + SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@", + [self _photoCapturerStatusToString:self->_status]); + [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain + code:kSCManagedPhotoCapturerInconsistentStatus + userInfo:nil]]; + } + CFRelease(photoSampleBuffer); + }]; +} + +- (void)captureOutput:(AVCapturePhotoOutput *)output + didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo + error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0) +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + NSData *imageData = [photo fileDataRepresentation]; + SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]); + if (self->_status == SCManagedPhotoCapturerStatusWillCapture) { + if (@available(ios 11.0, *)) { + if (_portraitModeCaptureEnabled) { + RenderData renderData = { + .depthDataMap = photo.depthData.depthDataMap, + .depthBlurPointOfInterest = &_portraitModePointOfInterest, + }; + imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData]; + } + } + + [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay + uniqueId:@"IMAGE" + splitPoint:@"DID_FINISH_PROCESSING"]; + + [self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error]; + + } else { + SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@", + [self _photoCapturerStatusToString:self->_status]); + [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain + code:kSCManagedPhotoCapturerInconsistentStatus + userInfo:nil]]; + } + }]; +} + +- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput + willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) { + if 
(self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) { + self->_status = SCManagedPhotoCapturerStatusWillCapture; + + [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay + uniqueId:@"IMAGE" + splitPoint:@"WILL_BEGIN_CAPTURE"]; + [self->_delegate managedStillImageCapturerWillCapturePhoto:self]; + } else { + SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@", + [self _photoCapturerStatusToString:self->_status]); + } + } + }]; +} + +- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput + didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) { + if (self->_status == SCManagedPhotoCapturerStatusWillCapture || + self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) { + [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay + uniqueId:@"IMAGE" + splitPoint:@"DID_CAPTURE_PHOTO"]; + [self->_delegate managedStillImageCapturerDidCapturePhoto:self]; + } else { + SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@", + [self _photoCapturerStatusToString:self->_status]); + } + } + }]; +} + +#pragma mark - Private methods + +- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy +{ + if (!adjustingExposure && self->_shouldCapture) { + SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy); + [self _capturePhotoWithExposureAdjustmentStrategy:strategy]; + self->_shouldCapture = NO; + } +} + +- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData + sampleBuffer:(CMSampleBufferRef)sampleBuffer + cameraInfo:(NSDictionary *)cameraInfo + error:(NSError *)error +{ + [self _photoCaptureDidSucceedWithImageData:imageData + sampleBuffer:sampleBuffer + cameraInfo:cameraInfoForBuffer(sampleBuffer) + error:error]; + self->_status = SCManagedPhotoCapturerStatusDidFinishProcess; +} + +- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error +{ + [self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error]; + self->_status = SCManagedPhotoCapturerStatusDidFinishProcess; +} + +- (void)_deadlineCapturePhoto +{ + SCTraceStart(); + // Use the SCManagedCapturer's private queue. 
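+    // If exposure is still being adjusted when the deadline fires, capture anyway with the
+    // deadline strategy so the request never stalls waiting for auto exposure to settle.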
+ @weakify(self); + [_performer perform:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if (self->_shouldCapture) { + [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline]; + self->_shouldCapture = NO; + } + } + after:SCCameraTweaksExposureDeadline()]; +} + +- (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy +{ + SCTraceStart(); + [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy]; + if (_shouldCaptureFromVideo) { + [self captureStillImageFromVideoBuffer]; + return; + } + SCAssert([_performer isCurrentPerformer], @""); + SCAssert(_photoOutput, @"_photoOutput shouldn't be nil"); + _status = SCManagedPhotoCapturerStatusPrepareToCapture; + AVCapturePhotoOutput *photoOutput = _photoOutput; + AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput]; + SCManagedCapturerState *state = [_state copy]; +#if !TARGET_IPHONE_SIMULATOR + if (!captureConnection) { + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain + code:kSCManagedStillImageCapturerNoStillImageConnection + userInfo:nil]); + } +#endif + AVCapturePhotoSettings *photoSettings = + [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state]; + // Select appropriate image capture method + + if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) { + if (!_videoFileMethod) { + _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init]; + } + [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"]; + [_delegate managedStillImageCapturerWillCapturePhoto:self]; + [_videoFileMethod captureStillImageWithCapturerState:state + successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) { + [_performer performImmediatelyIfCurrentPerformer:^{ + [self _photoCaptureDidSucceedWithImageData:imageData + sampleBuffer:nil + cameraInfo:cameraInfo + error:error]; + }]; + } + failureBlock:^(NSError *error) { + [_performer performImmediatelyIfCurrentPerformer:^{ + [self _photoCaptureDidFailWithError:error]; + }]; + }]; + } else { + [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"]; + [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"]; + @try { + [photoOutput capturePhotoWithSettings:photoSettings delegate:self]; + } @catch (NSException *e) { + [SCCrashLogger logHandledException:e]; + [self + _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain + code:kSCManagedPhotoCapturerErrorEncounteredException + userInfo:@{ + @"exception" : e + }]]; + } + } +} + +- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData + sampleBuffer:(CMSampleBufferRef)sampleBuffer + cameraInfo:(NSDictionary *)cameraInfo + error:(NSError *)error +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + + UIImage *fullScreenImage = [self imageFromData:imageData + 
currentZoomFactor:_zoomFactor
+                                            targetAspectRatio:_aspectRatio
+                                                  fieldOfView:_fieldOfView
+                                                        state:_state
+                                                 sampleBuffer:sampleBuffer];
+    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
+                                          uniqueId:@"IMAGE"
+                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
+    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
+    _completionHandler = nil;
+    if (completionHandler) {
+        completionHandler(fullScreenImage, cameraInfo, error);
+    }
+}
+
+- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
+                                    metadata:(NSDictionary *)metadata
+                                       error:(NSError *)error
+{
+    SCTraceStart();
+    SCAssert([_performer isCurrentPerformer], @"");
+    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
+    [[SCCoreCameraLogger sharedInstance]
+        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
+
+    UIImage *fullScreenImage = [self imageFromData:imageData
+                                 currentZoomFactor:_zoomFactor
+                                 targetAspectRatio:_aspectRatio
+                                       fieldOfView:_fieldOfView
+                                             state:_state
+                                          metadata:metadata];
+    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
+                                          uniqueId:@"IMAGE"
+                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
+    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
+    _completionHandler = nil;
+    if (completionHandler) {
+        completionHandler(fullScreenImage, metadata, error);
+    }
+}
+
+- (void)_photoCaptureDidFailWithError:(NSError *)error
+{
+    SCTraceStart();
+    SCAssert([_performer isCurrentPerformer], @"");
+    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
+    _completionHandler = nil;
+    if (completionHandler) {
+        completionHandler(nil, nil, error);
+    }
+}
+
+- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
+{
+    SCTraceStart();
+    SCAssert([_performer isCurrentPerformer], @"");
+    NSArray *connections = [photoOutput.connections copy];
+    for (AVCaptureConnection *connection in connections) {
+        for (AVCaptureInputPort *port in [connection inputPorts]) {
+            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
+                return connection;
+            }
+        }
+    }
+    return nil;
+}
+
+- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
+                                        captureConnection:(AVCaptureConnection *)captureConnection
+                                             captureState:(SCManagedCapturerState *)state
+{
+    SCTraceStart();
+    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
+        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
+                                        captureConnection:captureConnection
+                                             captureState:state];
+    } else {
+        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
+    }
+}
+
+- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
+{
+    // According to Apple documentation, AVCapturePhotoBracketSettings does not support flashMode,
+    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
+    // Besides, we only use AVCapturePhotoBracketSettings if capture settings need to be set manually.
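+    // In practice this means: no flash and no portrait mode, and either enhanced night mode is
+    // active or the device is currently under motion.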
+    return !state.flashActive && !_portraitModeCaptureEnabled &&
+           (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||
+            [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);
+}
+
+- (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
+                                                    captureState:(SCManagedCapturerState *)state
+{
+    SCTraceStart();
+    // Specify the output file format
+    AVCapturePhotoSettings *photoSettings =
+        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];
+
+    // Enable HRSI if necessary
+    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
+        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
+    }
+
+    // Turn on flash if active and supported by device
+    if (state.flashActive && state.flashSupported) {
+        photoSettings.flashMode = AVCaptureFlashModeOn;
+    }
+
+    // Turn on stabilization if available
+    // Setting autoStillImageStabilizationEnabled doesn't seem to take effect during a video capture session,
+    // but we enable it anyway as it is harmless.
+    if (photoSettings.isAutoStillImageStabilizationEnabled) {
+        photoSettings.autoStillImageStabilizationEnabled = YES;
+    }
+
+    if (_portraitModeCaptureEnabled) {
+        if (@available(ios 11.0, *)) {
+            photoSettings.depthDataDeliveryEnabled = YES;
+        }
+    }
+
+    return photoSettings;
+}
+
+- (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
+                                               captureConnection:(AVCaptureConnection *)captureConnection
+                                                    captureState:(SCManagedCapturerState *)state
+{
+    SCTraceStart();
+    OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];
+    NSArray *bracketedSettings =
+        [self _bracketSettingsArray:captureConnection withCaptureState:state];
+    SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,
+             @"Bracket photo count cannot exceed maximum count");
+    // Specify the output file format and raw pixel format
+    AVCapturePhotoBracketSettings *photoSettings =
+        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType
+                                                                  processedFormat:@{
+                                                                      AVVideoCodecKey : AVVideoCodecJPEG
+                                                                  }
+                                                                bracketedSettings:bracketedSettings];
+
+    // Enable HRSI if necessary
+    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
+        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
+    }
+
+    // If lens stabilization is supported, enable it while the device is moving
+    if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&
+        [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {
+        photoSettings.lensStabilizationEnabled = YES;
+    }
+    return photoSettings;
+}
+
+- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
+                  withCaptureState:(SCManagedCapturerState *)state
+{
+    NSInteger const stillCount = 1;
+    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
+    AVCaptureDevice *device = [stillImageConnection inputDevice];
+    CMTime exposureDuration = device.exposureDuration;
+    if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {
+        exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];
+    }
+    AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
+        manualExposureSettingsWithExposureDuration:exposureDuration
+                                               ISO:AVCaptureISOCurrent];
+
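+    // Reuse the same manual-exposure settings for every requested still (stillCount is currently 1).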
for (NSInteger i = 0; i < stillCount; i++) { + [bracketSettingsArray addObject:settings]; + } + return [bracketSettingsArray copy]; +} + +- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status +{ + switch (status) { + case SCManagedPhotoCapturerStatusPrepareToCapture: + return @"PhotoCapturerStatusPrepareToCapture"; + case SCManagedPhotoCapturerStatusWillCapture: + return @"PhotoCapturerStatusWillCapture"; + case SCManagedPhotoCapturerStatusDidFinishProcess: + return @"PhotoCapturerStatusDidFinishProcess"; + } +} + +@end diff --git a/ManagedCapturer/SCManagedRecordedVideo.h b/ManagedCapturer/SCManagedRecordedVideo.h new file mode 100644 index 0000000..c877384 --- /dev/null +++ b/ManagedCapturer/SCManagedRecordedVideo.h @@ -0,0 +1,36 @@ +// ed265cb0c346ae35dce70d3fc12a0bd8deae0802 +// Generated by the value-object.rb DO NOT EDIT!! + +#import + +#import +#import + +@protocol SCManagedRecordedVideo + +@property (nonatomic, copy, readonly) NSURL *videoURL; + +@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL; + +@property (nonatomic, copy, readonly) UIImage *placeholderImage; + +@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera; + +@end + +@interface SCManagedRecordedVideo : NSObject + +@property (nonatomic, copy, readonly) NSURL *videoURL; + +@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL; + +@property (nonatomic, copy, readonly) UIImage *placeholderImage; + +@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera; + +- (instancetype)initWithVideoURL:(NSURL *)videoURL + rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL + placeholderImage:(UIImage *)placeholderImage + isFrontFacingCamera:(BOOL)isFrontFacingCamera; + +@end diff --git a/ManagedCapturer/SCManagedRecordedVideo.m b/ManagedCapturer/SCManagedRecordedVideo.m new file mode 100644 index 0000000..078acce --- /dev/null +++ b/ManagedCapturer/SCManagedRecordedVideo.m @@ -0,0 +1,180 @@ +// ed265cb0c346ae35dce70d3fc12a0bd8deae0802 +// Generated by the value-object.rb DO NOT EDIT!! 
+ +#import "SCManagedRecordedVideo.h" + +#import + +@implementation SCManagedRecordedVideo + +- (instancetype)initWithVideoURL:(NSURL *)videoURL + rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL + placeholderImage:(UIImage *)placeholderImage + isFrontFacingCamera:(BOOL)isFrontFacingCamera +{ + self = [super init]; + if (self) { + _videoURL = [(NSObject *)videoURL copy]; + _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy]; + _placeholderImage = [(NSObject *)placeholderImage copy]; + _isFrontFacingCamera = isFrontFacingCamera; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _videoURL = [aDecoder decodeObjectForKey:@"videoURL"]; + _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@"rawVideoDataFileURL"]; + _placeholderImage = [aDecoder decodeObjectForKey:@"placeholderImage"]; + _isFrontFacingCamera = [aDecoder decodeBoolForKey:@"isFrontFacingCamera"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeObject:_videoURL forKey:@"videoURL"]; + [aCoder encodeObject:_rawVideoDataFileURL forKey:@"rawVideoDataFileURL"]; + [aCoder encodeObject:_placeholderImage forKey:@"placeholderImage"]; + [aCoder encodeBool:_isFrontFacingCamera forKey:@"isFrontFacingCamera"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeBool:_isFrontFacingCamera]; + [fasterCoder encodeObject:_placeholderImage]; + [fasterCoder encodeObject:_rawVideoDataFileURL]; + [fasterCoder encodeObject:_videoURL]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool]; + _placeholderImage = (UIImage *)[fasterDecoder decodeObject]; + _rawVideoDataFileURL = (NSURL *)[fasterDecoder decodeObject]; + _videoURL = (NSURL *)[fasterDecoder decodeObject]; +} + +- (void)setObject:(id)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 50783861721184594ULL: + _placeholderImage = (UIImage *)val; + break; + case 13152167848358790ULL: + _rawVideoDataFileURL = (NSURL *)val; + break; + case 48945309622713334ULL: + _videoURL = (NSURL *)val; + break; + } +} + +- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 11924284868025312ULL: + _isFrontFacingCamera = (BOOL)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 17435789727352013688ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 4 /* Total */, + FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject), + FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject), + FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (self == object) { + return YES; + } + if (![object isMemberOfClass:[self class]]) { + return NO; + } + SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object; + if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) { + return NO; + } + if (other.rawVideoDataFileURL != _rawVideoDataFileURL && + ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) { + return NO; + } + if (other.placeholderImage != _placeholderImage && + ![(NSObject *)other.placeholderImage 
isEqual:_placeholderImage]) { + return NO; + } + if (other.isFrontFacingCamera != _isFrontFacingCamera) { + return NO; + } + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash], + (NSUInteger)_isFrontFacingCamera}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 4; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\tvideoURL:%@\n", [_videoURL description]]; + [desc appendFormat:@"\trawVideoDataFileURL:%@\n", [_rawVideoDataFileURL description]]; + [desc appendFormat:@"\tplaceholderImage:%@\n", [_placeholderImage description]]; + [desc appendFormat:@"\tisFrontFacingCamera:%@\n", [@(_isFrontFacingCamera) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedRecordedVideo.value b/ManagedCapturer/SCManagedRecordedVideo.value new file mode 100644 index 0000000..78bfd3c --- /dev/null +++ b/ManagedCapturer/SCManagedRecordedVideo.value @@ -0,0 +1,6 @@ +interface SCManagedRecordedVideo + NSURL *videoURL; + NSURL *rawVideoDataFileURL; + UIImage *placeholderImage; + BOOL isFrontFacingCamera; +end \ No newline at end of file diff --git a/ManagedCapturer/SCManagedStillImageCapturer.h b/ManagedCapturer/SCManagedStillImageCapturer.h new file mode 100644 index 0000000..0d62afd --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturer.h @@ -0,0 +1,92 @@ +// +// SCManagedStillImageCapturer.h +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
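+//
+// Abstract base and factory for still image capture: +capturerWithCaptureResource: returns either the
+// AVCapturePhotoOutput-based capturer or the legacy capturer, depending on SCPhotoCapturerIsEnabled().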
+// + +#import "SCCoreCameraLogger.h" +#import "SCManagedCaptureDevice.h" +#import "SCManagedCapturerListener.h" +#import "SCManagedCapturerState.h" +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import +#import + +#import +#import + +SC_EXTERN_C_BEGIN + +extern BOOL SCPhotoCapturerIsEnabled(void); + +SC_EXTERN_C_END + +@protocol SCPerforming; +@protocol SCManagedStillImageCapturerDelegate; +@class SCCaptureResource; + +typedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage, + NSDictionary *metadata, + NSError *error); + +@interface SCManagedStillImageCapturer + : NSObject { + SCManagedCapturerState *_state; + BOOL _shouldCaptureFromVideo; + BOOL _captureImageFromVideoImmediately; + CGFloat _aspectRatio; + float _zoomFactor; + float _fieldOfView; + BOOL _adjustingExposureManualDetect; + sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler; +} + ++ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource; + +SC_INIT_AND_NEW_UNAVAILABLE; + +@property (nonatomic, weak) id delegate; + +- (void)setupWithSession:(AVCaptureSession *)session; + +- (void)setAsOutput:(AVCaptureSession *)session; + +- (void)removeAsOutput:(AVCaptureSession *)session; + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled; + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled; + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest; + +- (void)enableStillImageStabilization; + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler; + +- (void)captureStillImageFromVideoBuffer; + +@end + +@protocol SCManagedStillImageCapturerDelegate + +- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer; + +- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer; + +@optional + +- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer; + +- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer; + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturer.mm b/ManagedCapturer/SCManagedStillImageCapturer.mm new file mode 100644 index 0000000..91c55de --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturer.mm @@ -0,0 +1,399 @@ +// +// SCManagedStillImageCapturer.m +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
+// + +#import "SCManagedStillImageCapturer.h" + +#import "SCCameraSettingUtils.h" +#import "SCCameraTweaks.h" +#import "SCCaptureResource.h" +#import "SCLogger+Camera.h" +#import "SCManagedCaptureSession.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedLegacyStillImageCapturer.h" +#import "SCManagedPhotoCapturer.h" +#import "SCManagedStillImageCapturerHandler.h" +#import "SCManagedStillImageCapturer_Protected.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +NSString *const kSCManagedStillImageCapturerErrorDomain = @"kSCManagedStillImageCapturerErrorDomain"; + +NSInteger const kSCCameraShutterSoundID = 1108; + +#if !TARGET_IPHONE_SIMULATOR +NSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101; +#endif +NSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102; + +// We will do the image capture regardless if these is still camera adjustment in progress after 0.4 seconds. +NSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4; +NSTimeInterval const kSCCameraRetryInterval = 0.1; + +BOOL SCPhotoCapturerIsEnabled(void) +{ + // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2 + return SC_AT_LEAST_IOS_10_2; +} + +NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer) +{ + CFDictionaryRef exifAttachments = + (CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL); + float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue]; + NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue]; + return @{ + (__bridge NSString *) kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating), (__bridge NSString *) + kCGImagePropertyExifBrightnessValue : @(brightness) + }; +} + +@implementation SCManagedStillImageCapturer + ++ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource +{ + if (SCPhotoCapturerIsEnabled()) { + return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession + performer:captureResource.queuePerformer + lensProcessingCore:captureResource.lensProcessingCore + delegate:captureResource.stillImageCapturerHandler]; + } else { + return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession + performer:captureResource.queuePerformer + lensProcessingCore:captureResource.lensProcessingCore + delegate:captureResource.stillImageCapturerHandler]; + } +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensAPI + delegate:(id)delegate +{ + self = [super init]; + if (self) { + _session = session; + _performer = performer; + _lensAPI = lensAPI; + _delegate = delegate; + } + return self; +} + +- (void)setupWithSession:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setAsOutput:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled +{ + UNIMPLEMENTED_METHOD; +} + +- (void)enableStillImageStabilization +{ + UNIMPLEMENTED_METHOD; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + 
UNIMPLEMENTED_METHOD; +} + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler +{ + UNIMPLEMENTED_METHOD; +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + UNIMPLEMENTED_METHOD; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + UNIMPLEMENTED_METHOD; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + UNIMPLEMENTED_METHOD; +} + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + sampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data] + currentZoomFactor:currentZoomFactor + targetAspectRatio:targetAspectRatio + fieldOfView:fieldOfView + state:state]; + // Check capture frame health before showing preview + NSDictionary *metadata = + [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer + photoCapturerEnabled:SCPhotoCapturerIsEnabled() + lensEnabled:state.lensesActive + lensID:[_lensAPI activeLensId]]; + [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage + captureSettings:metadata + captureSessionID:_captureSessionID]; + _captureSessionID = nil; + return capturedImage; +} + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + metadata:(NSDictionary *)metadata +{ + UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data] + currentZoomFactor:currentZoomFactor + targetAspectRatio:targetAspectRatio + fieldOfView:fieldOfView + state:state]; + // Check capture frame health before showing preview + NSDictionary *newMetadata = + [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata + photoCapturerEnabled:SCPhotoCapturerIsEnabled() + lensEnabled:state.lensesActive + lensID:[_lensAPI activeLensId]]; + [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage + captureSettings:newMetadata + captureSessionID:_captureSessionID]; + _captureSessionID = nil; + return capturedImage; +} + +- (UIImage *)imageFromImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state +{ + UIImage *fullScreenImage = image; + if (state.lensesActive && _lensAPI.isLensApplied) { + fullScreenImage = [_lensAPI processImage:fullScreenImage + maxPixelSize:[_lensAPI maxPixelSize] + devicePosition:state.devicePosition + fieldOfView:fieldOfView]; + } + // Resize and crop + return [self resizeImage:fullScreenImage 
currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio]; +} + +- (UIImage *)resizeImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio +{ + SCTraceStart(); + if (currentZoomFactor == 1) { + return SCCropImageToTargetAspectRatio(image, targetAspectRatio); + } else { + @autoreleasepool { + return [self resizeImageUsingCG:image + currentZoomFactor:currentZoomFactor + targetAspectRatio:targetAspectRatio + maxPixelSize:[_lensAPI maxPixelSize]]; + } + } +} + +- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + maxPixelSize:(CGFloat)maxPixelSize +{ + size_t imageWidth = CGImageGetWidth(inputImage.CGImage); + size_t imageHeight = CGImageGetHeight(inputImage.CGImage); + SCLogGeneralInfo(@"Captured still image at %dx%d", (int)imageWidth, (int)imageHeight); + size_t targetWidth, targetHeight; + float zoomFactor = currentZoomFactor; + if (imageWidth > imageHeight) { + targetWidth = maxPixelSize; + targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth; + // Update zoom factor here + zoomFactor *= (float)maxPixelSize / imageWidth; + } else { + targetHeight = maxPixelSize; + targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight; + zoomFactor *= (float)maxPixelSize / imageHeight; + } + if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) { + SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio, + &targetWidth, &targetHeight); + } + CGContextRef context = + CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage), + CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8, + CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage)); + CGContextSetInterpolationQuality(context, kCGInterpolationHigh); + CGContextDrawImage(context, CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor, + targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor, imageWidth * zoomFactor, + imageHeight * zoomFactor), + inputImage.CGImage); + CGImageRef thumbnail = CGBitmapContextCreateImage(context); + CGContextRelease(context); + UIImage *image = + [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation]; + CGImageRelease(thumbnail); + return image; +} + +- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration +{ + CMTime adjustedExposureDuration = exposureDuration; + if (_lightingConditionType == SCCapturerLightingConditionTypeDark) { + adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5); + } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) { + adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5); + } + return adjustedExposureDuration; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately); + _captureImageFromVideoImmediately = NO; + @weakify(self); + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [self _didCapturePhotoFromVideoBuffer]; + UIImageOrientation orientation 
= devicePosition == SCManagedCaptureDevicePositionBack + ? UIImageOrientationRight + : UIImageOrientationLeftMirrored; + UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer) + backingType:UIImageBackingTypeCGImage + orientation:orientation + context:[CIContext contextWithOptions:nil]]; + UIImage *fullScreenImage = [self imageFromImage:videoImage + currentZoomFactor:_zoomFactor + targetAspectRatio:_aspectRatio + fieldOfView:_fieldOfView + state:_state]; + NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy]; + cameraInfo[@"capture_image_from_video_buffer"] = @"enabled"; + [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo]; + CFRelease(sampleBuffer); + }]; +} + +- (void)_willBeginCapturePhotoFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) { + [self->_delegate managedStillImageCapturerWillCapturePhoto:self]; + } + }]; +} + +- (void)_didCapturePhotoFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) { + [self->_delegate managedStillImageCapturerDidCapturePhoto:self]; + } + }]; +} + +- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + if (completionHandler) { + completionHandler(image, cameraInfo, nil); + } + }]; +} + +- (void)captureStillImageFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil); + [self _willBeginCapturePhotoFromVideoBuffer]; + self->_captureImageFromVideoImmediately = YES; + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturerHandler.h b/ManagedCapturer/SCManagedStillImageCapturerHandler.h new file mode 100644 index 0000000..7535483 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturerHandler.h @@ -0,0 +1,21 @@ +// +// SCManagedStillImageCapturerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. 
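+//
+// Bridges SCManagedStillImageCapturerDelegate callbacks to the capture resource's announcer, and answers
+// the capturer's device-motion and file-input questions from the capture resource.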
+// + +#import "SCManagedStillImageCapturer.h" + +#import + +@class SCCaptureResource; +@protocol SCDeviceMotionProvider +, SCFileInputDecider; + +@interface SCManagedStillImageCapturerHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturerHandler.m b/ManagedCapturer/SCManagedStillImageCapturerHandler.m new file mode 100644 index 0000000..0b39565 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturerHandler.m @@ -0,0 +1,85 @@ +// +// SCManagedStillImageCapturerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedStillImageCapturerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" + +#import +#import +#import +#import + +@interface SCManagedStillImageCapturerHandler () { + __weak SCCaptureResource *_captureResource; +} + +@end + +@implementation SCManagedStillImageCapturerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Will capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + if (_captureResource.stillImageCapturer) { + SCManagedCapturerState *state = [_captureResource.state copy]; + SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc] + initWithPresentationTimestamp:kCMTimeZero + fieldOfView:_captureResource.device.fieldOfView]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + willCapturePhoto:state + sampleMetadata:sampleMetadata]; + }); + } + }]; +} + +- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did capture photo. 
stillImageCapturer:%@", _captureResource.stillImageCapturer); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + if (_captureResource.stillImageCapturer) { + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCapturePhoto:state]; + }); + } + }]; +} + +- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + return _captureResource.deviceMotionProvider.isUnderDeviceMotion; +} + +- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + return _captureResource.fileInputDecider.shouldProcessFileInput; +} + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturer_Protected.h b/ManagedCapturer/SCManagedStillImageCapturer_Protected.h new file mode 100644 index 0000000..30fe409 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturer_Protected.h @@ -0,0 +1,63 @@ +// +// SCManagedStillImageCapturer_Protected.h +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +SC_EXTERN_C_BEGIN +extern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer); +SC_EXTERN_C_END + +extern NSString *const kSCManagedStillImageCapturerErrorDomain; + +#if !TARGET_IPHONE_SIMULATOR +extern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection; +#endif +extern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground; + +// We will do the image capture regardless if these is still camera adjustment in progress after 0.4 seconds. +extern NSTimeInterval const kSCManagedStillImageCapturerDeadline; +extern NSTimeInterval const kSCCameraRetryInterval; + +@protocol SCManagedCapturerLensAPI; + +@interface SCManagedStillImageCapturer () { + @protected + id _lensAPI; + id _performer; + AVCaptureSession *_session; + id __weak _delegate; + NSString *_captureSessionID; + SCCapturerLightingConditionType _lightingConditionType; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensProcessingCore + delegate:(id)delegate; + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + sampleBuffer:(CMSampleBufferRef)sampleBuffer; + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + metadata:(NSDictionary *)metadata; + +- (UIImage *)imageFromImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state; + +- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration; + +@end diff --git a/ManagedCapturer/SCManagedVideoARDataSource.h b/ManagedCapturer/SCManagedVideoARDataSource.h new file mode 100644 index 0000000..b5486c3 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoARDataSource.h @@ -0,0 +1,24 @@ +// +// SCManagedVideoARDataSource.h +// Snapchat +// +// Created by Eyal Segal on 20/10/2017. 
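+//
+// Data source for ARKit-backed video capture: exposes the most recent ARFrame, optional face depth data
+// (when SC_USE_ARKIT_FACE is defined), and the current field of view.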
+// + +#import "SCCapturerDefines.h" + +#import + +#import + +@protocol SCManagedVideoARDataSource + +@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0); + +#ifdef SC_USE_ARKIT_FACE +@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0); +#endif + +@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0); + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturer.h b/ManagedCapturer/SCManagedVideoCapturer.h new file mode 100644 index 0000000..1a4a16e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturer.h @@ -0,0 +1,102 @@ +// +// SCManagedVideoCapturer.h +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedRecordedVideo.h" +#import "SCManagedVideoCapturerOutputSettings.h" +#import "SCVideoCaptureSessionInfo.h" + +#import +#import +#import + +#import +#import + +typedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error); + +@class SCManagedVideoCapturer, SCTimedTask; + +@protocol SCManagedVideoCapturerDelegate + +// All these calbacks are invoked on a private queue for video recording channels + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + willStopWithRecordedVideoFuture:(SCFuture> *)videoProviderFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didFailWithError:(NSError *)error + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + presentationTimestamp:(CMTime)presentationTimestamp; + +@end + +/** + * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer + * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output + * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings. 
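+ *
+ * Rough usage sketch (inferred from this interface, not a prescribed sequence): create the capturer with
+ * -initWithQueuePerformer:, set the delegate, call -prepareForRecordingWithAudioConfiguration: to warm up
+ * the audio session, start with
+ * -startRecordingAsynchronouslyWithOutputSettings:audioConfiguration:maxDuration:toURL:deviceFormat:orientation:captureSessionID:,
+ * and finish with -stopRecordingAsynchronously or -cancelRecordingAsynchronously.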
+ */ +@interface SCManagedVideoCapturer : NSObject + +/** + * Return the output URL that passed into beginRecordingToURL method + */ +@property (nonatomic, copy, readonly) NSURL *outputURL; + +@property (nonatomic, weak) id delegate; +@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession; +@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay; +@property (nonatomic, assign, readonly) BOOL audioQueueStarted; + +- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer; + +- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration; +- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings: + (SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)audioConfiguration + maxDuration:(NSTimeInterval)maxDuration + toURL:(NSURL *)URL + deviceFormat:(AVCaptureDeviceFormat *)deviceFormat + orientation:(AVCaptureVideoOrientation)videoOrientation + captureSessionID:(NSString *)captureSessionID; + +- (void)stopRecordingAsynchronously; +- (void)cancelRecordingAsynchronously; + +// Schedule a task to run, it is thread safe. +- (void)addTimedTask:(SCTimedTask *)task; + +// Clear all tasks, it is thread safe. +- (void)clearTimedTasks; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturer.m b/ManagedCapturer/SCManagedVideoCapturer.m new file mode 100644 index 0000000..60f2002 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturer.m @@ -0,0 +1,1107 @@ +// +// SCManagedVideoCapturer.m +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedVideoCapturer.h" + +#import "NSURL+Asset.h" +#import "SCAudioCaptureSession.h" +#import "SCCameraTweaks.h" +#import "SCCapturerBufferedVideoWriter.h" +#import "SCCoreCameraLogger.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturer.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedVideoCapturerLogger.h" +#import "SCManagedVideoCapturerTimeObserver.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +@import CoreMedia; +@import ImageIO; + +static NSString *const kSCAudioCaptureAudioSessionLabel = @"CAMERA"; + +// wild card audio queue error code +static NSInteger const kSCAudioQueueErrorWildCard = -50; +// kAudioHardwareIllegalOperationError, it means hardware failure +static NSInteger const kSCAudioQueueErrorHardware = 1852797029; + +typedef NS_ENUM(NSUInteger, SCManagedVideoCapturerStatus) { + SCManagedVideoCapturerStatusUnknown, + SCManagedVideoCapturerStatusIdle, + SCManagedVideoCapturerStatusPrepareToRecord, + SCManagedVideoCapturerStatusReadyForRecording, + SCManagedVideoCapturerStatusRecording, + SCManagedVideoCapturerStatusError, +}; + +#define SCLogVideoCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) +#define SCLogVideoCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) +#define SCLogVideoCapturerError(fmt, ...) 
SCLogCoreCameraError(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) + +@interface SCManagedVideoCapturer () +// This value has to be atomic because it is read on a different thread (write +// on output queue, as always) +@property (atomic, assign, readwrite) SCManagedVideoCapturerStatus status; + +@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay; + +@end + +static char *const kSCManagedVideoCapturerQueueLabel = "com.snapchat.managed-video-capturer-queue"; +static char *const kSCManagedVideoCapturerPromiseQueueLabel = "com.snapchat.video-capture-promise"; + +static NSString *const kSCManagedVideoCapturerErrorDomain = @"kSCManagedVideoCapturerErrorDomain"; + +static NSInteger const kSCManagedVideoCapturerCannotAddAudioVideoInput = 1001; +static NSInteger const kSCManagedVideoCapturerEmptyFrame = 1002; +static NSInteger const kSCManagedVideoCapturerStopBeforeStart = 1003; +static NSInteger const kSCManagedVideoCapturerStopWithoutStart = 1004; +static NSInteger const kSCManagedVideoCapturerZeroVideoSize = -111; + +static NSUInteger const kSCVideoContentComplexitySamplingRate = 90; + +// This is the maximum time we will wait for the Recording Capturer pipeline to drain +// When video stabilization is turned on the extra frame delay is around 20 frames. +// @30 fps this is 0.66 seconds +static NSTimeInterval const kSCManagedVideoCapturerStopRecordingDeadline = 1.0; + +static const char *SCPlaceholderImageGenerationQueueLabel = "com.snapchat.video-capturer-placeholder-queue"; + +static const char *SCVideoRecordingPreparationQueueLabel = "com.snapchat.video-recording-preparation-queue"; + +static dispatch_queue_t SCPlaceholderImageGenerationQueue(void) +{ + static dispatch_queue_t queue; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + queue = dispatch_queue_create(SCPlaceholderImageGenerationQueueLabel, DISPATCH_QUEUE_SERIAL); + }); + return queue; +} + +@interface SCManagedVideoCapturer () + +@end + +@implementation SCManagedVideoCapturer { + NSTimeInterval _maxDuration; + NSTimeInterval _recordStartTime; + + SCCapturerBufferedVideoWriter *_videoWriter; + + BOOL _hasWritten; + SCQueuePerformer *_performer; + SCQueuePerformer *_videoPreparationPerformer; + SCAudioCaptureSession *_audioCaptureSession; + NSError *_lastError; + UIImage *_placeholderImage; + + // For logging purpose + BOOL _isVideoSnap; + NSDictionary *_videoOutputSettings; + + // The following value is used to control the encoder shutdown following a stop recording message. + // When a shutdown is requested this value will be the timestamp of the last captured frame. + CFTimeInterval _stopTime; + NSInteger _stopSession; + SCAudioConfigurationToken *_preparedAudioConfiguration; + SCAudioConfigurationToken *_audioConfiguration; + + dispatch_semaphore_t _startRecordingSemaphore; + + // For store the raw frame datas + NSInteger _rawDataFrameNum; + NSURL *_rawDataURL; + SCVideoFrameRawDataCollector *_videoFrameRawDataCollector; + + CMTime _startSessionTime; + // Indicates how actual processing time of first frame. Also used for camera timer animation start offset. 
+ NSTimeInterval _startSessionRealTime; + CMTime _endSessionTime; + sc_managed_capturer_recording_session_t _sessionId; + + SCManagedVideoCapturerTimeObserver *_timeObserver; + SCManagedVideoCapturerLogger *_capturerLogger; + + CGSize _outputSize; + BOOL _isFrontFacingCamera; + SCPromise> *_recordedVideoPromise; + SCManagedAudioDataSourceListenerAnnouncer *_announcer; + + NSString *_captureSessionID; + CIContext *_ciContext; +} + +@synthesize performer = _performer; + +- (instancetype)init +{ + SCTraceStart(); + return [self initWithQueuePerformer:[[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]]; +} + +- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer +{ + SCTraceStart(); + self = [super init]; + if (self) { + _performer = queuePerformer; + _audioCaptureSession = [[SCAudioCaptureSession alloc] init]; + _audioCaptureSession.delegate = self; + _announcer = [SCManagedAudioDataSourceListenerAnnouncer new]; + self.status = SCManagedVideoCapturerStatusIdle; + _capturerLogger = [[SCManagedVideoCapturerLogger alloc] init]; + _startRecordingSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +- (void)dealloc +{ + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before dealloc: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); +} + +- (SCVideoCaptureSessionInfo)activeSession +{ + return SCVideoCaptureSessionInfoMake(_startSessionTime, _endSessionTime, _sessionId); +} + +- (CGSize)defaultSizeForDeviceFormat:(AVCaptureDeviceFormat *)format +{ + SCTraceStart(); + // if there is no device, and no format + if (format == nil) { + // hard code 720p + return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth, + kSCManagedCapturerDefaultVideoActiveFormatHeight); + } + CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + CGSize size = CGSizeMake(videoDimensions.width, videoDimensions.height); + if (videoDimensions.width > kSCManagedCapturerDefaultVideoActiveFormatWidth && + videoDimensions.height > kSCManagedCapturerDefaultVideoActiveFormatHeight) { + CGFloat scaleFactor = MAX((kSCManagedCapturerDefaultVideoActiveFormatWidth / videoDimensions.width), + (kSCManagedCapturerDefaultVideoActiveFormatHeight / videoDimensions.height)); + size = SCSizeMakeAlignTo(SCSizeApplyScale(size, scaleFactor), 2); + } + if ([SCDeviceName isIphoneX]) { + size = SCSizeApplyScale(size, kSCIPhoneXCapturedImageVideoCropRatio); + } + return size; +} + +- (CGSize)cropSize:(CGSize)size toAspectRatio:(CGFloat)aspectRatio +{ + if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) { + return size; + } + // video input is always in landscape mode + aspectRatio = 1.0 / aspectRatio; + if (size.width > size.height * aspectRatio) { + size.width = size.height * aspectRatio; + } else { + size.height = size.width / aspectRatio; + } + return CGSizeMake(roundf(size.width / 2) * 2, roundf(size.height / 2) * 2); +} + +- (SCManagedVideoCapturerOutputSettings *)defaultRecordingOutputSettingsWithDeviceFormat: + (AVCaptureDeviceFormat *)deviceFormat +{ + SCTraceStart(); + CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio(); + CGSize outputSize = [self defaultSizeForDeviceFormat:deviceFormat]; + outputSize = [self cropSize:outputSize toAspectRatio:aspectRatio]; + + // [TODO](Chao): remove the dependency of SCManagedVideoCapturer on SnapVideoMetaData + NSInteger 
videoBitRate = [SnapVideoMetadata averageTranscodingBitRate:outputSize + isRecording:YES + highQuality:YES + duration:0 + iFrameOnly:NO + originalVideoBitRate:0 + overlayImageFileSizeBits:0 + videoPlaybackRate:1 + isLagunaVideo:NO + hasOverlayToBlend:NO + sourceType:SCSnapVideoFilterSourceTypeUndefined]; + SCTraceSignal(@"Setup transcoding video bitrate"); + [_capturerLogger logStartingStep:kSCCapturerStartingStepTranscodeingVideoBitrate]; + + SCManagedVideoCapturerOutputSettings *outputSettings = + [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:outputSize.width + height:outputSize.height + videoBitRate:videoBitRate + audioBitRate:64000.0 + keyFrameInterval:15 + outputType:SCManagedVideoCapturerOutputTypeVideoSnap]; + + return outputSettings; +} + +- (SCQueuePerformer *)_getVideoPreparationPerformer +{ + SCAssert([_performer isCurrentPerformer], @"must run on _performer"); + if (!_videoPreparationPerformer) { + _videoPreparationPerformer = [[SCQueuePerformer alloc] initWithLabel:SCVideoRecordingPreparationQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + } + return _videoPreparationPerformer; +} + +- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration +{ + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + self.status = SCManagedVideoCapturerStatusPrepareToRecord; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + } + __block NSError *audioSessionError = nil; + _preparedAudioConfiguration = _audioConfiguration = + [SCAudioSessionExperimentAdapter configureWith:configuration + performer:[self _getVideoPreparationPerformer] + completion:^(NSError *error) { + audioSessionError = error; + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) { + dispatch_semaphore_signal(_startRecordingSemaphore); + } + }]; + + // Wait until preparation for recording is done + dispatch_semaphore_wait(_startRecordingSemaphore, DISPATCH_TIME_FOREVER); + [_delegate managedVideoCapturer:self + didGetError:audioSessionError + forType:SCManagedVideoCapturerInfoAudioSessionError + session:self.activeSession]; + }]; +} + +- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings: + (SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)audioConfiguration + maxDuration:(NSTimeInterval)maxDuration + toURL:(NSURL *)URL + deviceFormat:(AVCaptureDeviceFormat *)deviceFormat + orientation:(AVCaptureVideoOrientation)videoOrientation + captureSessionID:(NSString *)captureSessionID +{ + SCTraceStart(); + _captureSessionID = [captureSessionID copy]; + [_capturerLogger prepareForStartingLog]; + + [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + isUniqueEvent:NO]; + + NSTimeInterval startTime = CACurrentMediaTime(); + [[SCLogger sharedInstance] logPreCaptureOperationRequestedAt:startTime]; + [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationRequested]; + _sessionId = arc4random(); + + // Set a invalid time so that we don't process videos when no frame available + _startSessionTime = kCMTimeInvalid; + _endSessionTime = kCMTimeInvalid; + _firstWrittenAudioBufferDelay = kCMTimeInvalid; + _audioQueueStarted = NO; + + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo at start of recording: %@", + 
SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [_performer performImmediatelyIfCurrentPerformer:^{ + _maxDuration = maxDuration; + dispatch_block_t startRecordingBlock = ^{ + _rawDataFrameNum = 0; + // Begin audio recording asynchronously, first, need to have correct audio session. + SCTraceStart(); + SCLogVideoCapturerInfo(@"Dequeue begin recording with audio session change delay: %lf seconds", + CACurrentMediaTime() - startTime); + if (self.status != SCManagedVideoCapturerStatusReadyForRecording) { + SCLogVideoCapturerInfo(@"SCManagedVideoCapturer status: %lu", (unsigned long)self.status); + // We may already released, but this should be OK. + [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration + performer:nil + completion:nil]; + return; + } + if (_preparedAudioConfiguration != _audioConfiguration) { + SCLogVideoCapturerInfo( + @"SCManagedVideoCapturer has mismatched audio session token, prepared: %@, have: %@", + _preparedAudioConfiguration.token, _audioConfiguration.token); + // We are on a different audio session token already. + [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration + performer:nil + completion:nil]; + return; + } + + // Divide start recording workflow into different steps to log delay time. + // And checkpoint is the end of a step + [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession]; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_session_start_end"]; + + SCLogVideoCapturerInfo(@"Prepare to begin recording"); + _lastError = nil; + + // initialize stopTime to a number much larger than the CACurrentMediaTime() which is the time from Jan 1, + // 2001 + _stopTime = kCFAbsoluteTimeIntervalSince1970; + + // Restart everything + _hasWritten = NO; + + SCManagedVideoCapturerOutputSettings *finalOutputSettings = + outputSettings ? 
outputSettings : [self defaultRecordingOutputSettingsWithDeviceFormat:deviceFormat]; + _isVideoSnap = finalOutputSettings.outputType == SCManagedVideoCapturerOutputTypeVideoSnap; + _outputSize = CGSizeMake(finalOutputSettings.height, finalOutputSettings.width); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoRecordingStart + parameters:@{ + @"video_width" : @(finalOutputSettings.width), + @"video_height" : @(finalOutputSettings.height), + @"bit_rate" : @(finalOutputSettings.videoBitRate), + @"is_video_snap" : @(_isVideoSnap), + }]; + + _outputURL = [URL copy]; + _rawDataURL = [_outputURL URLByAppendingPathExtension:@"dat"]; + [_capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings]; + + // Make sure the raw frame data file is gone + SCTraceSignal(@"Setup video frame raw data"); + [[NSFileManager defaultManager] removeItemAtURL:_rawDataURL error:NULL]; + if ([SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding]) { + if (!_videoFrameRawDataCollector) { + _videoFrameRawDataCollector = [[SCVideoFrameRawDataCollector alloc] initWithPerformer:_performer]; + } + [_videoFrameRawDataCollector prepareForCollectingVideoFrameRawDataWithRawDataURL:_rawDataURL]; + } + [_capturerLogger logStartingStep:kSCCapturerStartingStepVideoFrameRawData]; + + SCLogVideoCapturerInfo(@"Prepare to begin audio recording"); + + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_start_begin"]; + [self _beginAudioQueueRecordingWithCompleteHandler:^(NSError *error) { + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_start_end"]; + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioQueueError + session:sessionInfo]; + } else { + _audioQueueStarted = YES; + } + if (self.status == SCManagedVideoCapturerStatusRecording) { + [_delegate managedVideoCapturer:self didBeginAudioRecording:sessionInfo]; + } + }]; + + // Call this delegate first so that we have proper state transition from begin recording to finish / error + [_delegate managedVideoCapturer:self didBeginVideoRecording:sessionInfo]; + + // We need to start with a fresh recording file, make sure it's gone + [[NSFileManager defaultManager] removeItemAtURL:_outputURL error:NULL]; + [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioRecording]; + + SCTraceSignal(@"Setup asset writer"); + + NSError *error = nil; + _videoWriter = [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:_performer + outputURL:self.outputURL + delegate:self + error:&error]; + if (error) { + self.status = SCManagedVideoCapturerStatusError; + _lastError = error; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAssetWriterError + session:sessionInfo]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + return; + } + + [_capturerLogger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration]; + if (![_videoWriter prepareWritingWithOutputSettings:finalOutputSettings]) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerCannotAddAudioVideoInput + userInfo:nil]; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + return; + } + SCTraceSignal(@"Observe asset writer status change"); + 
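+ // Note that the writer session itself starts lazily: the first video sample buffer delivered to
+ // -managedVideoDataSource:didOutputSampleBuffer:devicePosition: calls
+ // [_videoWriter startWritingAtSourceTime:] and seeds _startSessionTime. _recordStartTime, set a few
+ // lines below from CACurrentMediaTime(), is the host-clock reference used for the _maxDuration checks
+ // on both the audio and video append paths.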
SCCAssert(_placeholderImage == nil, @"placeholderImage should be nil"); + self.status = SCManagedVideoCapturerStatusRecording; + // Only log the recording delay event from camera view (excluding video note recording) + if (_isVideoSnap) { + [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsRecordingDelay + uniqueId:@"VIDEO" + parameters:@{ + @"type" : @"video" + }]; + } + _recordStartTime = CACurrentMediaTime(); + }; + + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_session_start_begin"]; + + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) { + self.status = SCManagedVideoCapturerStatusReadyForRecording; + startRecordingBlock(); + } else { + self.status = SCManagedVideoCapturerStatusReadyForRecording; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration + performer:nil + completion:nil]; + } + _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter + configureWith:audioConfiguration + performer:_performer + completion:^(NSError *error) { + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioSessionError + session:sessionInfo]; + } + startRecordingBlock(); + }]; + } + }]; + return sessionInfo; +} + +- (NSError *)_handleRetryBeginAudioRecordingErrorCode:(NSInteger)errorCode + error:(NSError *)error + micResult:(NSDictionary *)resultInfo +{ + SCTraceStart(); + NSString *resultStr = SC_CAST_TO_CLASS_OR_NIL(resultInfo[SCAudioSessionRetryDataSourceInfoKey], NSString); + BOOL changeMicSuccess = [resultInfo[SCAudioSessionRetryDataSourceResultKey] boolValue]; + if (!error) { + SCManagedVideoCapturerInfoType type = SCManagedVideoCapturerInfoAudioQueueRetrySuccess; + if (changeMicSuccess) { + if (errorCode == kSCAudioQueueErrorWildCard) { + type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue; + } else if (errorCode == kSCAudioQueueErrorHardware) { + type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware; + } + } + [_delegate managedVideoCapturer:self didGetError:nil forType:type session:self.activeSession]; + } else { + error = [self _appendInfo:resultStr forInfoKey:@"retry_datasource_result" toError:error]; + SCLogVideoCapturerError(@"Retry setting audio session failed with error:%@", error); + } + return error; +} + +- (BOOL)_isBottomMicBrokenCode:(NSInteger)errorCode +{ + // we consider both -50 and 1852797029 as a broken microphone case + return (errorCode == kSCAudioQueueErrorWildCard || errorCode == kSCAudioQueueErrorHardware); +} + +- (void)_beginAudioQueueRecordingWithCompleteHandler:(audio_capture_session_block)block +{ + SCTraceStart(); + SCAssert(block, @"block can not be nil"); + @weakify(self); + void (^beginAudioBlock)(NSError *error) = ^(NSError *error) { + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + + SCTraceStart(); + NSInteger errorCode = error.code; + if ([self _isBottomMicBrokenCode:errorCode] && + (self.status == SCManagedVideoCapturerStatusReadyForRecording || + self.status == SCManagedVideoCapturerStatusRecording)) { + + SCLogVideoCapturerError(@"Start to retry begin audio queue (error code: %@)", @(errorCode)); + + // use front microphone to retry + NSDictionary *resultInfo = [[SCAudioSession sharedInstance] tryUseFrontMicWithErrorCode:errorCode]; + [self _retryRequestRecordingWithCompleteHandler:^(NSError *error) { + // then retry audio 
queue again + [_audioCaptureSession + beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate + completionHandler:^(NSError *innerError) { + NSError *modifyError = [self + _handleRetryBeginAudioRecordingErrorCode:errorCode + error:innerError + micResult:resultInfo]; + block(modifyError); + }]; + }]; + + } else { + block(error); + } + }]; + }; + [_audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate + completionHandler:^(NSError *error) { + beginAudioBlock(error); + }]; +} + +// This method must not change nullability of error, it should only either append info into userInfo, +// or return the NSError as it is. +- (NSError *)_appendInfo:(NSString *)infoStr forInfoKey:(NSString *)infoKey toError:(NSError *)error +{ + if (!error || infoStr.length == 0 || infoKey.length == 0 || error.domain.length == 0) { + return error; + } + NSMutableDictionary *errorInfo = [[error userInfo] mutableCopy]; + errorInfo[infoKey] = infoStr.length > 0 ? infoStr : @"(null)"; + + return [NSError errorWithDomain:error.domain code:error.code userInfo:errorInfo]; +} + +- (void)_retryRequestRecordingWithCompleteHandler:(audio_capture_session_block)block +{ + SCTraceStart(); + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + } + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter + configureWith:_audioConfiguration.configuration + performer:_performer + completion:^(NSError *error) { + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioSessionError + session:sessionInfo]; + } + if (block) { + block(error); + } + }]; +} + +#pragma SCCapturerBufferedVideoWriterDelegate + +- (void)videoWriterDidFailWritingWithError:(NSError *)error +{ + // If it failed, we call the delegate method, release everything else we + // have, well, on the output queue obviously + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [_outputURL reloadAssetKeys]; + [self _cleanup]; + [self _disposeAudioRecording]; + self.status = SCManagedVideoCapturerStatusError; + _lastError = error; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAssetWriterError + session:sessionInfo]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + }]; +} + +- (void)_willStopRecording +{ + if (self.status == SCManagedVideoCapturerStatusRecording) { + // To notify UI continue the preview processing + SCQueuePerformer *promisePerformer = + [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerPromiseQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _recordedVideoPromise = [[SCPromise alloc] initWithPerformer:promisePerformer]; + [_delegate managedVideoCapturer:self + willStopWithRecordedVideoFuture:_recordedVideoPromise.future + videoSize:_outputSize + placeholderImage:_placeholderImage + session:self.activeSession]; + } +} + +- (void)_stopRecording +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue"); + // Reset stop session as well as stop time. 
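+ // kCFAbsoluteTimeIntervalSince1970 (978307200 seconds) acts as a sentinel here: it is far larger than
+ // any host-clock presentation timestamp (CACurrentMediaTime() counts seconds since boot), so the
+ // "> _stopTime" comparison in the video sample buffer callback cannot trigger another stop until
+ // -stopRecordingAsynchronously assigns a real stop time again.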
+ ++_stopSession; + _stopTime = kCFAbsoluteTimeIntervalSince1970; + SCPromise> *recordedVideoPromise = _recordedVideoPromise; + _recordedVideoPromise = nil; + sc_managed_capturer_recording_session_t sessionId = _sessionId; + if (self.status == SCManagedVideoCapturerStatusRecording) { + self.status = SCManagedVideoCapturerStatusIdle; + if (CMTIME_IS_VALID(_endSessionTime)) { + [_videoWriter + finishWritingAtSourceTime:_endSessionTime + withCompletionHanlder:^{ + // actually, make sure everything happens on outputQueue + [_performer performImmediatelyIfCurrentPerformer:^{ + if (sessionId != _sessionId) { + SCLogVideoCapturerError(@"SessionId mismatch: before: %@, after: %@", @(sessionId), + @(_sessionId)); + return; + } + [self _disposeAudioRecording]; + // Log the video snap recording success event w/ parameters, not including video + // note + if (_isVideoSnap) { + [SnapVideoMetadata logVideoEvent:kSCCameraMetricsVideoRecordingSuccess + videoSettings:_videoOutputSettings + isSave:NO]; + } + void (^stopRecordingCompletionBlock)(NSURL *) = ^(NSURL *rawDataURL) { + SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue"); + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + + [self _cleanup]; + + [[SCLogger sharedInstance] logTimedEventStart:@"SNAP_VIDEO_SIZE_LOADING" + uniqueId:@"" + isUniqueEvent:NO]; + CGSize videoSize = + [SnapVideoMetadata videoSizeForURL:_outputURL waitWhileLoadingTracksIfNeeded:YES]; + [[SCLogger sharedInstance] logTimedEventEnd:@"SNAP_VIDEO_SIZE_LOADING" + uniqueId:@"" + parameters:nil]; + // Log error if video file is not really ready + if (videoSize.width == 0.0 || videoSize.height == 0.0) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerZeroVideoSize + userInfo:nil]; + [recordedVideoPromise completeWithError:_lastError]; + [_delegate managedVideoCapturer:self + didFailWithError:_lastError + session:sessionInfo]; + _placeholderImage = nil; + return; + } + // If the video duration is too short, the future object will complete + // with error as well + SCManagedRecordedVideo *recordedVideo = + [[SCManagedRecordedVideo alloc] initWithVideoURL:_outputURL + rawVideoDataFileURL:_rawDataURL + placeholderImage:_placeholderImage + isFrontFacingCamera:_isFrontFacingCamera]; + [recordedVideoPromise completeWithValue:recordedVideo]; + [_delegate managedVideoCapturer:self + didSucceedWithRecordedVideo:recordedVideo + session:sessionInfo]; + _placeholderImage = nil; + }; + + if (_videoFrameRawDataCollector) { + [_videoFrameRawDataCollector + drainFrameDataCollectionWithCompletionHandler:^(NSURL *rawDataURL) { + stopRecordingCompletionBlock(rawDataURL); + }]; + } else { + stopRecordingCompletionBlock(nil); + } + }]; + }]; + + } else { + [self _disposeAudioRecording]; + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + self.status = SCManagedVideoCapturerStatusError; + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerEmptyFrame + userInfo:nil]; + _placeholderImage = nil; + [recordedVideoPromise completeWithError:_lastError]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + } + } else { + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord || + self.status == SCManagedVideoCapturerStatusReadyForRecording) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerStopBeforeStart + userInfo:nil]; + } else { + 
_lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerStopWithoutStart + userInfo:nil]; + } + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + _placeholderImage = nil; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + _audioConfiguration = nil; + } + [recordedVideoPromise completeWithError:_lastError]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + self.status = SCManagedVideoCapturerStatusIdle; + [_capturerLogger logEventIfStartingTooSlow]; + } +} + +- (void)stopRecordingAsynchronously +{ + SCTraceStart(); + NSTimeInterval stopTime = CACurrentMediaTime(); + [_performer performImmediatelyIfCurrentPerformer:^{ + _stopTime = stopTime; + NSInteger stopSession = _stopSession; + [self _willStopRecording]; + [_performer perform:^{ + // If we haven't stopped yet, call the stop now nevertheless. + if (stopSession == _stopSession) { + [self _stopRecording]; + } + } + after:kSCManagedVideoCapturerStopRecordingDeadline]; + }]; +} + +- (void)cancelRecordingAsynchronously +{ + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + SCLogVideoCapturerInfo(@"Cancel recording. status: %lu", (unsigned long)self.status); + if (self.status == SCManagedVideoCapturerStatusRecording) { + self.status = SCManagedVideoCapturerStatusIdle; + [self _disposeAudioRecording]; + [_videoWriter cancelWriting]; + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; + } else if ((self.status == SCManagedVideoCapturerStatusPrepareToRecord) || + (self.status == SCManagedVideoCapturerStatusReadyForRecording)) { + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + self.status = SCManagedVideoCapturerStatusIdle; + _placeholderImage = nil; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration + performer:nil + completion:nil]; + _audioConfiguration = nil; + } + [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; + } + [_capturerLogger logEventIfStartingTooSlow]; + }]; +} + +- (void)addTimedTask:(SCTimedTask *)task +{ + [_performer performImmediatelyIfCurrentPerformer:^{ + // Only allow to add observers when we are not recording. 
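+ // Tasks fire on the main thread once the recording's relative presentation time (see
+ // -managedVideoDataSource:didOutputSampleBuffer:devicePosition:) passes task.targetTime, and
+ // SCManagedVideoCapturerTimeObserver asserts that tasks are added before frames start flowing. For
+ // reference, a caller-side sketch, assuming an SCTimedTask initializer of this shape (only the
+ // targetTime and task properties of SCTimedTask are visible in this diff):
+ //
+ //   SCTimedTask *task =
+ //       [[SCTimedTask alloc] initWithTargetTime:CMTimeMakeWithSeconds(5.0, NSEC_PER_SEC)
+ //                                          task:^(CMTime relativeTime, CGFloat startDelay) {
+ //                                              // runs on the main thread ~5 seconds into the recording
+ //                                          }];
+ //   [videoCapturer addTimedTask:task]; // videoCapturer: this SCManagedVideoCapturer (hypothetical variable)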
+ if (!self->_timeObserver) { + self->_timeObserver = [SCManagedVideoCapturerTimeObserver new]; + } + [self->_timeObserver addTimedTask:task]; + SCLogVideoCapturerInfo(@"Added timetask: %@", task); + }]; +} + +- (void)clearTimedTasks +{ + // _timeObserver will be initialized lazily when adding timed tasks + SCLogVideoCapturerInfo(@"Clearing time observer"); + [_performer performImmediatelyIfCurrentPerformer:^{ + if (self->_timeObserver) { + self->_timeObserver = nil; + } + }]; +} + +- (void)_cleanup +{ + [_videoWriter cleanUp]; + _timeObserver = nil; + + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before cleanup: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + + _startSessionTime = kCMTimeInvalid; + _endSessionTime = kCMTimeInvalid; + _firstWrittenAudioBufferDelay = kCMTimeInvalid; + _sessionId = 0; + _captureSessionID = nil; + _audioQueueStarted = NO; +} + +- (void)_disposeAudioRecording +{ + SCLogVideoCapturerInfo(@"Disposing audio recording"); + SCAssert([_performer isCurrentPerformer], @""); + // Setup the audio session token correctly + SCAudioConfigurationToken *audioConfiguration = _audioConfiguration; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_stop_begin"]; + NSString *captureSessionID = _captureSessionID; + [_audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:^{ + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_queue_stop_end"]; + SCLogVideoCapturerInfo(@"Did dispose audio recording"); + if (audioConfiguration) { + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_session_stop_begin"]; + [SCAudioSessionExperimentAdapter + relinquishConfiguration:audioConfiguration + performer:_performer + completion:^(NSError *_Nullable error) { + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_session_stop_end"]; + [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + parameters:nil]; + }]; + } + }]; + _audioConfiguration = nil; +} + +- (CIContext *)ciContext +{ + if (!_ciContext) { + _ciContext = [CIContext contextWithOptions:nil]; + } + return _ciContext; +} + +#pragma mark - SCAudioCaptureSessionDelegate + +- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCTraceStart(); + if (self.status != SCManagedVideoCapturerStatusRecording) { + return; + } + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + if (self.status == SCManagedVideoCapturerStatusRecording) { + // Audio always follows video, there is no other way around this :) + if (_hasWritten && CACurrentMediaTime() - _recordStartTime <= _maxDuration) { + [self _processAudioSampleBuffer:sampleBuffer]; + [_videoWriter appendAudioSampleBuffer:sampleBuffer]; + } + } + CFRelease(sampleBuffer); + }]; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + if (self.status != SCManagedVideoCapturerStatusRecording) { + return; + } + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + // the following check will allow the capture 
pipeline to drain + if (CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) > _stopTime) { + [self _stopRecording]; + } else { + if (self.status == SCManagedVideoCapturerStatusRecording) { + _isFrontFacingCamera = (devicePosition == SCManagedCaptureDevicePositionFront); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + if (CMTIME_IS_VALID(presentationTime)) { + SCLogVideoCapturerInfo(@"Obtained video data source at time %lld", presentationTime.value); + } else { + SCLogVideoCapturerInfo(@"Obtained video data source with an invalid time"); + } + if (!_hasWritten) { + // Start writing! + [_videoWriter startWritingAtSourceTime:presentationTime]; + [_capturerLogger endLoggingForStarting]; + _startSessionTime = presentationTime; + _startSessionRealTime = CACurrentMediaTime(); + SCLogVideoCapturerInfo(@"First frame processed %f seconds after presentation Time", + _startSessionRealTime - CMTimeGetSeconds(presentationTime)); + _hasWritten = YES; + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CMTimeGetSeconds(presentationTime)]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CMTimeGetSeconds( + presentationTime)]; + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo after first frame: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + } + // Only respect video end session time, audio can be cut off, not video, + // not video + if (CMTIME_IS_INVALID(_endSessionTime)) { + _endSessionTime = presentationTime; + } else { + _endSessionTime = CMTimeMaximum(_endSessionTime, presentationTime); + } + if (CACurrentMediaTime() - _recordStartTime <= _maxDuration) { + [_videoWriter appendVideoSampleBuffer:sampleBuffer]; + [self _processVideoSampleBuffer:sampleBuffer]; + } + if (_timeObserver) { + [_timeObserver processTime:CMTimeSubtract(presentationTime, _startSessionTime) + sessionStartTimeDelayInSecond:_startSessionRealTime - CMTimeGetSeconds(_startSessionTime)]; + } + } + } + CFRelease(sampleBuffer); + }]; +} + +- (void)_generatePlaceholderImageWithPixelBuffer:(CVImageBufferRef)pixelBuffer metaData:(NSDictionary *)metadata +{ + SCTraceStart(); + CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer); + if (imageBuffer) { + dispatch_async(SCPlaceholderImageGenerationQueue(), ^{ + UIImage *placeholderImage = [UIImage imageWithPixelBufferRef:imageBuffer + backingType:UIImageBackingTypeCGImage + orientation:UIImageOrientationRight + context:[self ciContext]]; + placeholderImage = + SCCropImageToTargetAspectRatio(placeholderImage, SCManagedCapturedImageAndVideoAspectRatio()); + [_performer performImmediatelyIfCurrentPerformer:^{ + // After processing, assign it back. 
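+ // The placeholder comes from the frame whose timestamp equals _startSessionTime (the first appended
+ // frame), generated off-queue on SCPlaceholderImageGenerationQueue and cropped to the capture aspect
+ // ratio; it is kept only while still recording, and the same image feeds the frame-health check below.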
+ if (self.status == SCManagedVideoCapturerStatusRecording) { + _placeholderImage = placeholderImage; + // Check video frame health by placeholder image + [[SCManagedFrameHealthChecker sharedInstance] + checkVideoHealthForCaptureFrameImage:placeholderImage + metedata:metadata + captureSessionID:_captureSessionID]; + } + CVPixelBufferRelease(imageBuffer); + }]; + }); + } +} + +#pragma mark - Pixel Buffer methods + +- (void)_processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SC_GUARD_ELSE_RETURN(sampleBuffer); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + BOOL shouldGeneratePlaceholderImage = CMTimeCompare(presentationTime, _startSessionTime) == 0; + + CVImageBufferRef outputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (outputPixelBuffer) { + [self _addVideoRawDataWithPixelBuffer:outputPixelBuffer]; + if (shouldGeneratePlaceholderImage) { + NSDictionary *extraInfo = [_delegate managedVideoCapturerGetExtraFrameHealthInfo:self]; + NSDictionary *metadata = + [[[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer extraInfo:extraInfo] + copy]; + [self _generatePlaceholderImageWithPixelBuffer:outputPixelBuffer metaData:metadata]; + } + } + + [_delegate managedVideoCapturer:self + didAppendVideoSampleBuffer:sampleBuffer + presentationTimestamp:CMTimeSubtract(presentationTime, _startSessionTime)]; +} + +- (void)_processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer]; + if (!CMTIME_IS_VALID(self.firstWrittenAudioBufferDelay)) { + self.firstWrittenAudioBufferDelay = + CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), _startSessionTime); + } +} + +- (void)_addVideoRawDataWithPixelBuffer:(CVImageBufferRef)pixelBuffer +{ + if (_videoFrameRawDataCollector && [SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding] && + ((_rawDataFrameNum % kSCVideoContentComplexitySamplingRate) == 0) && (_rawDataFrameNum > 0)) { + if (_videoFrameRawDataCollector) { + CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer); + [_videoFrameRawDataCollector collectVideoFrameRawDataWithImageBuffer:imageBuffer + frameNum:_rawDataFrameNum + completion:^{ + CVPixelBufferRelease(imageBuffer); + }]; + } + } + _rawDataFrameNum++; +} + +#pragma mark - SCManagedAudioDataSource + +- (void)addListener:(id)listener +{ + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + [_announcer removeListener:listener]; +} + +- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration +{ + SCAssertFail(@"Controlled by recorder"); +} + +- (void)stopStreaming +{ + SCAssertFail(@"Controlled by recorder"); +} + +- (BOOL)isStreaming +{ + return self.status == SCManagedVideoCapturerStatusRecording; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerHandler.h b/ManagedCapturer/SCManagedVideoCapturerHandler.h new file mode 100644 index 0000000..1c55cea --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerHandler.h @@ -0,0 +1,20 @@ +// +// SCManagedVideoCapturerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. 
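+//
+// SCManagedVideoCapturerHandler receives the video capturer's delegate callbacks, hops onto the capture
+// resource queue to snapshot state, re-announces lifecycle events (begin / will stop / succeed / fail /
+// cancel) to SCManagedCapturerListener observers on the main thread, and forwards appended video sample
+// buffers with per-frame metadata on the capture queue.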
+// + +#import "SCManagedVideoCapturer.h" + +#import + +@class SCCaptureResource; + +@interface SCManagedVideoCapturerHandler : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerHandler.m b/ManagedCapturer/SCManagedVideoCapturerHandler.m new file mode 100644 index 0000000..7c4866e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerHandler.m @@ -0,0 +1,252 @@ +// +// SCManagedVideoCapturerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedVideoCapturerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" +#import "SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedVideoFileStreamer.h" +#import "SCManagedVideoFrameSampler.h" +#import "SCManagedVideoStreamer.h" + +#import +#import +#import +#import +#import + +@interface SCManagedVideoCapturerHandler () { + __weak SCCaptureResource *_captureResource; +} +@end + +@implementation SCManagedVideoCapturerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did begin video recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didBeginVideoRecording:state + session:sessionInfo]; + }); + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + if ([_captureResource.fileInputDecider shouldProcessFileInput]) { + [_captureResource.videoDataSource startStreaming]; + } + SCTraceStart(); + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didBeginAudioRecording:state + session:sessionInfo]; + }); + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + willStopWithRecordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Will stop recording. 
sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)", + sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + SCManagedCapturerState *state = [_captureResource.state copy]; + // Then, sync back to main thread to notify will finish recording + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + willFinishRecording:state + session:sessionInfo + recordedVideoFuture:recordedVideoFuture + videoSize:videoSize + placeholderImage:placeholderImage]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did succeed recording. sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + // Then, sync back to main thread to notify the finish recording + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didFinishRecording:state + session:sessionInfo + recordedVideo:recordedVideo]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didFailWithError:(NSError *)error + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did fail recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didFailRecording:state + session:sessionInfo + error:error]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didCancelRecording:state + session:sessionInfo]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did get error. 
sessionId:%u errorType:%lu, error:%@", sessionInfo.sessionId, (long)type, error); + [_captureResource.queuePerformer perform:^{ + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didGetError:error + forType:type + session:sessionInfo]; + }); + }]; +} + +- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer +{ + SCTraceODPCompatibleStart(2); + if (_captureResource.state.lensesActive) { + return @{ + @"lens_active" : @(YES), + @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null]) + }; + } + return nil; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + presentationTimestamp:(CMTime)presentationTimestamp +{ + CFRetain(sampleBuffer); + [_captureResource.queuePerformer perform:^{ + SCManagedCapturerSampleMetadata *sampleMetadata = + [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp + fieldOfView:_captureResource.device.fieldOfView]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didAppendVideoSampleBuffer:sampleBuffer + sampleMetadata:sampleMetadata]; + CFRelease(sampleBuffer); + }]; +} + +- (void)_videoRecordingCleanup +{ + SCTraceODPCompatibleStart(2); + SCAssert(_captureResource.videoRecording, @"clean up function only can be called if the " + @"video recording is still in progress."); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCLogCapturerInfo(@"Video recording cleanup. previous state:%@", _captureResource.state); + [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; + if (_captureResource.videoFrameSampler) { + SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler; + _captureResource.videoFrameSampler = nil; + [_captureResource.announcer removeListener:sampler]; + } + // Add back other listeners to video streamer + [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; + if (!_captureResource.state.torchActive) { + // We should turn off torch for the device that we specifically turned on + // for recording + [_captureResource.device setTorchActive:NO]; + if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = NO; + } + } + + // Unlock focus on both front and back camera if they were locked. + // Even if ARKit was being used during recording, it'll be shut down by the time we get here + // So DON'T match the ARKit check we use around [_ setRecording:YES] + SCManagedCaptureDevice *front = [SCManagedCaptureDevice front]; + SCManagedCaptureDevice *back = [SCManagedCaptureDevice back]; + [front setRecording:NO]; + [back setRecording:NO]; + _captureResource.videoRecording = NO; + if (_captureResource.state.lensesActive) { + BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming; + [_captureResource.lensProcessingCore setModifySource:modifySource]; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerLogger.h b/ManagedCapturer/SCManagedVideoCapturerLogger.h new file mode 100644 index 0000000..f37b24e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerLogger.h @@ -0,0 +1,27 @@ +// +// SCCaptureLogger.h +// Snapchat +// +// Created by Pinlin on 12/04/2017. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
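+//
+// Usage: -prepareForStartingLog resets the stopwatch, each -logStartingStep: records the delta since the
+// previous checkpoint under the given step name, -endLoggingForStarting records the start_writing step and
+// the total delay, and -logEventIfStartingTooSlow reports the per-step breakdown once and then clears it.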
+// + +#import + +static NSString *const kSCCapturerStartingStepAudioSession = @"audio_session"; +static NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @"transcoding_video_bitrate"; +static NSString *const kSCCapturerStartingStepOutputSettings = @"output_settings"; +static NSString *const kSCCapturerStartingStepVideoFrameRawData = @"video_frame_raw_data"; +static NSString *const kSCCapturerStartingStepAudioRecording = @"audio_recording"; +static NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @"asset_writer_config"; +static NSString *const kSCCapturerStartingStepStartingWriting = @"start_writing"; +static NSString *const kCapturerStartingTotalDelay = @"total_delay"; + +@interface SCManagedVideoCapturerLogger : NSObject + +- (void)prepareForStartingLog; +- (void)logStartingStep:(NSString *)stepName; +- (void)endLoggingForStarting; +- (void)logEventIfStartingTooSlow; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerLogger.m b/ManagedCapturer/SCManagedVideoCapturerLogger.m new file mode 100644 index 0000000..2e5ad96 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerLogger.m @@ -0,0 +1,77 @@ +// +// SCManagedVideoCapturerLogger.m +// Snapchat +// +// Created by Pinlin on 12/04/2017. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoCapturerLogger.h" + +#import +#import +#import +#import + +@import QuartzCore; + +@interface SCManagedVideoCapturerLogger () { + // For time profiles metric during start recording + NSMutableDictionary *_startingStepsDelayTime; + NSTimeInterval _beginStartTime; + NSTimeInterval _lastCheckpointTime; + NSTimeInterval _startedTime; +} + +@end + +@implementation SCManagedVideoCapturerLogger + +- (instancetype)init +{ + self = [super init]; + if (self) { + _startingStepsDelayTime = [NSMutableDictionary dictionary]; + } + return self; +} + +- (void)prepareForStartingLog +{ + _beginStartTime = CACurrentMediaTime(); + _lastCheckpointTime = _beginStartTime; + [_startingStepsDelayTime removeAllObjects]; +} + +- (void)logStartingStep:(NSString *)stepname +{ + SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); + NSTimeInterval currentCheckpointTime = CACurrentMediaTime(); + _startingStepsDelayTime[stepname] = @(currentCheckpointTime - _lastCheckpointTime); + _lastCheckpointTime = currentCheckpointTime; +} + +- (void)endLoggingForStarting +{ + SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); + _startedTime = CACurrentMediaTime(); + [self logStartingStep:kSCCapturerStartingStepStartingWriting]; + _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime); +} + +- (void)logEventIfStartingTooSlow +{ + if (_beginStartTime > 0) { + if (_startingStepsDelayTime.count == 0) { + // It should not be here. We only need to log once. 
+ return; + } + SCLogGeneralWarning(@"Capturer starting delay(in second):%f", _startedTime - _beginStartTime); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime]; + // Clean all delay times after logging + [_startingStepsDelayTime removeAllObjects]; + _beginStartTime = 0; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h new file mode 100644 index 0000000..693894e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h @@ -0,0 +1,48 @@ +// 42f6113daff3eebf06d809a073c99651867c42ea +// Generated by the value-object.rb DO NOT EDIT!! + +#import "SCManagedVideoCapturerOutputType.h" + +#import + +#import +#import + +@protocol SCManagedVideoCapturerOutputSettings + +@property (nonatomic, assign, readonly) CGFloat width; + +@property (nonatomic, assign, readonly) CGFloat height; + +@property (nonatomic, assign, readonly) CGFloat videoBitRate; + +@property (nonatomic, assign, readonly) CGFloat audioBitRate; + +@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; + +@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; + +@end + +@interface SCManagedVideoCapturerOutputSettings : NSObject + +@property (nonatomic, assign, readonly) CGFloat width; + +@property (nonatomic, assign, readonly) CGFloat height; + +@property (nonatomic, assign, readonly) CGFloat videoBitRate; + +@property (nonatomic, assign, readonly) CGFloat audioBitRate; + +@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; + +@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; + +- (instancetype)initWithWidth:(CGFloat)width + height:(CGFloat)height + videoBitRate:(CGFloat)videoBitRate + audioBitRate:(CGFloat)audioBitRate + keyFrameInterval:(NSUInteger)keyFrameInterval + outputType:(SCManagedVideoCapturerOutputType)outputType; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m new file mode 100644 index 0000000..275e33d --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m @@ -0,0 +1,221 @@ +// 42f6113daff3eebf06d809a073c99651867c42ea +// Generated by the value-object.rb DO NOT EDIT!! 
+ +#import "SCManagedVideoCapturerOutputSettings.h" + +#import + +#import + +@implementation SCManagedVideoCapturerOutputSettings + +static ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0]; +static BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets; + +- (instancetype)initWithWidth:(CGFloat)width + height:(CGFloat)height + videoBitRate:(CGFloat)videoBitRate + audioBitRate:(CGFloat)audioBitRate + keyFrameInterval:(NSUInteger)keyFrameInterval + outputType:(SCManagedVideoCapturerOutputType)outputType +{ + self = [super init]; + if (self) { + _width = width; + _height = height; + _videoBitRate = videoBitRate; + _audioBitRate = audioBitRate; + _keyFrameInterval = keyFrameInterval; + _outputType = outputType; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _width = [aDecoder decodeFloatForKey:@"width"]; + _height = [aDecoder decodeFloatForKey:@"height"]; + _videoBitRate = [aDecoder decodeFloatForKey:@"videoBitRate"]; + _audioBitRate = [aDecoder decodeFloatForKey:@"audioBitRate"]; + _keyFrameInterval = [[aDecoder decodeObjectForKey:@"keyFrameInterval"] unsignedIntegerValue]; + _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@"outputType"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeFloat:_width forKey:@"width"]; + [aCoder encodeFloat:_height forKey:@"height"]; + [aCoder encodeFloat:_videoBitRate forKey:@"videoBitRate"]; + [aCoder encodeFloat:_audioBitRate forKey:@"audioBitRate"]; + [aCoder encodeObject:@(_keyFrameInterval) forKey:@"keyFrameInterval"]; + [aCoder encodeInteger:(NSInteger)_outputType forKey:@"outputType"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeFloat64:_audioBitRate]; + [fasterCoder encodeFloat64:_height]; + [fasterCoder encodeUInt64:_keyFrameInterval]; + [fasterCoder encodeSInt32:_outputType]; + [fasterCoder encodeFloat64:_videoBitRate]; + [fasterCoder encodeFloat64:_width]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64]; + _height = (CGFloat)[fasterDecoder decodeFloat64]; + _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64]; + _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32]; + _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64]; + _width = (CGFloat)[fasterDecoder decodeFloat64]; +} + +- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 54425104364133881ULL: + _outputType = (SCManagedVideoCapturerOutputType)val; + break; + } +} + +- (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 47327990652274883ULL: + _keyFrameInterval = (NSUInteger)val; + break; + } +} + +- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 50995534680662654ULL: + _audioBitRate = (CGFloat)val; + break; + case 11656660716170763ULL: + _height = (CGFloat)val; + break; + case 29034524155663716ULL: + _videoBitRate = (CGFloat)val; + break; + case 30689178641753681ULL: + _width = (CGFloat)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 14709152111692666517ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 6 /* Total */, + 
FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(11656660716170763, FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64), + FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32), + FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets, + sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) { + return NO; + } + SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object; + if (other->_width != _width) { + return NO; + } + + if (other->_height != _height) { + return NO; + } + + if (other->_videoBitRate != _videoBitRate) { + return NO; + } + + if (other->_audioBitRate != _audioBitRate) { + return NO; + } + + if (other->_keyFrameInterval != _keyFrameInterval) { + return NO; + } + + if (other->_outputType != _outputType) { + return NO; + } + + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = {(NSUInteger)_width, (NSUInteger)_height, (NSUInteger)_videoBitRate, + (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 6; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\twidth:%@\n", [@(_width) description]]; + [desc appendFormat:@"\theight:%@\n", [@(_height) description]]; + [desc appendFormat:@"\tvideoBitRate:%@\n", [@(_videoBitRate) description]]; + [desc appendFormat:@"\taudioBitRate:%@\n", [@(_audioBitRate) description]]; + [desc appendFormat:@"\tkeyFrameInterval:%@\n", [@(_keyFrameInterval) description]]; + [desc appendFormat:@"\toutputType:%@\n", [@(_outputType) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value new file mode 100644 index 0000000..a9fa2f3 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value @@ -0,0 +1,10 @@ +#import "SCManagedVideoCapturerOutputType.h" + +interface SCManagedVideoCapturerOutputSettings + CGFloat width + CGFloat height + CGFloat videoBitRate + CGFloat audioBitRate + NSUInteger keyFrameInterval + enum SCManagedVideoCapturerOutputType outputType +end \ No newline at end of file diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputType.h b/ManagedCapturer/SCManagedVideoCapturerOutputType.h new file mode 100644 index 0000000..d033f4f --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputType.h @@ -0,0 +1,14 @@ +// +// SCManagedVideoCapturerOutputType.h +// Snapchat +// +// Created by Chao Pang on 8/8/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
+// + +#import + +typedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) { + SCManagedVideoCapturerOutputTypeVideoSnap = 0, + SCManagedVideoCapturerOutputTypeVideoNote, +}; diff --git a/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h new file mode 100644 index 0000000..80c9def --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h @@ -0,0 +1,25 @@ +// +// SCManagedVideoCapturerTimeObserver.h +// Snapchat +// +// Created by Michel Loenngren on 4/3/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@class SCTimedTask; + +/* + Class keeping track of SCTimedTasks and firing them on the main thread + when needed. + */ +@interface SCManagedVideoCapturerTimeObserver : NSObject + +- (void)addTimedTask:(SCTimedTask *_Nonnull)task; + +- (void)processTime:(CMTime)relativePresentationTime + sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m new file mode 100644 index 0000000..5b16547 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m @@ -0,0 +1,61 @@ +// +// SCManagedVideoCapturerTimeObserver.m +// Snapchat +// +// Created by Michel Loenngren on 4/3/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoCapturerTimeObserver.h" + +#import "SCTimedTask.h" + +#import +#import +#import + +@implementation SCManagedVideoCapturerTimeObserver { + NSMutableArray *_tasks; + BOOL _isProcessing; +} + +- (instancetype)init +{ + if (self = [super init]) { + _tasks = [NSMutableArray new]; + _isProcessing = NO; + } + return self; +} + +- (void)addTimedTask:(SCTimedTask *_Nonnull)task +{ + SCAssert(!_isProcessing, + @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started."); + SCAssert(CMTIME_IS_VALID(task.targetTime), + @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time."); + [_tasks addObject:task]; + [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) { + return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime); + }]; + SCLogGeneralInfo(@"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu", task, + (unsigned long)_tasks.count); +} + +- (void)processTime:(CMTime)relativePresentationTime + sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond +{ + _isProcessing = YES; + SCTimedTask *last = _tasks.lastObject; + while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) { + [_tasks removeLastObject]; + void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task; + last.task = nil; + runOnMainThreadAsynchronously(^{ + task(relativePresentationTime, sessionStartTimeDelayInSecond); + }); + last = _tasks.lastObject; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoFileStreamer.h b/ManagedCapturer/SCManagedVideoFileStreamer.h new file mode 100644 index 0000000..6ede4ea --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFileStreamer.h @@ -0,0 +1,26 @@ +// +// SCManagedVideoFileStreamer.h +// Snapchat +// +// Created by Alexander Grytsiuk on 3/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
+// + +#import + +#import +#import + +typedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer); + +/** + * SCManagedVideoFileStreamer reads a video file from provided NSURL to create + * and publish video output frames. SCManagedVideoFileStreamer also conforms + * to SCManagedVideoDataSource allowing chained consumption of video frames. + */ +@interface SCManagedVideoFileStreamer : NSObject + +- (instancetype)initWithPlaybackForURL:(NSURL *)URL; +- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion; + +@end diff --git a/ManagedCapturer/SCManagedVideoFileStreamer.m b/ManagedCapturer/SCManagedVideoFileStreamer.m new file mode 100644 index 0000000..aed6089 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFileStreamer.m @@ -0,0 +1,299 @@ +// +// SCManagedVideoFileStreamer.m +// Snapchat +// +// Created by Alexander Grytsiuk on 3/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoFileStreamer.h" + +#import "SCManagedCapturePreviewLayerController.h" + +#import +#import +#import +#import +#import + +@import AVFoundation; +@import CoreMedia; + +static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer"; + +@interface SCManagedVideoFileStreamer () +@end + +@implementation SCManagedVideoFileStreamer { + SCManagedVideoDataSourceListenerAnnouncer *_announcer; + SCManagedCaptureDevicePosition _devicePosition; + sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler; + + id _notificationToken; + id _performer; + dispatch_semaphore_t _semaphore; + + CADisplayLink *_displayLink; + AVPlayerItemVideoOutput *_videoOutput; + AVPlayer *_player; + + BOOL _sampleBufferDisplayEnabled; + id _sampleBufferDisplayController; +} + +@synthesize isStreaming = _isStreaming; +@synthesize performer = _performer; +@synthesize videoOrientation = _videoOrientation; + +- (instancetype)initWithPlaybackForURL:(NSURL *)URL +{ + SCTraceStart(); + self = [super init]; + if (self) { + _videoOrientation = AVCaptureVideoOrientationLandscapeRight; + _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; + _semaphore = dispatch_semaphore_create(1); + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel + qualityOfService:QOS_CLASS_UNSPECIFIED + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextStories]; + + // Setup CADisplayLink which will callback displayPixelBuffer: at every vsync. 
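+ // The selector wired up below is -displayLinkCallback:, which pulls decoded frames from the
+ // AVPlayerItemVideoOutput on _performer; the link starts paused and is resumed from
+ // -outputMediaDataWillChange: once the item output reports that media data is available.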
+ _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)]; + [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode]; + [_displayLink setPaused:YES]; + + // Prepare player + _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL]; +#if TARGET_IPHONE_SIMULATOR + _player.volume = 0.0; +#endif + // Configure output + [self configureOutput]; + } + return self; +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController +{ + _sampleBufferDisplayController = sampleBufferDisplayController; +} + +- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled +{ + _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; + SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); +} + +- (void)setKeepLateFrames:(BOOL)keepLateFrames +{ + // Do nothing +} + +- (BOOL)getKeepLateFrames +{ + // return default NO value + return NO; +} + +- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler +{ + SCAssert(queue, @"callback queue must be provided"); + SCAssert(completionHandler, @"completion handler must be provided"); + dispatch_async(queue, completionHandler); +} + +- (void)startStreaming +{ + SCTraceStart(); + if (!_isStreaming) { + _isStreaming = YES; + [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem]; + [_player play]; + } +} + +- (void)stopStreaming +{ + SCTraceStart(); + if (_isStreaming) { + _isStreaming = NO; + [_player pause]; + [self removePlayerObservers]; + } +} + +- (void)pauseStreaming +{ + [self stopStreaming]; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + [_announcer removeListener:listener]; +} + +- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; +} + +- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation +{ + _videoOrientation = videoOrientation; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + // Ignored +} + +- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported +{ + // Ignored +} + +- (void)beginConfiguration +{ + // Ignored +} + +- (void)commitConfiguration +{ + // Ignored +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + // Ignored +} + +#pragma mark - AVPlayerItemOutputPullDelegate + +- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender +{ + if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) { + [self configureOutput]; + } + [_displayLink setPaused:NO]; +} + +#pragma mark - Internal + +- (void)displayLinkCallback:(CADisplayLink *)sender +{ + CFTimeInterval nextVSync = [sender timestamp] + [sender duration]; + + CMTime time = [_videoOutput itemTimeForHostTime:nextVSync]; + if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) { + [_performer perform:^{ + if ([_videoOutput hasNewPixelBufferForItemTime:time]) { + CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL]; + if (pixelBuffer != NULL) { + if (_nextPixelBufferHandler) { + _nextPixelBufferHandler(pixelBuffer); + _nextPixelBufferHandler = nil; + } else { + 
CMSampleBufferRef sampleBuffer = + [self createSampleBufferFromPixelBuffer:pixelBuffer + presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)]; + if (sampleBuffer) { + if (_sampleBufferDisplayEnabled) { + [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; + } + [_announcer managedVideoDataSource:self + didOutputSampleBuffer:sampleBuffer + devicePosition:_devicePosition]; + CFRelease(sampleBuffer); + } + } + CVBufferRelease(pixelBuffer); + } + } + dispatch_semaphore_signal(_semaphore); + }]; + } +} + +- (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time +{ + CMSampleBufferRef sampleBuffer = NULL; + CMVideoFormatDescriptionRef formatDesc = NULL; + + OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); + if (err != noErr) { + return NULL; + } + + CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid}; + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc, + &sampleTimingInfo, &sampleBuffer); + + CFRelease(formatDesc); + + return sampleBuffer; +} + +- (void)configureOutput +{ + // Remove old output + if (_videoOutput) { + [[_player currentItem] removeOutput:_videoOutput]; + } + + // Setup AVPlayerItemVideoOutput with the required pixelbuffer attributes. + _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{ + (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) + }]; + _videoOutput.suppressesPlayerRendering = YES; + [_videoOutput setDelegate:self queue:_performer.queue]; + + // Add new output + [[_player currentItem] addOutput:_videoOutput]; + [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0]; +} + +- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion +{ + _nextPixelBufferHandler = completion; +} + +- (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item +{ + if (_notificationToken) { + _notificationToken = nil; + } + + _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; + _notificationToken = + [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification + object:item + queue:[NSOperationQueue mainQueue] + usingBlock:^(NSNotification *note) { + [[_player currentItem] seekToTime:kCMTimeZero]; + }]; +} + +- (void)removePlayerObservers +{ + if (_notificationToken) { + [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken + name:AVPlayerItemDidPlayToEndTimeNotification + object:_player.currentItem]; + _notificationToken = nil; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoFrameSampler.h b/ManagedCapturer/SCManagedVideoFrameSampler.h new file mode 100644 index 0000000..69fa80c --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFrameSampler.h @@ -0,0 +1,22 @@ +// +// SCManagedVideoFrameSampler.h +// Snapchat +// +// Created by Michel Loenngren on 3/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturerListener.h" + +#import + +/** + Allows consumer to register a block to sample the next CMSampleBufferRef and + automatically leverages Core image to convert the pixel buffer to a UIImage. + Returned image will be a copy. 
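+ To use, register the sampler as a capturer listener and call -sampleNextFrame: with a block; the block
+ fires once, asynchronously on the main thread, with the converted frame and its presentation time.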
+ */ +@interface SCManagedVideoFrameSampler : NSObject + +- (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock; + +@end diff --git a/ManagedCapturer/SCManagedVideoFrameSampler.m b/ManagedCapturer/SCManagedVideoFrameSampler.m new file mode 100644 index 0000000..1d0eb62 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFrameSampler.m @@ -0,0 +1,65 @@ +// +// SCManagedVideoFrameSampler.m +// Snapchat +// +// Created by Michel Loenngren on 3/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoFrameSampler.h" + +#import +#import + +@import CoreImage; +@import ImageIO; + +@interface SCManagedVideoFrameSampler () + +@property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime); +@property (nonatomic, strong) CIContext *ciContext; + +@end + +@implementation SCManagedVideoFrameSampler + +- (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock +{ + _frameSampleBlock = completeBlock; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata +{ + void (^block)(UIImage *, CMTime) = _frameSampleBlock; + _frameSampleBlock = nil; + + if (!block) { + return; + } + + CVImageBufferRef cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + UIImage *image; + if (cvImageBuffer) { + CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(cvImageBuffer); + image = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight]; + CGImageRelease(cgImage); + } + runOnMainThreadAsynchronously(^{ + block(image, presentationTime); + }); +} + +- (CIContext *)ciContext +{ + if (!_ciContext) { + _ciContext = [CIContext context]; + } + return _ciContext; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoNoSoundLogger.h b/ManagedCapturer/SCManagedVideoNoSoundLogger.h new file mode 100644 index 0000000..23e5772 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoNoSoundLogger.h @@ -0,0 +1,44 @@ +// +// SCManagedVideoNoSoundLogger.h +// Snapchat +// +// Created by Pinlin Chen on 15/07/2017. 
+// +// + +#import + +#import +#import + +@protocol SCManiphestTicketCreator; + +@interface SCManagedVideoNoSoundLogger : NSObject + +@property (nonatomic, strong) NSError *audioSessionError; +@property (nonatomic, strong) NSError *audioQueueError; +@property (nonatomic, strong) NSError *assetWriterError; +@property (nonatomic, assign) BOOL retryAudioQueueSuccess; +@property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource; +@property (nonatomic, strong) NSString *brokenMicCodeType; +@property (nonatomic, assign) BOOL lenseActiveWhileRecording; +@property (nonatomic, strong) NSString *activeLensId; +@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay; +@property (nonatomic, assign) BOOL audioQueueStarted; + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithTicketCreator:(id)ticketCreator; + +/* Use to counting how many no sound issue we have fixed */ +// Call at the place where we have fixed the AVPlayer leak before ++ (void)startCountingVideoNoSoundHaveBeenFixed; + +/* Use to report the detail of new no sound issue */ +// Reset all the properties of recording error +- (void)resetAll; +// Log if the audio track is empty +- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL; +// called by AVCameraViewController when lense resume audio +- (void)managedLensesProcessorDidCallResumeAllSounds; + +@end diff --git a/ManagedCapturer/SCManagedVideoNoSoundLogger.m b/ManagedCapturer/SCManagedVideoNoSoundLogger.m new file mode 100644 index 0000000..f0a5dd0 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoNoSoundLogger.m @@ -0,0 +1,283 @@ +// +// SCManagedVideoNoSoundLogger.m +// Snapchat +// +// Created by Pinlin Chen on 15/07/2017. +// +// + +#import "SCManagedVideoNoSoundLogger.h" + +#import "SCManagedCapturer.h" +#import "SCManiphestTicketCreator.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +@import AVFoundation; + +static BOOL s_startCountingVideoNoSoundFixed; +// Count the number of no sound errors for an App session +static NSUInteger s_noSoundCaseCount = 0; + +@interface SCManagedVideoNoSoundLogger () { + BOOL _isAudioSessionDeactivated; + int _lenseResumeCount; +} + +@property (nonatomic) id ticketCreator; + +@end + +@implementation SCManagedVideoNoSoundLogger + +- (instancetype)initWithTicketCreator:(id)ticketCreator +{ + if (self = [super init]) { + _ticketCreator = ticketCreator; + } + return self; +} + ++ (NSUInteger)noSoundCount +{ + return s_noSoundCaseCount; +} + ++ (void)increaseNoSoundCount +{ + s_noSoundCaseCount += 1; +} + ++ (void)startCountingVideoNoSoundHaveBeenFixed +{ + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + s_startCountingVideoNoSoundFixed = YES; + SCLogGeneralInfo(@"start counting video no sound have been fixed"); + }); +} + ++ (NSString *)appSessionIdForNoSound +{ + static dispatch_once_t onceToken; + static NSString *s_AppSessionIdForNoSound = @"SCDefaultSession"; + dispatch_once(&onceToken, ^{ + s_AppSessionIdForNoSound = SCUUID(); + }); + return s_AppSessionIdForNoSound; +} + ++ (void)logVideoNoSoundHaveBeenFixedIfNeeded +{ + if (s_startCountingVideoNoSoundFixed) { + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:@{ + @"have_been_fixed" : @"true", + @"fixed_type" : @"player_leak", + @"asset_writer_success" : @"true", + @"audio_session_success" : @"true", + @"audio_queue_success" : @"true", + } + secretParameters:nil + metrics:nil]; + } +} + ++ (void)logAudioSessionCategoryHaveBeenFixed +{ + [[SCLogger sharedInstance] 
logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:@{ + @"have_been_fixed" : @"true", + @"fixed_type" : @"audio_session_category_mismatch", + @"asset_writer_success" : @"true", + @"audio_session_success" : @"true", + @"audio_queue_success" : @"true", + } + secretParameters:nil + metrics:nil]; +} + ++ (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type +{ + [[SCLogger sharedInstance] + logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:@{ + @"have_been_fixed" : @"true", + @"fixed_type" : @"broken_microphone", + @"asset_writer_success" : @"true", + @"audio_session_success" : @"true", + @"audio_queue_success" : @"true", + @"mic_broken_type" : SC_NULL_STRING_IF_NIL(type), + @"audio_session_debug_info" : + [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", + } + secretParameters:nil + metrics:nil]; +} + +- (instancetype)init +{ + if (self = [super init]) { + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_audioSessionWillDeactivate) + name:SCAudioSessionWillDeactivateNotification + object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_audioSessionDidActivate) + name:SCAudioSessionActivatedNotification + object:nil]; + _firstWrittenAudioBufferDelay = kCMTimeInvalid; + } + return self; +} + +- (void)resetAll +{ + _audioQueueError = nil; + _audioSessionError = nil; + _assetWriterError = nil; + _retryAudioQueueSuccess = NO; + _retryAudioQueueSuccessSetDataSource = NO; + _brokenMicCodeType = nil; + _lenseActiveWhileRecording = NO; + _lenseResumeCount = 0; + _activeLensId = nil; + self.firstWrittenAudioBufferDelay = kCMTimeInvalid; +} + +- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL +{ + AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL]; + + __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0); + + dispatch_block_t block = ^{ + + // Log no audio issues have been fixed + if (hasAudioTrack) { + if (_retryAudioQueueSuccess) { + [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed]; + } else if (_retryAudioQueueSuccessSetDataSource) { + [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType]; + } else { + [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded]; + } + } else { + // Log no audio issues caused by no permission into "wont_fixed_type", won't show in Grafana + BOOL isPermissonGranted = + [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted; + if (!isPermissonGranted) { + [SCManagedVideoNoSoundLogger increaseNoSoundCount]; + [[SCLogger sharedInstance] + logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:@{ + @"wont_fix_type" : @"no_permission", + @"no_sound_count" : + [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", + @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" + } + secretParameters:nil + metrics:nil]; + + } + // Log no audio issues caused by microphone occupied into "wont_fixed_type", for example Phone Call, + // It won't show in Grafana + // TODO: maybe we should prompt the user of these errors in the future + else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority || + _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) { + NSDictionary *parameters = @{ + @"wont_fix_type" : @"microphone_in_use", + @"asset_writer_error" : _assetWriterError ? 
[_assetWriterError description] : @"(null)", + @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)", + @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)", + @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false", + @"audio_session_debug_info" : + [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", + @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", + @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" + }; + + [SCManagedVideoNoSoundLogger increaseNoSoundCount]; + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:parameters + secretParameters:nil + metrics:nil]; + [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)]; + } else { + // Log other new no audio issues, use "have_been_fixed=false" to show in Grafana + NSDictionary *parameters = @{ + @"have_been_fixed" : @"false", + @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)", + @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)", + @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)", + @"asset_writer_success" : [NSString stringWithBool:_assetWriterError == nil], + @"audio_session_success" : [NSString stringWithBool:_audioSessionError == nil], + @"audio_queue_success" : [NSString stringWithBool:_audioQueueError == nil], + @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false", + @"video_duration" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(asset.duration)], + @"is_audio_session_nil" : + [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? @"true" : @"false", + @"lenses_active" : [NSString stringWithBool:self.lenseActiveWhileRecording], + @"active_lense_id" : self.activeLensId ?: @"(null)", + @"lense_audio_resume_count" : @(_lenseResumeCount), + @"first_audio_buffer_delay" : + [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)], + @"audio_session_debug_info" : + [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", + @"audio_queue_started" : [NSString stringWithBool:_audioQueueStarted], + @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", + @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" + }; + [SCManagedVideoNoSoundLogger increaseNoSoundCount]; + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError + parameters:parameters + secretParameters:nil + metrics:nil]; + [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)]; + } + } + }; + if (hasAudioTrack) { + block(); + } else { + // Wait for all tracks to be loaded, in case of error counting the metric + [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] + completionHandler:^{ + // Return when the tracks couldn't be loaded + NSError *error = nil; + if ([asset statusOfValueForKey:@"tracks" error:&error] != AVKeyValueStatusLoaded || + error != nil) { + return; + } + + // check audio track again + hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0); + runOnMainThreadAsynchronously(block); + }]; + } +} + +- (void)_audioSessionWillDeactivate +{ + _isAudioSessionDeactivated = YES; +} + +- (void)_audioSessionDidActivate +{ + _isAudioSessionDeactivated = NO; +} + +- (void)managedLensesProcessorDidCallResumeAllSounds +{ + _lenseResumeCount += 1; +} + +@end 
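
// A minimal usage sketch for SCManagedVideoNoSoundLogger, assuming a hypothetical recording controller;
// `ticketCreator`, `recorder`, `activeLensId`, and `outputURL` are illustrative names, not part of this diff.
SCManagedVideoNoSoundLogger *noSoundLogger =
    [[SCManagedVideoNoSoundLogger alloc] initWithTicketCreator:ticketCreator];
[noSoundLogger resetAll];                                 // clear per-recording state before capture starts
noSoundLogger.lenseActiveWhileRecording = YES;            // example: a lens was active during this recording
noSoundLogger.activeLensId = activeLensId;
noSoundLogger.audioQueueError = recorder.audioQueueError; // propagate any capture-side audio errors
[noSoundLogger checkVideoFileAndLogIfNeeded:outputURL];   // after recording, log if the file has no audio track
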
diff --git a/ManagedCapturer/SCManagedVideoScanner.h b/ManagedCapturer/SCManagedVideoScanner.h new file mode 100644 index 0000000..e2dfe72 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoScanner.h @@ -0,0 +1,35 @@ +// +// SCManagedVideoScanner.h +// Snapchat +// +// Created by Liu Liu on 5/5/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturer.h" +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +#import + +@class SCScanConfiguration; + +@interface SCManagedVideoScanner : NSObject + +/** + * Calling this method to start scan, scan will automatically stop when a snapcode detected + */ +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration; + +/** + * Calling this method to stop scan immediately (it is still possible that a successful scan can happen after this is + * called) + */ +- (void)stopScanAsynchronously; + +- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration + maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration + restCycle:(float)restCycle; + +@end diff --git a/ManagedCapturer/SCManagedVideoScanner.m b/ManagedCapturer/SCManagedVideoScanner.m new file mode 100644 index 0000000..36eaa7a --- /dev/null +++ b/ManagedCapturer/SCManagedVideoScanner.m @@ -0,0 +1,299 @@ +// +// SCManagedVideoScanner.m +// Snapchat +// +// Created by Liu Liu on 5/5/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoScanner.h" + +#import "SCScanConfiguration.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +// In seconds +static NSTimeInterval const kDefaultScanTimeout = 60; + +static const char *kSCManagedVideoScannerQueueLabel = "com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan"; + +@interface SCManagedVideoScanner () + +@end + +@implementation SCManagedVideoScanner { + SCSnapScanner *_snapScanner; + dispatch_semaphore_t _activeSemaphore; + NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes + NSTimeInterval _maxFrameDefaultDuration; + NSTimeInterval _maxFramePassiveDuration; + float _restCycleOfBusyCycle; + NSTimeInterval _scanStartTime; + BOOL _active; + BOOL _shouldEmitEvent; + dispatch_block_t _completionHandler; + NSTimeInterval _scanTimeout; + SCManagedCaptureDevicePosition _devicePosition; + SCQueuePerformer *_performer; + BOOL _adjustingFocus; + NSArray *_codeTypes; + NSArray *_codeTypesOld; + sc_managed_capturer_scan_results_handler_t _scanResultsHandler; + + SCUserSession *_userSession; +} + +- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration + maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration + restCycle:(float)restCycle +{ + SCTraceStart(); + self = [super init]; + if (self) { + _snapScanner = [SCSnapScanner sharedInstance]; + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel + qualityOfService:QOS_CLASS_UNSPECIFIED + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _activeSemaphore = dispatch_semaphore_create(0); + SCAssert(restCycle >= 0 && restCycle < 1, @"rest cycle should be between 0 to 1"); + _maxFrameDefaultDuration = maxFrameDefaultDuration; + _maxFramePassiveDuration = maxFramePassiveDuration; + _restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest + } + return self; +} +#pragma mark - Public methods + +- 
(void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration +{ + SCTraceStart(); + [_performer perform:^{ + _shouldEmitEvent = YES; + _completionHandler = nil; + _scanResultsHandler = configuration.scanResultsHandler; + _userSession = configuration.userSession; + _scanTimeout = kDefaultScanTimeout; + _maxFrameDuration = _maxFrameDefaultDuration; + _codeTypes = [self _scanCodeTypes]; + _codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ]; + + SCTraceStart(); + // Set the scan start time properly, if we call startScan multiple times while it is active, + // This makes sure we can scan long enough. + _scanStartTime = CACurrentMediaTime(); + // we are not active, need to send the semaphore to start the scan + if (!_active) { + _active = YES; + + // Signal the semaphore that we can start scan! + dispatch_semaphore_signal(_activeSemaphore); + } + }]; +} + +- (void)stopScanAsynchronously +{ + SCTraceStart(); + [_performer perform:^{ + SCTraceStart(); + if (_active) { + SCLogScanDebug(@"VideoScanner:stopScanAsynchronously turn off from active"); + _active = NO; + _scanStartTime = 0; + _scanResultsHandler = nil; + _userSession = nil; + } else { + SCLogScanDebug(@"VideoScanner:stopScanAsynchronously off already"); + } + }]; +} + +#pragma mark - Private Methods + +- (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData +{ + if (scannedData.hasScannedData) { + if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji || + scannedData.codeType == SCCodeTypeSnapcode18x18Old) { + NSString *data = [scannedData.rawData base64EncodedString]; + NSString *version = [NSString sc_stringWithFormat:@"%i", scannedData.codeTypeMeta]; + [[SCLogger sharedInstance] logEvent:@"SNAPCODE_18x18_SCANNED_FROM_CAMERA" + parameters:@{ + @"version" : version + } + secretParameters:@{ + @"data" : data + }]; + + if (_completionHandler != nil) { + runOnMainThreadAsynchronously(_completionHandler); + _completionHandler = nil; + } + } else if (scannedData.codeType == SCCodeTypeBarcode) { + if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) { + return; + } + NSString *data = scannedData.data; + NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta]; + [[SCLogger sharedInstance] logEvent:@"BARCODE_SCANNED_FROM_CAMERA" + parameters:@{ + @"type" : type + } + secretParameters:@{ + @"data" : data + }]; + } else if (scannedData.codeType == SCCodeTypeQRCode) { + if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) { + return; + } + NSURL *url = [NSURL URLWithString:scannedData.data]; + [[SCLogger sharedInstance] logEvent:@"QR_CODE_SCANNED_FROM_CAMERA" + parameters:@{ + @"type" : (url) ? @"url" : @"other" + } + secretParameters:@{}]; + } + + if (_shouldEmitEvent) { + sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler; + runOnMainThreadAsynchronously(^{ + if (scanResultsHandler != nil && scannedData) { + SCMachineReadableCodeResult *result = + [SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData]; + scanResultsHandler(result); + } + }); + } + } +} + +- (NSArray *)_scanCodeTypes +{ + // Scan types are defined by codetypes. SnapScan will scan the frame based on codetype. 
+ NSMutableArray *codeTypes = [[NSMutableArray alloc] + initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil]; + if (SCSearchEnableBarcodeProductSearch()) { + [codeTypes addObject:@(SCCodeTypeBarcode)]; + } + return [codeTypes copy]; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + _devicePosition = devicePosition; + + if (!_active) { + SCLogScanDebug(@"VideoScanner: Scanner is not active"); + return; + } + SCLogScanDebug(@"VideoScanner: Scanner is active"); + + // If we have the semaphore now, enqueue a new buffer, otherwise drop the buffer + if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) { + CFRetain(sampleBuffer); + NSTimeInterval startTime = CACurrentMediaTime(); + [_performer perform:^{ + SCTraceStart(); + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + SCLogScanInfo(@"VideoScanner: Scanner will scan a frame"); + SCSnapScannedData *scannedData; + + SCLogScanInfo(@"VideoScanner:Use new scanner without false alarm check"); + scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes]; + + if ([UIDevice shouldLogPerfEvents]) { + NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000; + // Since there are too many unsuccessful scans, we will only log 1/10 of them for now. + if (scannedData.hasScannedData || (!scannedData.hasScannedData && arc4random() % 10 == 0)) { + [[SCLogger sharedInstance] logEvent:@"SCAN_SINGLE_FRAME" + parameters:@{ + @"time_span" : @(loadingMs), + @"has_scanned_data" : @(scannedData.hasScannedData), + }]; + } + } + + [self _handleSnapScanResult:scannedData]; + // If it is not turned off, we will continue to scan if result is not presetn + if (_active) { + _active = !scannedData.hasScannedData; + } + + // Clean up if result is reported for scan + if (!_active) { + _scanResultsHandler = nil; + _completionHandler = nil; + } + + CFRelease(sampleBuffer); + + NSTimeInterval currentTime = CACurrentMediaTime(); + SCLogScanInfo(@"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f", currentTime - startTime, + _maxFrameDuration, _scanTimeout); + // Haven't found the scanned data yet, haven't reached maximum scan timeout yet, haven't turned this off + // yet, ready for the next frame + if (_active && currentTime < _scanStartTime + _scanTimeout) { + // We've finished processing current sample buffer, ready for next one, but before that, we need to rest + // a bit (if possible) + if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) { + // If we already reached deadline (used too much time) and don't want to rest CPU, give the signal + // now to grab the next frame + SCLogScanInfo(@"VideoScanner:Signal to get next frame for snapcode scanner"); + dispatch_semaphore_signal(_activeSemaphore); + } else { + NSTimeInterval afterTime = MAX((currentTime - startTime) * _restCycleOfBusyCycle, + _maxFrameDuration - (currentTime - startTime)); + // If we need to wait more than 0 second, then do that, otherwise grab the next frame immediately + if (afterTime > 0) { + [_performer perform:^{ + SCLogScanInfo( + @"VideoScanner:Waited and now signaling to get next frame for snapcode scanner"); + dispatch_semaphore_signal(_activeSemaphore); + } + after:afterTime]; + } else { + SCLogScanInfo(@"VideoScanner:Now signaling to get 
next frame for snapcode scanner"); + dispatch_semaphore_signal(_activeSemaphore); + } + } + } else { + // We are not active, and not going to be active any more. + SCLogScanInfo(@"VideoScanner:not active anymore"); + _active = NO; + _scanResultsHandler = nil; + _completionHandler = nil; + } + }]; + } +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:(BOOL)adjustingFocus +{ + [_performer perform:^{ + _adjustingFocus = adjustingFocus; + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoStreamReporter.h b/ManagedCapturer/SCManagedVideoStreamReporter.h new file mode 100644 index 0000000..22ef049 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoStreamReporter.h @@ -0,0 +1,15 @@ +// +// SCManagedVideoStreamReporter.h +// Snapchat +// +// Created by Liu Liu on 5/16/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import + +#import + +@interface SCManagedVideoStreamReporter : NSObject + +@end diff --git a/ManagedCapturer/SCManagedVideoStreamReporter.m b/ManagedCapturer/SCManagedVideoStreamReporter.m new file mode 100644 index 0000000..a0addeb --- /dev/null +++ b/ManagedCapturer/SCManagedVideoStreamReporter.m @@ -0,0 +1,58 @@ +// +// SCManagedVideoStreamReporter.m +// Snapchat +// +// Created by Liu Liu on 5/16/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoStreamReporter.h" + +#import +#import + +static NSTimeInterval const SCManagedVideoStreamReporterInterval = 10; + +@implementation SCManagedVideoStreamReporter { + NSUInteger _droppedSampleBuffers; + NSUInteger _outputSampleBuffers; + NSTimeInterval _lastReportTime; +} + +- (instancetype)init +{ + self = [super init]; + if (self) { + _lastReportTime = CACurrentMediaTime(); + } + return self; +} + +- (void)_reportIfNeeded +{ + NSTimeInterval currentTime = CACurrentMediaTime(); + if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) { + SCLogGeneralInfo(@"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu", _lastReportTime, currentTime, + _droppedSampleBuffers, _outputSampleBuffers); + _droppedSampleBuffers = _outputSampleBuffers = 0; + _lastReportTime = currentTime; + } +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + ++_outputSampleBuffers; + [self _reportIfNeeded]; +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + ++_droppedSampleBuffers; + [self _reportIfNeeded]; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoStreamer.h b/ManagedCapturer/SCManagedVideoStreamer.h new file mode 100644 index 0000000..8432d12 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoStreamer.h @@ -0,0 +1,36 @@ +// +// SCManagedVideoStreamer.h +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedVideoARDataSource.h" + +#import + +#import +#import + +@class ARSession; + +/** + * SCManagedVideoStreamer uses the current AVCaptureSession to create + * and publish video output frames. SCManagedVideoStreamer also conforms + * to SCManagedVideoDataSource allowing chained consumption of video frames. 
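+ * On iOS 11 and later it can also be driven by an ARSession, in which case frames are delivered through the
+ * ARSessionDelegate callbacks; the setup methods below allow rebinding the streamer to a new capture session.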
+ */ +@interface SCManagedVideoStreamer : NSObject + +- (instancetype)initWithSession:(AVCaptureSession *)session + devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (instancetype)initWithSession:(AVCaptureSession *)session + arSession:(ARSession *)arSession + devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0); + +- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0); + +@end diff --git a/ManagedCapturer/SCManagedVideoStreamer.m b/ManagedCapturer/SCManagedVideoStreamer.m new file mode 100644 index 0000000..83bfa5e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoStreamer.m @@ -0,0 +1,823 @@ +// +// SCManagedVideoStreamer.m +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedVideoStreamer.h" + +#import "ARConfiguration+SCConfiguration.h" +#import "SCCameraTweaks.h" +#import "SCCapturerDefines.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCMetalUtils.h" +#import "SCProcessingPipeline.h" +#import "SCProcessingPipelineBuilder.h" + +#import +#import +#import +#import +#import +#import + +#import + +#import +#import + +@import ARKit; +@import AVFoundation; + +#define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) +#define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) +#define SCLogVideoStreamerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) + +static NSInteger const kSCCaptureFrameRate = 30; +static CGFloat const kSCLogInterval = 3.0; +static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer"; +static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue"; +static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late. + +static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay = + 5; // If the frame is not updated for 5 seconds, it is considered to be stalled. 
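+// Note: kSCManagedVideoStreamerMaxAllowedLatency is currently consulted in -didOutputSampleBuffer:depthData: only to
+// log a warning when a frame's presentation timestamp trails the host clock by more than this value; the late frame is
+// still displayed and announced rather than dropped.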
+ +static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap = + 1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps +static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15; + +@interface SCManagedVideoStreamer () + +@property (nonatomic, strong) AVCaptureSession *captureSession; + +@end + +@implementation SCManagedVideoStreamer { + AVCaptureVideoDataOutput *_videoDataOutput; + AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0); + AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0); + BOOL _performingConfigurations; + SCManagedCaptureDevicePosition _devicePosition; + BOOL _videoStabilizationEnabledIfSupported; + SCManagedVideoDataSourceListenerAnnouncer *_announcer; + + BOOL _sampleBufferDisplayEnabled; + id _sampleBufferDisplayController; + dispatch_block_t _flushOutdatedPreviewBlock; + NSMutableArray *_waitUntilSampleBufferDisplayedBlocks; + SCProcessingPipeline *_processingPipeline; + + NSTimeInterval _lastDisplayedFrameTimestamp; +#ifdef SC_USE_ARKIT_FACE + NSTimeInterval _lastDisplayedDepthFrameTimestamp; +#endif + + BOOL _depthCaptureEnabled; + CGPoint _portraitModePointOfInterest; + + // For sticky video tweaks + BOOL _keepLateFrames; + SCQueuePerformer *_callbackPerformer; + atomic_int _processingBuffersCount; +} + +@synthesize isStreaming = _isStreaming; +@synthesize performer = _performer; +@synthesize currentFrame = _currentFrame; +@synthesize fieldOfView = _fieldOfView; +#ifdef SC_USE_ARKIT_FACE +@synthesize lastDepthData = _lastDepthData; +#endif +@synthesize videoOrientation = _videoOrientation; + +- (instancetype)initWithSession:(AVCaptureSession *)session + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + self = [super init]; + if (self) { + _sampleBufferDisplayEnabled = YES; + _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; + // We discard frames to support lenses in real time + _keepLateFrames = NO; + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + + _videoOrientation = AVCaptureVideoOrientationLandscapeRight; + + [self setupWithSession:session devicePosition:devicePosition]; + SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition); + } + return self; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + arSession:(ARSession *)arSession + devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0) +{ + self = [self initWithSession:session devicePosition:devicePosition]; + if (self) { + [self setupWithARSession:arSession]; + self.currentFrame = nil; +#ifdef SC_USE_ARKIT_FACE + self.lastDepthData = nil; +#endif + } + return self; +} + +- (AVCaptureVideoDataOutput *)_newVideoDataOutput +{ + AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; + // All inbound frames are going to be the native format of the camera avoid + // any need for transcoding. 
+ output.videoSettings = + @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; + return output; +} + +- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + [self stopStreaming]; + self.captureSession = session; + _devicePosition = devicePosition; + + _videoDataOutput = [self _newVideoDataOutput]; + if (SCDeviceSupportsMetal()) { + // We default to start the streaming if the Metal is supported at startup time. + _isStreaming = YES; + // Set the sample buffer delegate before starting it. + [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + } + + if ([session canAddOutput:_videoDataOutput]) { + [session addOutput:_videoDataOutput]; + [self _enableVideoMirrorForDevicePosition:devicePosition]; + } + + if (SCCameraTweaksEnablePortraitModeButton()) { + if (@available(iOS 11.0, *)) { + _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init]; + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + if ([session canAddOutput:_depthDataOutput]) { + [session addOutput:_depthDataOutput]; + [_depthDataOutput setDelegate:self callbackQueue:_performer.queue]; + } + _depthCaptureEnabled = NO; + } + _portraitModePointOfInterest = CGPointMake(0.5, 0.5); + } + + [self setVideoStabilizationEnabledIfSupported:YES]; +} + +- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0) +{ + arSession.delegateQueue = _performer.queue; + arSession.delegate = self; +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController +{ + [_performer perform:^{ + _sampleBufferDisplayController = sampleBufferDisplayController; + SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController); + }]; +} + +- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled +{ + [_performer perform:^{ + _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; + SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); + }]; +} + +- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler +{ + SCAssert(queue, @"callback queue must be provided"); + SCAssert(completionHandler, @"completion handler must be provided"); + SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue, + completionHandler, _isStreaming); + if (_isStreaming) { + [_performer perform:^{ + if (!_waitUntilSampleBufferDisplayedBlocks) { + _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array]; + } + [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]]; + SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler); + }]; + } else { + dispatch_async(queue, completionHandler); + } +} + +- (void)startStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"start streaming. 
_isStreaming:%d", _isStreaming); + if (!_isStreaming) { + _isStreaming = YES; + [self _cancelFlushOutdatedPreview]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES]; + } + } + [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + } +} + +- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + if ([session canAddOutput:_videoDataOutput]) { + SCLogVideoStreamerError(@"add videoDataOutput:%@", _videoDataOutput); + [session addOutput:_videoDataOutput]; + [self _enableVideoMirrorForDevicePosition:devicePosition]; + } else { + SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session); + } + [self _enableVideoStabilizationIfSupported]; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session); + [session removeOutput:_videoDataOutput]; +} + +- (void)_cancelFlushOutdatedPreview +{ + SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock); + if (_flushOutdatedPreviewBlock) { + dispatch_block_cancel(_flushOutdatedPreviewBlock); + _flushOutdatedPreviewBlock = nil; + } +} + +- (SCQueuePerformer *)callbackPerformer +{ + // If sticky video tweak is on, use a separated performer queue + if (_keepLateFrames) { + if (!_callbackPerformer) { + _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + } + return _callbackPerformer; + } + return _performer; +} + +- (void)pauseStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming); + if (_isStreaming) { + _isStreaming = NO; + [_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + } + } + @weakify(self); + _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{ + SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock"); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [self->_sampleBufferDisplayController flushOutdatedPreview]; + }); + [_performer perform:_flushOutdatedPreviewBlock + after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? 
kSCManagedVideoStreamerStalledDisplay : 0]; + [_performer perform:^{ + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + }]; + } +} + +- (void)stopStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming); + if (_isStreaming) { + _isStreaming = NO; + [_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + } + } + } + [self _cancelFlushOutdatedPreview]; + [_performer perform:^{ + SCLogVideoStreamerInfo(@"stopStreaming in perfome queue"); + [_sampleBufferDisplayController flushOutdatedPreview]; + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + }]; +} + +- (void)beginConfiguration +{ + SCLogVideoStreamerInfo(@"enter beginConfiguration"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"performingConfigurations set to YES"); + _performingConfigurations = YES; + }]; +} + +- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition); + [self _enableVideoMirrorForDevicePosition:devicePosition]; + [self _enableVideoStabilizationIfSupported]; + [_performer perform:^{ + SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu", + (unsigned long)_devicePosition, (unsigned long)devicePosition); + if (_devicePosition != devicePosition) { + _devicePosition = devicePosition; + } + }]; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation +{ + SCTraceStart(); + // It is not neccessary call these changes on private queue, because is is just only data output configuration. + // It should be called from manged capturer queue to prevent lock capture session in two different(private and + // managed capturer) queues that will cause the deadlock. + SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu", + (unsigned long)_videoOrientation, (unsigned long)videoOrientation); + _videoOrientation = videoOrientation; + AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + connection.videoOrientation = _videoOrientation; +} + +- (void)setKeepLateFrames:(BOOL)keepLateFrames +{ + SCTraceStart(); + [_performer perform:^{ + SCTraceStart(); + if (keepLateFrames != _keepLateFrames) { + _keepLateFrames = keepLateFrames; + // Get and set corresponding queue base on keepLateFrames. + // We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideo anymore, because it will potentially + // result in lenses regression, and we could use all 15 sample buffers by adding a separated calllback + // queue. 
+ [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames); + } + }]; +} + +- (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0) +{ + _depthCaptureEnabled = enabled; + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled]; + if (enabled) { + _dataOutputSynchronizer = + [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]]; + [_dataOutputSynchronizer setDelegate:self queue:_performer.queue]; + } else { + _dataOutputSynchronizer = nil; + } +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + _portraitModePointOfInterest = pointOfInterest; +} + +- (BOOL)getKeepLateFrames +{ + return _keepLateFrames; +} + +- (void)commitConfiguration +{ + SCLogVideoStreamerInfo(@"enter commitConfiguration"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"performingConfigurations set to NO"); + _performingConfigurations = NO; + }]; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"add listener:%@", listener); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"remove listener:%@", listener); + [_announcer removeListener:listener]; +} + +- (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline +{ + SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline); + _processingPipeline = processingPipeline; + }]; +} + +- (void)removeProcessingPipeline +{ + SCLogVideoStreamerInfo(@"enter removeProcessingPipeline"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"processingPipeline set to nil"); + _processingPipeline = nil; + }]; +} + +- (BOOL)isVideoMirrored +{ + SCTraceStart(); + AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + return connection.isVideoMirrored; +} + +#pragma mark - Common Sample Buffer Handling + +- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + return [self didOutputSampleBuffer:sampleBuffer depthData:nil]; +} + +- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap +{ + // Don't send the sample buffer if we are perform configurations + if (_performingConfigurations) { + SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES"); + return; + } + SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); + + // We can't set alwaysDiscardsLateVideoFrames to YES when lens is activated because it will cause camera freezing. + // When alwaysDiscardsLateVideoFrames is set to NO, the late frames will not be dropped until it reach 15 frames, + // so we should simulate the dropping behaviour as AVFoundation do. 
+ NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + _lastDisplayedFrameTimestamp = presentationTime; + NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime; + // Log interval definied in macro LOG_INTERVAL, now is 3.0s + BOOL shouldLog = + (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) % + ((long)(kSCCaptureFrameRate * kSCLogInterval)) == + 0; + if (shouldLog) { + SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer); + } + if (_processingPipeline) { + RenderData renderData = { + .sampleBuffer = sampleBuffer, + .depthDataMap = depthDataMap, + .depthBlurPointOfInterest = + SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus() + ? &_portraitModePointOfInterest + : nil, + }; + // Ensure we are doing all render operations (i.e. accessing textures) on performer to prevent race condition + SCAssertPerformer(_performer); + sampleBuffer = [_processingPipeline render:renderData]; + + if (shouldLog) { + SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer, + _processingPipeline); + } + } + + if (sampleBuffer && _sampleBufferDisplayEnabled) { + // Send the buffer only if it is valid, set it to be displayed immediately (See the enqueueSampleBuffer method + // header, need to get attachments array and set the dictionary). + CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); + if (!attachmentsArray) { + SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer"); + } else if (CFArrayGetCount(attachmentsArray) > 0) { + CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0); + CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); + } + // Warn if frame that went through is not most recent enough. + if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) { + SCLogVideoStreamerWarning( + @"The sample buffer we received is too late, why? 
presentationTime:%lf frameLatency:%f", + presentationTime, frameLatency); + } + [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; + if (shouldLog) { + SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer); + } + + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + } + + if (shouldLog) { + SCLogVideoStreamerInfo(@"begin annoucing sampleBuffer:%p of devicePosition:%lu", sampleBuffer, + (unsigned long)_devicePosition); + } + [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition]; + if (shouldLog) { + SCLogVideoStreamerInfo(@"end annoucing sampleBuffer:%p", sampleBuffer); + } +} + +- (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + if (_performingConfigurations) { + return; + } + SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); + NSTimeInterval currentProcessingTime = CACurrentMediaTime(); + NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + // Only logging it when sticky tweak is on, which means sticky time is too long, and AVFoundation have to drop the + // sampleBuffer + if (_keepLateFrames) { + SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentProcessingTime, + currentSampleTime); + } + [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition]; +} + +#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate + +- (void)captureOutput:(AVCaptureOutput *)captureOutput +didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0) +{ + // Sticky video tweak is off, i.e. lenses is on, + // we use same queue for callback and processing, and let AVFoundation decide which frame should be dropped + if (!_keepLateFrames) { + [self didOutputSampleBuffer:sampleBuffer]; + } + // Sticky video tweak is on + else { + if ([_performer isCurrentPerformer]) { + // Note: there might be one frame callbacked in processing queue when switching callback queue, + // it should be fine. But if following log appears too much, it is not our design. + SCLogVideoStreamerWarning(@"The callback queue should be a separated queue when sticky tweak is on"); + } + // TODO: In sticky video v2, we should consider check free memory + if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) { + SCLogVideoStreamerWarning(@"processingBuffersCount reached to the max. 
current count:%d", + _processingBuffersCount); + [self didDropSampleBuffer:sampleBuffer]; + return; + } + atomic_fetch_add(&_processingBuffersCount, 1); + CFRetain(sampleBuffer); + // _performer should always be the processing queue + [_performer perform:^{ + [self didOutputSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + atomic_fetch_sub(&_processingBuffersCount, 1); + }]; + } +} + +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection +{ + [self didDropSampleBuffer:sampleBuffer]; +} + +#pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth) + +- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer + didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection + NS_AVAILABLE_IOS(11_0) +{ + AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection + synchronizedDataForCaptureOutput:_depthDataOutput]; + AVDepthData *depthData = nil; + if (syncedDepthData && !syncedDepthData.depthDataWasDropped) { + depthData = syncedDepthData.depthData; + } + + AVCaptureSynchronizedSampleBufferData *syncedVideoData = + (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection + synchronizedDataForCaptureOutput:_videoDataOutput]; + if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) { + CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer; + [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil]; + } +} + +#pragma mark - ARSessionDelegate + +- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0) +{ + NSString *state = nil; + NSString *reason = nil; + switch (camera.trackingState) { + case ARTrackingStateNormal: + state = @"Normal"; + break; + case ARTrackingStateLimited: + state = @"Limited"; + break; + case ARTrackingStateNotAvailable: + state = @"Not Available"; + break; + } + switch (camera.trackingStateReason) { + case ARTrackingStateReasonNone: + reason = @"None"; + break; + case ARTrackingStateReasonInitializing: + reason = @"Initializing"; + break; + case ARTrackingStateReasonExcessiveMotion: + reason = @"Excessive Motion"; + break; + case ARTrackingStateReasonInsufficientFeatures: + reason = @"Insufficient Features"; + break; +#if SC_AT_LEAST_SDK_11_3 + case ARTrackingStateReasonRelocalizing: + reason = @"Relocalizing"; + break; +#endif + } + SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason); +} + +- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) +{ +#ifdef SC_USE_ARKIT_FACE + // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific + // devices there are ARFrame instances that don't respond to `capturedDepthData`. + // (note: this was discovered to be due to some people staying on iOS 11 betas). 
+ AVDepthData *depth = nil; + if ([frame respondsToSelector:@selector(capturedDepthData)]) { + depth = frame.capturedDepthData; + } +#endif + + CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp; + // Don't deliver more than 30 frames per sec + BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; + +#ifdef SC_USE_ARKIT_FACE + if (depth) { + CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp; + framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; + } + +#endif + + SC_GUARD_ELSE_RETURN(framerateMinimumElapsed); + +#ifdef SC_USE_ARKIT_FACE + if (depth) { + self.lastDepthData = depth; + _lastDisplayedDepthFrameTimestamp = frame.timestamp; + } +#endif + + // Make sure that current frame is no longer being used, otherwise drop current frame. + SC_GUARD_ELSE_RETURN(self.currentFrame == nil); + + CVPixelBufferRef pixelBuffer = frame.capturedImage; + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000); + CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid}; + + CMVideoFormatDescriptionRef videoInfo; + CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); + + CMSampleBufferRef buffer; + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer); + CFRelease(videoInfo); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + self.currentFrame = frame; + [self didOutputSampleBuffer:buffer]; + [self _updateFieldOfViewWithARFrame:frame]; + + CFRelease(buffer); +} + +- (void)session:(ARSession *)session didAddAnchors:(NSArray *)anchors NS_AVAILABLE_IOS(11_0) +{ + for (ARAnchor *anchor in anchors) { + if ([anchor isKindOfClass:[ARPlaneAnchor class]]) { + SCLogVideoStreamerInfo(@"ARKit added plane anchor"); + return; + } + } +} + +- (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error); + [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]]; +} + +- (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerWarning(@"ARKit session interrupted"); +} + +- (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerInfo(@"ARKit interruption ended"); +} + +#pragma mark - Private methods + +- (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed +{ + for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) { + // Call the completion handlers. + dispatch_async(completion[0], completion[1]); + } + [_waitUntilSampleBufferDisplayedBlocks removeAllObjects]; +} + +// This is the magic that ensures the VideoDataOutput will have the correct +// orientation. 
+- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition); + AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + connection.videoOrientation = _videoOrientation; + if (devicePosition == SCManagedCaptureDevicePositionFront) { + connection.videoMirrored = YES; + } +} + +- (void)_enableVideoStabilizationIfSupported +{ + SCTraceStart(); + if (!SCCameraTweaksEnableVideoStabilization()) { + SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization"); + return; + } + + AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + if (!videoConnection) { + SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection); + return; + } + // Set the video stabilization mode to auto. Default is off. + if ([videoConnection isVideoStabilizationSupported]) { + videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported + ? AVCaptureVideoStabilizationModeStandard + : AVCaptureVideoStabilizationModeOff; + NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) }; + [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params]; + SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@", + (long)videoConnection.preferredVideoStabilizationMode, videoConnection); + } else { + SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection); + } +} + +- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported +{ + SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported); + _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported; + [self _enableVideoStabilizationIfSupported]; +} + +- (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) +{ + SC_GUARD_ELSE_RETURN(frame.camera); + CGSize imageResolution = frame.camera.imageResolution; + matrix_float3x3 intrinsics = frame.camera.intrinsics; + float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI; + if (_fieldOfView != xFovDegrees) { + self.fieldOfView = xFovDegrees; + } +} + +- (NSString *)description +{ + return [self debugDescription]; +} + +- (NSString *)debugDescription +{ + NSDictionary *debugDict = @{ + @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No", + @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No", + @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No", + @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No" + }; + return [NSString sc_stringWithFormat:@"%@", debugDict]; +} + +@end diff --git a/ManagedCapturer/SCMetalUtils.h b/ManagedCapturer/SCMetalUtils.h new file mode 100644 index 0000000..211ada7 --- /dev/null +++ b/ManagedCapturer/SCMetalUtils.h @@ -0,0 +1,63 @@ +// +// SCMetalUtils.h +// Snapchat +// +// Created by Michel Loenngren on 7/11/17. +// +// Utility class for metal related helpers. 
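+//
+// A minimal usage sketch (illustrative only; assumes the caller owns a CVMetalTextureCacheRef created with
+// CVMetalTextureCacheCreate and a bi-planar YUV CVPixelBufferRef; the variable names are placeholders):
+//
+//   if (SCDeviceSupportsMetal()) {
+//       id<MTLTexture> yTexture =
+//           SCMetalTextureFromPixelBuffer(pixelBuffer, 0, MTLPixelFormatR8Unorm, textureCache);
+//       id<MTLTexture> cbCrTexture =
+//           SCMetalTextureFromPixelBuffer(pixelBuffer, 1, MTLPixelFormatRG8Unorm, textureCache);
+//       // ... encode Metal work against the textures, then copy a result texture back if needed:
+//       SCMetalCopyTexture(outputTexture, outputPixelBuffer, 0);
+//   }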
+ +#import +#if !TARGET_IPHONE_SIMULATOR +#import +#endif +#import + +#import + +SC_EXTERN_C_BEGIN + +#if !TARGET_IPHONE_SIMULATOR +extern id SCGetManagedCaptureMetalDevice(void); +#endif + +static SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void) +{ +#if TARGET_CPU_ARM64 + return YES; // All 64 bit system supports Metal. +#else + return NO; +#endif +} + +#if !TARGET_IPHONE_SIMULATOR +static inline id SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex, + MTLPixelFormat pixelFormat, + CVMetalTextureCacheRef textureCache) +{ + size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); + size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); + CVMetalTextureRef textureRef; + if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, + nil, pixelFormat, width, height, planeIndex, + &textureRef)) { + return nil; + } + id texture = CVMetalTextureGetTexture(textureRef); + CVBufferRelease(textureRef); + return texture; +} + +static inline void SCMetalCopyTexture(id texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex) +{ + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex); + NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex); + MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex), + CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)); + + [texture getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0]; + CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); +} +#endif + +SC_EXTERN_C_END diff --git a/ManagedCapturer/SCMetalUtils.m b/ManagedCapturer/SCMetalUtils.m new file mode 100644 index 0000000..79c58d3 --- /dev/null +++ b/ManagedCapturer/SCMetalUtils.m @@ -0,0 +1,25 @@ +// +// SCMetalUtils.m +// Snapchat +// +// Created by Michel Loenngren on 8/16/17. +// +// + +#import "SCMetalUtils.h" + +#import + +id SCGetManagedCaptureMetalDevice(void) +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + static dispatch_once_t onceToken; + static id device; + dispatch_once(&onceToken, ^{ + device = MTLCreateSystemDefaultDevice(); + }); + return device; +#endif + return nil; +} diff --git a/ManagedCapturer/SCScanConfiguration.h b/ManagedCapturer/SCScanConfiguration.h new file mode 100644 index 0000000..738e813 --- /dev/null +++ b/ManagedCapturer/SCScanConfiguration.h @@ -0,0 +1,18 @@ +// +// SCScanConfiguration.h +// Snapchat +// +// Created by Yang Dai on 3/7/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturer.h" + +#import + +@interface SCScanConfiguration : NSObject + +@property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler; +@property (nonatomic, strong) SCUserSession *userSession; + +@end diff --git a/ManagedCapturer/SCScanConfiguration.m b/ManagedCapturer/SCScanConfiguration.m new file mode 100644 index 0000000..9be8200 --- /dev/null +++ b/ManagedCapturer/SCScanConfiguration.m @@ -0,0 +1,13 @@ +// +// SCScanConfiguration.m +// Snapchat +// +// Created by Yang Dai on 3/7/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
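+//
+// SCScanConfiguration is a plain container for a scan request: it carries the scan results handler block
+// and the SCUserSession the scan should run against.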
+// + +#import "SCScanConfiguration.h" + +@implementation SCScanConfiguration + +@end diff --git a/ManagedCapturer/SCSingleFrameStreamCapturer.h b/ManagedCapturer/SCSingleFrameStreamCapturer.h new file mode 100644 index 0000000..a154430 --- /dev/null +++ b/ManagedCapturer/SCSingleFrameStreamCapturer.h @@ -0,0 +1,17 @@ +// +// SCSingleFrameStreamCapturer.h +// Snapchat +// +// Created by Benjamin Hollis on 5/3/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCCaptureCommon.h" + +#import + +#import + +@interface SCSingleFrameStreamCapturer : NSObject +- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler; +@end diff --git a/ManagedCapturer/SCSingleFrameStreamCapturer.m b/ManagedCapturer/SCSingleFrameStreamCapturer.m new file mode 100644 index 0000000..38813b5 --- /dev/null +++ b/ManagedCapturer/SCSingleFrameStreamCapturer.m @@ -0,0 +1,103 @@ +// +// SCSingleFrameStreamCapturer.m +// Snapchat +// +// Created by Benjamin Hollis on 5/3/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCSingleFrameStreamCapturer.h" + +#import "SCManagedCapturer.h" + +@implementation SCSingleFrameStreamCapturer { + sc_managed_capturer_capture_video_frame_completion_handler_t _callback; +} + +- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler +{ + self = [super init]; + if (self) { + _callback = completionHandler; + } + return self; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + if (_callback) { + UIImage *image = [self imageFromSampleBuffer:sampleBuffer]; + _callback(image); + } + _callback = nil; +} + +/** + * Decode a CMSampleBufferRef to our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, + * as set in SCManagedVideoStreamer) to a UIImage. + * + * Code from http://stackoverflow.com/a/31553521/11284 + */ +#define clamp(a) (a > 255 ? 255 : (a < 0 ? 
0 : a)) +// TODO: Use the transform code from SCImageProcessIdentityYUVCommand +- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + CVPixelBufferLockBaseAddress(imageBuffer, 0); + + size_t width = CVPixelBufferGetWidth(imageBuffer); + size_t height = CVPixelBufferGetHeight(imageBuffer); + uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); + size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0); + uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1); + size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1); + + int bytesPerPixel = 4; + uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel); + + for (int y = 0; y < height; y++) { + uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel]; + uint8_t *yBufferLine = &yBuffer[y * yPitch]; + uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch]; + + for (int x = 0; x < width; x++) { + int16_t y = yBufferLine[x]; + int16_t cb = cbCrBufferLine[x & ~1] - 128; + int16_t cr = cbCrBufferLine[x | 1] - 128; + + uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel]; + + int16_t r = (int16_t)roundf(y + cr * 1.4); + int16_t g = (int16_t)roundf(y + cb * -0.343 + cr * -0.711); + int16_t b = (int16_t)roundf(y + cb * 1.765); + + rgbOutput[0] = 0xff; + rgbOutput[1] = clamp(b); + rgbOutput[2] = clamp(g); + rgbOutput[3] = clamp(r); + } + } + + CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); + CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, + kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast); + CGImageRef quartzImage = CGBitmapContextCreateImage(context); + + // TODO: Hardcoding UIImageOrientationRight seems cheesy + UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight]; + + CGContextRelease(context); + CGColorSpaceRelease(colorSpace); + CGImageRelease(quartzImage); + free(rgbBuffer); + + CVPixelBufferUnlockBaseAddress(imageBuffer, 0); + + return image; +} + +@end diff --git a/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h new file mode 100644 index 0000000..1704e53 --- /dev/null +++ b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h @@ -0,0 +1,19 @@ +// +// SCStillImageCaptureVideoInputMethod.h +// Snapchat +// +// Created by Alexander Grytsiuk on 3/16/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturerState.h" + +#import + +@interface SCStillImageCaptureVideoInputMethod : NSObject + +- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state + successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, + NSError *error))successBlock + failureBlock:(void (^)(NSError *error))failureBlock; +@end diff --git a/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m new file mode 100644 index 0000000..ea6cb05 --- /dev/null +++ b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m @@ -0,0 +1,140 @@ +// +// SCStillImageCaptureVideoInputMethod.m +// Snapchat +// +// Created by Alexander Grytsiuk on 3/16/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
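+//
+// Captures a "still image" by pulling the next pixel buffer from the current video data source (only when that
+// source is an SCManagedVideoFileStreamer), converting it to a UIImage and returning JPEG data via the success
+// block; otherwise the failure block is called.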
+// + +#import "SCStillImageCaptureVideoInputMethod.h" + +#import "SCManagedCapturer.h" +#import "SCManagedVideoFileStreamer.h" + +typedef unsigned char uchar_t; +int clamp(int val, int low, int high) +{ + if (val < low) + val = low; + if (val > high) + val = high; + return val; +} + +void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b) +{ + double red = yValue + (1.370705 * (vValue - 128)); + double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128)); + double blue = yValue + (1.732446 * (uValue - 128)); + *r = clamp(red, 0, 255); + *g = clamp(green, 0, 255); + *b = clamp(blue, 0, 255); +} + +void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel, + int rgbBytesPerRow) +{ + uchar_t *uvData = nv21Data + height * width; + for (int h = 0; h < height; h++) { + uchar_t *yRowBegin = nv21Data + h * width; + uchar_t *uvRowBegin = uvData + h / 2 * width; + uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h; + for (int w = 0; w < width; w++) { + uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w; + yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]), + &(rgbPixelBegin[1]), &(rgbPixelBegin[2])); + } + } +} + +@implementation SCStillImageCaptureVideoInputMethod + +- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state + successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, + NSError *error))successBlock + failureBlock:(void (^)(NSError *error))failureBlock +{ + id videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource]; + if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) { + SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource; + [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) { + BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront; +#if TARGET_IPHONE_SIMULATOR + UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer]; + CGImageRef videoImage = uiImage.CGImage; + UIImage *capturedImage = [UIImage + imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage + scale:1.0 + orientation:UIImageOrientationRight]; +#else + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CIContext *temporaryContext = [CIContext contextWithOptions:nil]; + + CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)); + CGImageRef videoImage = + [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)]; + + UIImage *capturedImage = + [UIImage imageWithCGImage:shouldFlip ? 
[self flipCGImage:videoImage size:size].CGImage : videoImage + scale:1.0 + orientation:UIImageOrientationRight]; + + CGImageRelease(videoImage); +#endif + if (successBlock) { + successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil); + } + }]; + } else { + if (failureBlock) { + failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]); + } + } +} + +- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size +{ + UIGraphicsBeginImageContext(size); + CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage); + UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); + UIGraphicsEndImageContext(); + return image; +} + +- (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer +{ + CVPixelBufferLockBaseAddress(imageBuffer, 0); + + size_t width = CVPixelBufferGetWidth(imageBuffer); + size_t height = CVPixelBufferGetHeight(imageBuffer); + size_t rgbBytesPerPixel = 4; + size_t rgbBytesPerRow = width * rgbBytesPerPixel; + + uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); + uchar_t *rgbData = malloc(rgbBytesPerRow * height); + + convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow); + + CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); + CGContextRef context = + CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast); + CGImageRef cgImage = CGBitmapContextCreateImage(context); + + UIImage *result = [UIImage imageWithCGImage:cgImage]; + + CGImageRelease(cgImage); + CGContextRelease(context); + CGColorSpaceRelease(colorSpace); + free(rgbData); + + CVPixelBufferUnlockBaseAddress(imageBuffer, 0); + + return result; +} + +- (NSString *)methodName +{ + return @"VideoInput"; +} + +@end diff --git a/ManagedCapturer/SCTimedTask.h b/ManagedCapturer/SCTimedTask.h new file mode 100644 index 0000000..f5a4e15 --- /dev/null +++ b/ManagedCapturer/SCTimedTask.h @@ -0,0 +1,28 @@ +// +// SCTimedTask.h +// Snapchat +// +// Created by Michel Loenngren on 4/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import +#import + +/* + Block based timed task + */ +@interface SCTimedTask : NSObject + +@property (nonatomic, assign) CMTime targetTime; +@property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond); + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithTargetTime:(CMTime)targetTime + task:(void (^)(CMTime relativePresentationTime, + CGFloat sessionStartTimeDelayInSecond))task; + +- (NSString *)description; + +@end diff --git a/ManagedCapturer/SCTimedTask.m b/ManagedCapturer/SCTimedTask.m new file mode 100644 index 0000000..babf445 --- /dev/null +++ b/ManagedCapturer/SCTimedTask.m @@ -0,0 +1,32 @@ +// +// SCTimedTask.m +// Snapchat +// +// Created by Michel Loenngren on 4/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
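+//
+// Construction is trivial; the task block is copied and later fired by whichever scheduler owns the instance
+// (not part of this file). A sketch:
+//
+//   SCTimedTask *task =
+//       [[SCTimedTask alloc] initWithTargetTime:CMTimeMake(3, 1) // fire once the target time (~3s) is reached
+//                                          task:^(CMTime relativePresentationTime,
+//                                                 CGFloat sessionStartTimeDelayInSecond) {
+//                                              // e.g. stop recording or update UI
+//                                          }];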
+// + +#import "SCTimedTask.h" + +#import + +@implementation SCTimedTask + +- (instancetype)initWithTargetTime:(CMTime)targetTime + task: + (void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task +{ + if (self = [super init]) { + _targetTime = targetTime; + _task = task; + } + return self; +} + +- (NSString *)description +{ + return [NSString + sc_stringWithFormat:@"<%@: %p, targetTime: %lld>", NSStringFromClass([self class]), self, _targetTime.value]; +} + +@end diff --git a/ManagedCapturer/SCVideoCaptureSessionInfo.h b/ManagedCapturer/SCVideoCaptureSessionInfo.h new file mode 100644 index 0000000..b89da3e --- /dev/null +++ b/ManagedCapturer/SCVideoCaptureSessionInfo.h @@ -0,0 +1,83 @@ +// +// SCVideoCaptureSessionInfo.h +// Snapchat +// +// Created by Michel Loenngren on 3/27/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import + +#import +#import + +typedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) { + SCManagedVideoCapturerInfoAudioQueueError, + SCManagedVideoCapturerInfoAssetWriterError, + SCManagedVideoCapturerInfoAudioSessionError, + SCManagedVideoCapturerInfoAudioQueueRetrySuccess, + SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue, + SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware +}; + +typedef u_int32_t sc_managed_capturer_recording_session_t; + +/* + Container object holding information about the + current recording session. + */ +typedef struct { + CMTime startTime; + CMTime endTime; + CMTime duration; + sc_managed_capturer_recording_session_t sessionId; +} SCVideoCaptureSessionInfo; + +static inline SCVideoCaptureSessionInfo SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime, + sc_managed_capturer_recording_session_t sessionId) +{ + SCVideoCaptureSessionInfo session; + session.startTime = startTime; + session.endTime = endTime; + if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) { + session.duration = CMTimeSubtract(endTime, startTime); + } else { + session.duration = kCMTimeInvalid; + } + session.sessionId = sessionId; + return session; +} + +static inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo) +{ + if (CMTIME_IS_VALID(sessionInfo.startTime)) { + if (CMTIME_IS_VALID(sessionInfo.endTime)) { + return CMTimeGetSeconds(sessionInfo.duration); + } + return CACurrentMediaTime() - CMTimeGetSeconds(sessionInfo.startTime); + } + return 0; +} + +static inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label) +{ + if (CMTIME_IS_VALID(time)) { + return [NSString sc_stringWithFormat:@"%@: %f", label, CMTimeGetSeconds(time)]; + } else { + return [NSString sc_stringWithFormat:@"%@: Invalid", label]; + } +} + +static inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo) +{ + NSMutableString *description = [NSMutableString new]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @"StartTime")]; + [description appendString:@", "]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @"EndTime")]; + [description appendString:@", "]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @"Duration")]; + [description appendString:@", "]; + [description appendString:[NSString sc_stringWithFormat:@"Id: %u", sessionInfo.sessionId]]; + + return [description copy]; +} diff --git a/ManagedCapturer/UIScreen+Debug.h 
b/ManagedCapturer/UIScreen+Debug.h new file mode 100644 index 0000000..58d54a1 --- /dev/null +++ b/ManagedCapturer/UIScreen+Debug.h @@ -0,0 +1,13 @@ +// +// UIScreen+Debug.h +// Snapchat +// +// Created by Derek Peirce on 6/1/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import + +@interface UIScreen (Debug) + +@end diff --git a/ManagedCapturer/UIScreen+Debug.m b/ManagedCapturer/UIScreen+Debug.m new file mode 100644 index 0000000..26a121c --- /dev/null +++ b/ManagedCapturer/UIScreen+Debug.m @@ -0,0 +1,28 @@ + +#import "UIScreen+Debug.h" + +#import +#import + +#import + +@implementation UIScreen (Debug) ++ (void)load +{ + if (SCIsPerformanceLoggingEnabled()) { + static dispatch_once_t once_token; + dispatch_once(&once_token, ^{ + SEL setBrightnessSelector = @selector(setBrightness:); + SEL setBrightnessLoggerSelector = @selector(logged_setBrightness:); + Method originalMethod = class_getInstanceMethod(self, setBrightnessSelector); + Method extendedMethod = class_getInstanceMethod(self, setBrightnessLoggerSelector); + method_exchangeImplementations(originalMethod, extendedMethod); + }); + } +} +- (void)logged_setBrightness:(CGFloat)brightness +{ + SCLogGeneralInfo(@"Setting brightness from %f to %f", self.brightness, brightness); + [self logged_setBrightness:brightness]; +} +@end