Jonny Banana committed via GitHub, 6 years ago
83 changed files with 12299 additions and 0 deletions
+67    ManagedCapturer/SCManagedCaptureSession.h
+74    ManagedCapturer/SCManagedCaptureSession.m
+26    ManagedCapturer/SCManagedCapturer.m
+26    ManagedCapturer/SCManagedCapturerARSessionHandler.h
+76    ManagedCapturer/SCManagedCapturerARSessionHandler.m
+135   ManagedCapturer/SCManagedCapturerListener.h
+12    ManagedCapturer/SCManagedCapturerListenerAnnouncer.h
+505   ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm
+26    ManagedCapturer/SCManagedCapturerSampleMetadata.h
+24    ManagedCapturer/SCManagedCapturerSampleMetadata.m
+93    ManagedCapturer/SCManagedCapturerState.h
+359   ManagedCapturer/SCManagedCapturerState.m
+20    ManagedCapturer/SCManagedCapturerState.value
+46    ManagedCapturer/SCManagedCapturerStateBuilder.h
+158   ManagedCapturer/SCManagedCapturerStateBuilder.m
+36    ManagedCapturer/SCManagedCapturerUtils.h
+153   ManagedCapturer/SCManagedCapturerUtils.m
+57    ManagedCapturer/SCManagedCapturerV1.h
+2165  ManagedCapturer/SCManagedCapturerV1.m
+20    ManagedCapturer/SCManagedCapturerV1_Private.h
+32    ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h
+294   ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m
+20    ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h
+72    ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m
+35    ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h
+12    ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h
+146   ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm
+25    ManagedCapturer/SCManagedDroppedFramesReporter.h
+86    ManagedCapturer/SCManagedDroppedFramesReporter.m
+57    ManagedCapturer/SCManagedFrameHealthChecker.h
+709   ManagedCapturer/SCManagedFrameHealthChecker.m
+18    ManagedCapturer/SCManagedFrontFlashController.h
+105   ManagedCapturer/SCManagedFrontFlashController.m
+13    ManagedCapturer/SCManagedLegacyStillImageCapturer.h
+460   ManagedCapturer/SCManagedLegacyStillImageCapturer.m
+13    ManagedCapturer/SCManagedPhotoCapturer.h
+667   ManagedCapturer/SCManagedPhotoCapturer.m
+36    ManagedCapturer/SCManagedRecordedVideo.h
+180   ManagedCapturer/SCManagedRecordedVideo.m
+6     ManagedCapturer/SCManagedRecordedVideo.value
+92    ManagedCapturer/SCManagedStillImageCapturer.h
+399   ManagedCapturer/SCManagedStillImageCapturer.mm
+21    ManagedCapturer/SCManagedStillImageCapturerHandler.h
+85    ManagedCapturer/SCManagedStillImageCapturerHandler.m
+63    ManagedCapturer/SCManagedStillImageCapturer_Protected.h
+24    ManagedCapturer/SCManagedVideoARDataSource.h
+102   ManagedCapturer/SCManagedVideoCapturer.h
+1107  ManagedCapturer/SCManagedVideoCapturer.m
+20    ManagedCapturer/SCManagedVideoCapturerHandler.h
+252   ManagedCapturer/SCManagedVideoCapturerHandler.m
+27    ManagedCapturer/SCManagedVideoCapturerLogger.h
+77    ManagedCapturer/SCManagedVideoCapturerLogger.m
+48    ManagedCapturer/SCManagedVideoCapturerOutputSettings.h
+221   ManagedCapturer/SCManagedVideoCapturerOutputSettings.m
+10    ManagedCapturer/SCManagedVideoCapturerOutputSettings.value
+14    ManagedCapturer/SCManagedVideoCapturerOutputType.h
+25    ManagedCapturer/SCManagedVideoCapturerTimeObserver.h
+61    ManagedCapturer/SCManagedVideoCapturerTimeObserver.m
+26    ManagedCapturer/SCManagedVideoFileStreamer.h
+299   ManagedCapturer/SCManagedVideoFileStreamer.m
+22    ManagedCapturer/SCManagedVideoFrameSampler.h
+65    ManagedCapturer/SCManagedVideoFrameSampler.m
+44    ManagedCapturer/SCManagedVideoNoSoundLogger.h
+283   ManagedCapturer/SCManagedVideoNoSoundLogger.m
+35    ManagedCapturer/SCManagedVideoScanner.h
+299   ManagedCapturer/SCManagedVideoScanner.m
+15    ManagedCapturer/SCManagedVideoStreamReporter.h
+58    ManagedCapturer/SCManagedVideoStreamReporter.m
+36    ManagedCapturer/SCManagedVideoStreamer.h
+823   ManagedCapturer/SCManagedVideoStreamer.m
+63    ManagedCapturer/SCMetalUtils.h
+25    ManagedCapturer/SCMetalUtils.m
+18    ManagedCapturer/SCScanConfiguration.h
+13    ManagedCapturer/SCScanConfiguration.m
+17    ManagedCapturer/SCSingleFrameStreamCapturer.h
+103   ManagedCapturer/SCSingleFrameStreamCapturer.m
+19    ManagedCapturer/SCStillImageCaptureVideoInputMethod.h
+140   ManagedCapturer/SCStillImageCaptureVideoInputMethod.m
+28    ManagedCapturer/SCTimedTask.h
+32    ManagedCapturer/SCTimedTask.m
+83    ManagedCapturer/SCVideoCaptureSessionInfo.h
+13    ManagedCapturer/UIScreen+Debug.h
+28    ManagedCapturer/UIScreen+Debug.m
ManagedCapturer/SCManagedCaptureSession.h
@@ -0,0 +1,67 @@
//
//  SCManagedCaptureSession.h
//  Snapchat
//
//  Created by Derek Wang on 02/03/2018.
//

#import <SCBase/SCMacros.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

/**
 `SCManagedCaptureSession` is a wrapper class around `AVCaptureSession`. Its purpose is to provide additional
 functionality on top of `AVCaptureSession`.
 For example, black camera detection needs to monitor when certain methods are called. It can also be treated as a
 more stable version of `AVCaptureSession`: by moving `AVCaptureSession` fixing logic into this class, it can provide
 reliable interfaces to the outside. That would be the next step.
 It also mimics `AVCaptureSession` by implementing some of its methods. The original methods on `AVCaptureSession`
 should no longer be called directly.
 */

@class SCBlackCameraDetector;

NS_ASSUME_NONNULL_BEGIN
@interface SCManagedCaptureSession : NSObject

/**
 Exposes the wrapped avSession.
 */
@property (nonatomic, strong, readonly) AVCaptureSession *avSession;

/**
 Exposes the avSession isRunning property for convenience.
 */
@property (nonatomic, readonly, assign) BOOL isRunning;

/**
 Wraps and monitors [AVCaptureSession startRunning]. [AVCaptureSession startRunning] should not be called directly.
 */
- (void)startRunning;
/**
 Wraps and monitors [AVCaptureSession stopRunning]. [AVCaptureSession stopRunning] should not be called directly.
 */
- (void)stopRunning;

/**
 Wraps and monitors [AVCaptureSession beginConfiguration].
 */
- (void)beginConfiguration;
/**
 Wraps and monitors [AVCaptureSession commitConfiguration].
 */
- (void)commitConfiguration;
/**
 Configures the internal AVCaptureSession within a block.
 @param block configuration block, executed between beginConfiguration and commitConfiguration
 */
- (void)performConfiguration:(void (^)(void))block;

- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER;
SC_INIT_AND_NEW_UNAVAILABLE

@end
NS_ASSUME_NONNULL_END
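
A minimal usage sketch (not part of this commit; the `detector` instance is assumed to come from the app's dependency graph):

// Hypothetical call site: begin/commit are paired automatically by
// -performConfiguration:, and running is started through the wrapper only.
SCManagedCaptureSession *managedSession =
    [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:detector];
[managedSession performConfiguration:^{
    // Mutate the wrapped session here, e.g. change the preset.
    if ([managedSession.avSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        managedSession.avSession.sessionPreset = AVCaptureSessionPresetHigh;
    }
}];
[managedSession startRunning]; // never call -[AVCaptureSession startRunning] directly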
ManagedCapturer/SCManagedCaptureSession.m
@@ -0,0 +1,74 @@
//
//  SCManagedCaptureSession.m
//  Snapchat
//
//  Created by Derek Wang on 02/03/2018.
//

#import "SCManagedCaptureSession.h"

#import "SCBlackCameraDetector.h"

#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedCaptureSession () {
    SCBlackCameraDetector *_blackCameraDetector;
}

@end

@implementation SCManagedCaptureSession

- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector
{
    self = [super init];
    if (self) {
        _avSession = [[AVCaptureSession alloc] init];
        _blackCameraDetector = detector;
    }
    return self;
}

- (void)startRunning
{
    SCTraceODPCompatibleStart(2);
    [_blackCameraDetector sessionWillCallStartRunning];
    [_avSession startRunning];
    [_blackCameraDetector sessionDidCallStartRunning];
}

- (void)stopRunning
{
    SCTraceODPCompatibleStart(2);
    [_blackCameraDetector sessionWillCallStopRunning];
    [_avSession stopRunning];
    [_blackCameraDetector sessionDidCallStopRunning];
}

- (void)performConfiguration:(nonnull void (^)(void))block
{
    SC_GUARD_ELSE_RETURN(block);
    [self beginConfiguration];
    block();
    [self commitConfiguration];
}

- (void)beginConfiguration
{
    [_avSession beginConfiguration];
}

- (void)commitConfiguration
{
    SCTraceODPCompatibleStart(2);
    [_blackCameraDetector sessionWillCommitConfiguration];
    [_avSession commitConfiguration];
    [_blackCameraDetector sessionDidCommitConfiguration];
}

- (BOOL)isRunning
{
    return _avSession.isRunning;
}

@end
ManagedCapturer/SCManagedCapturer.m
@@ -0,0 +1,26 @@
//
//  SCManagedCapturer.m
//  Snapchat
//
//  Created by Lin Jia on 9/28/17.
//

#import "SCManagedCapturer.h"

#import "SCCameraTweaks.h"
#import "SCCaptureCore.h"
#import "SCManagedCapturerV1.h"

@implementation SCManagedCapturer

+ (id<SCCapturer>)sharedInstance
{
    static dispatch_once_t onceToken;
    static id<SCCapturer> managedCapturer;
    dispatch_once(&onceToken, ^{
        managedCapturer = [[SCCaptureCore alloc] init];
    });
    return managedCapturer;
}

@end
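
The dispatch_once makes the lazily created SCCaptureCore instance thread-safe; a small sketch of what callers can rely on (hypothetical call site, not part of the commit):

// The same SCCaptureCore-backed instance is observed from every thread.
id<SCCapturer> capturer = [SCManagedCapturer sharedInstance];
NSCAssert(capturer == [SCManagedCapturer sharedInstance], @"singleton identity");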
ManagedCapturer/SCManagedCapturerARSessionHandler.h
@@ -0,0 +1,26 @@
//
//  SCManagedCapturerARSessionHandler.h
//  Snapchat
//
//  Created by Xiaokang Liu on 16/03/2018.
//
//  This class handles AVCaptureSession events when an ARSession is enabled.
//  stopARSessionRunning blocks until the AVCaptureSessionDidStopRunningNotification event has been received,
//  after which the AVCaptureSession can be restarted gracefully.

#import <SCBase/SCMacros.h>

#import <Foundation/Foundation.h>

@class SCCaptureResource;

@interface SCManagedCapturerARSessionHandler : NSObject

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;

- (void)stopObserving;

- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0);
@end
ManagedCapturer/SCManagedCapturerARSessionHandler.m
@@ -0,0 +1,76 @@
//
//  SCManagedCapturerARSessionHandler.m
//  Snapchat
//
//  Created by Xiaokang Liu on 16/03/2018.
//

#import "SCManagedCapturerARSessionHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"

#import <SCBase/SCAvailability.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@import ARKit;

static CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2;

@interface SCManagedCapturerARSessionHandler () {
    SCCaptureResource *__weak _captureResource;
    dispatch_semaphore_t _arSessionShutdownSemaphore;
}

@end

@implementation SCManagedCapturerARSessionHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
        _arSessionShutdownSemaphore = dispatch_semaphore_create(0);
    }
    return self;
}

- (void)stopObserving
{
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStopRunningNotification
                                                  object:nil];
}

- (void)stopARSessionRunning
{
    SCAssertPerformer(_captureResource.queuePerformer);
    SCAssert(SC_AT_LEAST_IOS_11, @"Should only be called on iOS 11+");
    if (@available(iOS 11.0, *)) {
        // ARSession stops its internal AVCaptureSession asynchronously. We listen for its callback and only restart
        // our own capture session once ARKit's has finished shutting down, so the two capture sessions don't conflict.
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_completeARSessionShutdown:)
                                                     name:AVCaptureSessionDidStopRunningNotification
                                                   object:nil];
        [_captureResource.arSession pause];
        dispatch_semaphore_wait(
            _arSessionShutdownSemaphore,
            dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC)));
    }
}

- (void)_completeARSessionShutdown:(NSNotification *)note
{
    // This notification is only registered immediately before ARKit shutdown.
    // Explicitly guard that the notification object is NOT the main session's.
    SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]);
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStopRunningNotification
                                                  object:nil];
    dispatch_semaphore_signal(_arSessionShutdownSemaphore);
}
@end
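
The shutdown handoff above is an instance of a general pattern: block a serial work queue on a semaphore, with a timeout, until a notification confirms an asynchronous teardown. A stripped-down sketch of the same pattern outside the SC* infrastructure (hypothetical names; `arSession` assumed to exist):

// Kick off asynchronous work, then wait (up to 2 seconds) for the
// notification that confirms it finished; the observer block signals us.
dispatch_semaphore_t done = dispatch_semaphore_create(0);
id observer = [[NSNotificationCenter defaultCenter]
    addObserverForName:AVCaptureSessionDidStopRunningNotification
                object:nil
                 queue:nil
            usingBlock:^(NSNotification *note) {
                dispatch_semaphore_signal(done);
            }];
[arSession pause]; // asynchronously stops ARKit's internal capture session
dispatch_semaphore_wait(done, dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC)));
[[NSNotificationCenter defaultCenter] removeObserver:observer];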
ManagedCapturer/SCManagedCapturerListener.h
@@ -0,0 +1,135 @@
//#!announcer.rb
//
//  SCManagedCapturerListener
//  Snapchat
//
//  Created by Liu Liu on 4/23/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCapturer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedRecordedVideo.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCFoundation/SCFuture.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@class SCManagedCapturer;
@class SCManagedCapturerState;
@class LSAGLView;
@class SCManagedCapturerSampleMetadata;

@protocol SCManagedCapturerListener <NSObject>

@optional

// All these callbacks are invoked on the main queue

// Start / stop / reset

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state;

// Change state methods

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state;

// The video preview layer is not maintained as part of the state; therefore, its change is not related to the state
// of the camera at all, and listeners should only manage the setup of the videoPreviewLayer.
// Since an AVCaptureVideoPreviewLayer can only attach to one AVCaptureSession per app, it is recommended that you
// have a view and controller which manage the video preview layer, and that upper layers only manage that view
// or view controller, which maintains pointer consistency. The video preview layer needs to be recreated
// every now and then, because otherwise the old video preview layer may contain residual images.

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView;

// Video recording-related methods

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didBeginVideoRecording:(SCManagedCapturerState *)state
                   session:(SCVideoCaptureSessionInfo)session;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didBeginAudioRecording:(SCManagedCapturerState *)state
                   session:(SCVideoCaptureSessionInfo)session;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    willFinishRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
    recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
              videoSize:(CGSize)videoSize
       placeholderImage:(UIImage *)placeholderImage;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didFinishRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
          recordedVideo:(SCManagedRecordedVideo *)recordedVideo;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
       didFailRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
                  error:(NSError *)error;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didCancelRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
            didGetError:(NSError *)error
                forType:(SCManagedVideoCapturerInfoType)type
                session:(SCVideoCaptureSessionInfo)session;

- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;

// Photo methods
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
       willCapturePhoto:(SCManagedCapturerState *)state
         sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state;

- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state;

- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state;

// Face detection
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint;
@end
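
Because every method in the protocol is @optional, a conforming listener only implements the callbacks it cares about. A minimal hypothetical listener (not part of the commit):

@interface SCFlashIconUpdater : NSObject <SCManagedCapturerListener>
@end

@implementation SCFlashIconUpdater

// Invoked on the main queue, so UI can be updated directly.
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state
{
    // e.g. update a flash button with state.flashActive
}

@end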
ManagedCapturer/SCManagedCapturerListenerAnnouncer.h
@@ -0,0 +1,12 @@
// Generated by the announcer.rb DO NOT EDIT!!

#import "SCManagedCapturerListener.h"

#import <Foundation/Foundation.h>

@interface SCManagedCapturerListenerAnnouncer : NSObject <SCManagedCapturerListener>

- (BOOL)addListener:(id<SCManagedCapturerListener>)listener;
- (void)removeListener:(id<SCManagedCapturerListener>)listener;

@end
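
The announcer itself conforms to SCManagedCapturerListener, so the capturer can hold a single listener (the announcer) and fan out to many. A usage sketch (hypothetical instances; not part of the commit):

SCManagedCapturerListenerAnnouncer *announcer = [[SCManagedCapturerListenerAnnouncer alloc] init];
[announcer addListener:flashIconUpdater]; // returns NO if already registered
// The capturer invokes the announcer's listener methods; each call is forwarded
// to every registered listener that responds to the selector:
[announcer managedCapturer:capturer didChangeFlashActive:state];
[announcer removeListener:flashIconUpdater];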
ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm
@@ -0,0 +1,505 @@
// Generated by the announcer.rb DO NOT EDIT!!

#import "SCManagedCapturerListenerAnnouncer.h"

#include <mutex>
using std::lock_guard;
using std::mutex;
#include <vector>
using std::find;
using std::make_shared;
using std::shared_ptr;
using std::vector;

@implementation SCManagedCapturerListenerAnnouncer {
    mutex _mutex;
    shared_ptr<vector<__weak id<SCManagedCapturerListener>>> _listeners;
}

- (NSString *)description
{
    auto listeners = atomic_load(&self->_listeners);
    NSMutableString *desc = [NSMutableString string];
    [desc appendFormat:@"<SCManagedCapturerListenerAnnouncer %p>: [", self];
    if (listeners) {
        for (size_t i = 0; i < listeners->size(); ++i) {
            [desc appendFormat:@"%@", (*listeners)[i]];
            if (i != listeners->size() - 1) {
                [desc appendString:@", "];
            }
        }
    }
    [desc appendString:@"]"];
    return desc;
}

- (BOOL)addListener:(id<SCManagedCapturerListener>)listener
{
    lock_guard<mutex> lock(_mutex);
    auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();
    if (_listeners != nil) {
        // The listener we want to add already exists
        if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {
            return NO;
        }
        for (auto &one : *_listeners) {
            if (one != nil) {
                listeners->push_back(one);
            }
        }
        listeners->push_back(listener);
        atomic_store(&self->_listeners, listeners);
    } else {
        listeners->push_back(listener);
        atomic_store(&self->_listeners, listeners);
    }
    return YES;
}

- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
    lock_guard<mutex> lock(_mutex);
    if (_listeners == nil) {
        return;
    }
    // If the only item in the listener list is the one we want to remove, store it back to nil again
    if (_listeners->size() == 1 && (*_listeners)[0] == listener) {
        atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedCapturerListener>>>());
        return;
    }
    auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();
    for (auto &one : *_listeners) {
        if (one != nil && one != listener) {
            listeners->push_back(one);
        }
    }
    atomic_store(&self->_listeners, listeners);
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) {
                [listener managedCapturer:managedCapturer didStartRunning:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) {
                [listener managedCapturer:managedCapturer didStopRunning:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) {
                [listener managedCapturer:managedCapturer didResetFromRuntimeError:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) {
                [listener managedCapturer:managedCapturer didChangeState:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) {
                [listener managedCapturer:managedCapturer didChangeNightModeActive:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) {
                [listener managedCapturer:managedCapturer didChangePortraitModeActive:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) {
                [listener managedCapturer:managedCapturer didChangeFlashActive:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) {
                [listener managedCapturer:managedCapturer didChangeLensesActive:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) {
                [listener managedCapturer:managedCapturer didChangeARSessionActive:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) {
                [listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) {
                [listener managedCapturer:managedCapturer didChangeZoomFactor:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) {
                [listener managedCapturer:managedCapturer didChangeLowLightCondition:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) {
                [listener managedCapturer:managedCapturer didChangeAdjustingExposure:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) {
                [listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) {
                [listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) {
                [listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didBeginVideoRecording:(SCManagedCapturerState *)state
                   session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) {
                [listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didBeginAudioRecording:(SCManagedCapturerState *)state
                   session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) {
                [listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    willFinishRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
    recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
              videoSize:(CGSize)videoSize
       placeholderImage:(UIImage *)placeholderImage
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:
                                                  willFinishRecording:
                                                              session:
                                                  recordedVideoFuture:
                                                            videoSize:
                                                     placeholderImage:)]) {
                [listener managedCapturer:managedCapturer
                      willFinishRecording:state
                                  session:session
                      recordedVideoFuture:recordedVideoFuture
                                videoSize:videoSize
                         placeholderImage:placeholderImage];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didFinishRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
          recordedVideo:(SCManagedRecordedVideo *)recordedVideo
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) {
                [listener managedCapturer:managedCapturer
                       didFinishRecording:state
                                  session:session
                            recordedVideo:recordedVideo];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
       didFailRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
                  error:(NSError *)error
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) {
                [listener managedCapturer:managedCapturer didFailRecording:state session:session error:error];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didCancelRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) {
                [listener managedCapturer:managedCapturer didCancelRecording:state session:session];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
            didGetError:(NSError *)error
                forType:(SCManagedVideoCapturerInfoType)type
                session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) {
                [listener managedCapturer:managedCapturer didGetError:error forType:type session:session];
            }
        }
    }
}

- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) {
                [listener managedCapturerDidCallLenseResume:managedCapturer session:session];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) {
                [listener managedCapturer:managedCapturer
                    didAppendVideoSampleBuffer:sampleBuffer
                                sampleMetadata:sampleMetadata];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
       willCapturePhoto:(SCManagedCapturerState *)state
         sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) {
                [listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) {
                [listener managedCapturer:managedCapturer didCapturePhoto:state];
            }
        }
    }
}

- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:isUnderDeviceMotion:)]) {
                return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state];
            }
        }
    }
    return NO;
}

- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:shouldProcessFileInput:)]) {
                return [listener managedCapturer:managedCapturer shouldProcessFileInput:state];
            }
        }
    }
    return NO;
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) {
                [listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) {
                [listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint];
            }
        }
    }
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedCapturerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) {
                [listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint];
            }
        }
    }
}

@end
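
Announcements above never take the mutex: the listener vector is immutable once published, so readers snapshot it with atomic_load while writers (under _mutex) build a fresh copy and publish it with atomic_store. A condensed Objective-C++ sketch of the read/write split used throughout this file (illustrative only; `_listeners` is the announcer's ivar):

// Writer, under _mutex: build a new vector, then publish it atomically.
auto updated = std::make_shared<std::vector<__weak id<SCManagedCapturerListener>>>();
// ... copy surviving listeners into `updated`, append the new one ...
std::atomic_store(&_listeners, updated);

// Reader, no lock: snapshot the current vector and iterate it safely, even
// if a writer swaps in a new vector concurrently.
auto snapshot = std::atomic_load(&_listeners);
if (snapshot) {
    for (id<SCManagedCapturerListener> l : *snapshot) { /* announce to l */ }
}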
ManagedCapturer/SCManagedCapturerSampleMetadata.h
@@ -0,0 +1,26 @@
//
//  SCRecordingMetadata.h
//  Snapchat
//

#import <SCBase/SCMacros.h>

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface SCManagedCapturerSampleMetadata : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp
                                  fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER;

@property (nonatomic, readonly) CMTime presentationTimestamp;

@property (nonatomic, readonly) float fieldOfView;

@end

NS_ASSUME_NONNULL_END
ManagedCapturer/SCManagedCapturerSampleMetadata.m
@@ -0,0 +1,24 @@
//
//  SCRecordingMetadata.m
//  Snapchat
//

#import "SCManagedCapturerSampleMetadata.h"

NS_ASSUME_NONNULL_BEGIN

@implementation SCManagedCapturerSampleMetadata

- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView
{
    self = [super init];
    if (self) {
        _presentationTimestamp = presentationTimestamp;
        _fieldOfView = fieldOfView;
    }
    return self;
}

@end

NS_ASSUME_NONNULL_END
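
A sample-metadata instance is typically built at capture time from the current buffer's timestamp; a hypothetical construction site (`sampleBuffer` and `device` assumed to exist):

// Presentation timestamp from the sample buffer, field of view from the
// active device format.
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
SCManagedCapturerSampleMetadata *metadata =
    [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:pts
                                                               fieldOfView:device.activeFormat.videoFieldOfView];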
ManagedCapturer/SCManagedCapturerState.h
@@ -0,0 +1,93 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedCaptureDevice.h"

#import <AvailabilityMacros.h>

#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>

@protocol SCManagedCapturerState <NSObject, NSCoding, NSCopying>

@property (nonatomic, assign, readonly) BOOL isRunning;

@property (nonatomic, assign, readonly) BOOL isNightModeActive;

@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;

@property (nonatomic, assign, readonly) BOOL lowLightCondition;

@property (nonatomic, assign, readonly) BOOL adjustingExposure;

@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;

@property (nonatomic, assign, readonly) CGFloat zoomFactor;

@property (nonatomic, assign, readonly) BOOL flashSupported;

@property (nonatomic, assign, readonly) BOOL torchSupported;

@property (nonatomic, assign, readonly) BOOL flashActive;

@property (nonatomic, assign, readonly) BOOL torchActive;

@property (nonatomic, assign, readonly) BOOL lensesActive;

@property (nonatomic, assign, readonly) BOOL arSessionActive;

@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;

@property (nonatomic, assign, readonly) BOOL lensProcessorReady;

@end

@interface SCManagedCapturerState : NSObject <SCManagedCapturerState>

@property (nonatomic, assign, readonly) BOOL isRunning;

@property (nonatomic, assign, readonly) BOOL isNightModeActive;

@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;

@property (nonatomic, assign, readonly) BOOL lowLightCondition;

@property (nonatomic, assign, readonly) BOOL adjustingExposure;

@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;

@property (nonatomic, assign, readonly) CGFloat zoomFactor;

@property (nonatomic, assign, readonly) BOOL flashSupported;

@property (nonatomic, assign, readonly) BOOL torchSupported;

@property (nonatomic, assign, readonly) BOOL flashActive;

@property (nonatomic, assign, readonly) BOOL torchActive;

@property (nonatomic, assign, readonly) BOOL lensesActive;

@property (nonatomic, assign, readonly) BOOL arSessionActive;

@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;

@property (nonatomic, assign, readonly) BOOL lensProcessorReady;

- (instancetype)initWithIsRunning:(BOOL)isRunning
                isNightModeActive:(BOOL)isNightModeActive
             isPortraitModeActive:(BOOL)isPortraitModeActive
                lowLightCondition:(BOOL)lowLightCondition
                adjustingExposure:(BOOL)adjustingExposure
                   devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                       zoomFactor:(CGFloat)zoomFactor
                   flashSupported:(BOOL)flashSupported
                   torchSupported:(BOOL)torchSupported
                      flashActive:(BOOL)flashActive
                      torchActive:(BOOL)torchActive
                     lensesActive:(BOOL)lensesActive
                  arSessionActive:(BOOL)arSessionActive
               liveVideoStreaming:(BOOL)liveVideoStreaming
               lensProcessorReady:(BOOL)lensProcessorReady;

@end
ManagedCapturer/SCManagedCapturerState.m
@@ -0,0 +1,359 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedCapturerState.h"

#import <SCFoundation/SCValueObjectHelpers.h>

#import <FastCoding/FastCoder.h>

@implementation SCManagedCapturerState

static ptrdiff_t sSCManagedCapturerStateOffsets[0];
static BOOL sSCManagedCapturerStateHasOffsets;

- (instancetype)initWithIsRunning:(BOOL)isRunning
                isNightModeActive:(BOOL)isNightModeActive
             isPortraitModeActive:(BOOL)isPortraitModeActive
                lowLightCondition:(BOOL)lowLightCondition
                adjustingExposure:(BOOL)adjustingExposure
                   devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                       zoomFactor:(CGFloat)zoomFactor
                   flashSupported:(BOOL)flashSupported
                   torchSupported:(BOOL)torchSupported
                      flashActive:(BOOL)flashActive
                      torchActive:(BOOL)torchActive
                     lensesActive:(BOOL)lensesActive
                  arSessionActive:(BOOL)arSessionActive
               liveVideoStreaming:(BOOL)liveVideoStreaming
               lensProcessorReady:(BOOL)lensProcessorReady
{
    self = [super init];
    if (self) {
        _isRunning = isRunning;
        _isNightModeActive = isNightModeActive;
        _isPortraitModeActive = isPortraitModeActive;
        _lowLightCondition = lowLightCondition;
        _adjustingExposure = adjustingExposure;
        _devicePosition = devicePosition;
        _zoomFactor = zoomFactor;
        _flashSupported = flashSupported;
        _torchSupported = torchSupported;
        _flashActive = flashActive;
        _torchActive = torchActive;
        _lensesActive = lensesActive;
        _arSessionActive = arSessionActive;
        _liveVideoStreaming = liveVideoStreaming;
        _lensProcessorReady = lensProcessorReady;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _isRunning = [aDecoder decodeBoolForKey:@"isRunning"];
        _isNightModeActive = [aDecoder decodeBoolForKey:@"isNightModeActive"];
        _isPortraitModeActive = [aDecoder decodeBoolForKey:@"isPortraitModeActive"];
        _lowLightCondition = [aDecoder decodeBoolForKey:@"lowLightCondition"];
        _adjustingExposure = [aDecoder decodeBoolForKey:@"adjustingExposure"];
        _devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@"devicePosition"];
        _zoomFactor = [aDecoder decodeFloatForKey:@"zoomFactor"];
        _flashSupported = [aDecoder decodeBoolForKey:@"flashSupported"];
        _torchSupported = [aDecoder decodeBoolForKey:@"torchSupported"];
        _flashActive = [aDecoder decodeBoolForKey:@"flashActive"];
        _torchActive = [aDecoder decodeBoolForKey:@"torchActive"];
        _lensesActive = [aDecoder decodeBoolForKey:@"lensesActive"];
        _arSessionActive = [aDecoder decodeBoolForKey:@"arSessionActive"];
        _liveVideoStreaming = [aDecoder decodeBoolForKey:@"liveVideoStreaming"];
        _lensProcessorReady = [aDecoder decodeBoolForKey:@"lensProcessorReady"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeBool:_isRunning forKey:@"isRunning"];
    [aCoder encodeBool:_isNightModeActive forKey:@"isNightModeActive"];
    [aCoder encodeBool:_isPortraitModeActive forKey:@"isPortraitModeActive"];
    [aCoder encodeBool:_lowLightCondition forKey:@"lowLightCondition"];
    [aCoder encodeBool:_adjustingExposure forKey:@"adjustingExposure"];
    [aCoder encodeInteger:(NSInteger)_devicePosition forKey:@"devicePosition"];
    [aCoder encodeFloat:_zoomFactor forKey:@"zoomFactor"];
    [aCoder encodeBool:_flashSupported forKey:@"flashSupported"];
    [aCoder encodeBool:_torchSupported forKey:@"torchSupported"];
    [aCoder encodeBool:_flashActive forKey:@"flashActive"];
    [aCoder encodeBool:_torchActive forKey:@"torchActive"];
    [aCoder encodeBool:_lensesActive forKey:@"lensesActive"];
    [aCoder encodeBool:_arSessionActive forKey:@"arSessionActive"];
    [aCoder encodeBool:_liveVideoStreaming forKey:@"liveVideoStreaming"];
    [aCoder encodeBool:_lensProcessorReady forKey:@"lensProcessorReady"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
    [fasterCoder encodeBool:_adjustingExposure];
    [fasterCoder encodeBool:_arSessionActive];
    [fasterCoder encodeSInt32:_devicePosition];
    [fasterCoder encodeBool:_flashActive];
    [fasterCoder encodeBool:_flashSupported];
    [fasterCoder encodeBool:_isNightModeActive];
    [fasterCoder encodeBool:_isPortraitModeActive];
    [fasterCoder encodeBool:_isRunning];
    [fasterCoder encodeBool:_lensProcessorReady];
    [fasterCoder encodeBool:_lensesActive];
    [fasterCoder encodeBool:_liveVideoStreaming];
    [fasterCoder encodeBool:_lowLightCondition];
    [fasterCoder encodeBool:_torchActive];
    [fasterCoder encodeBool:_torchSupported];
    [fasterCoder encodeFloat64:_zoomFactor];
}

- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
    _adjustingExposure = (BOOL)[fasterDecoder decodeBool];
    _arSessionActive = (BOOL)[fasterDecoder decodeBool];
    _devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32];
    _flashActive = (BOOL)[fasterDecoder decodeBool];
    _flashSupported = (BOOL)[fasterDecoder decodeBool];
    _isNightModeActive = (BOOL)[fasterDecoder decodeBool];
    _isPortraitModeActive = (BOOL)[fasterDecoder decodeBool];
    _isRunning = (BOOL)[fasterDecoder decodeBool];
    _lensProcessorReady = (BOOL)[fasterDecoder decodeBool];
    _lensesActive = (BOOL)[fasterDecoder decodeBool];
    _liveVideoStreaming = (BOOL)[fasterDecoder decodeBool];
    _lowLightCondition = (BOOL)[fasterDecoder decodeBool];
    _torchActive = (BOOL)[fasterDecoder decodeBool];
    _torchSupported = (BOOL)[fasterDecoder decodeBool];
    _zoomFactor = (CGFloat)[fasterDecoder decodeFloat64];
}

- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 15633755733674300ULL:
            _adjustingExposure = (BOOL)val;
            break;
        case 11461798188076803ULL:
            _arSessionActive = (BOOL)val;
            break;
        case 12833337784991002ULL:
            _flashActive = (BOOL)val;
            break;
        case 51252237764061994ULL:
            _flashSupported = (BOOL)val;
            break;
        case 1498048848502287ULL:
            _isNightModeActive = (BOOL)val;
            break;
        case 56151582267629469ULL:
            _isPortraitModeActive = (BOOL)val;
            break;
        case 12346172623874083ULL:
            _isRunning = (BOOL)val;
            break;
        case 67168377441917657ULL:
            _lensProcessorReady = (BOOL)val;
            break;
        case 5791542045168142ULL:
            _lensesActive = (BOOL)val;
            break;
        case 28486888710545224ULL:
            _liveVideoStreaming = (BOOL)val;
            break;
        case 24071673583499455ULL:
            _lowLightCondition = (BOOL)val;
            break;
        case 40774429934225315ULL:
            _torchActive = (BOOL)val;
            break;
        case 41333098301057670ULL:
            _torchSupported = (BOOL)val;
            break;
    }
}

- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 66264093189780655ULL:
            _devicePosition = (SCManagedCaptureDevicePosition)val;
            break;
    }
}

- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 61340640993537628ULL:
            _zoomFactor = (CGFloat)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 10319810232046341562ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        15 /* Total */,
        FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32),
        FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) {
        return NO;
    }
    SCManagedCapturerState *other = (SCManagedCapturerState *)object;
    if (other->_isRunning != _isRunning) {
        return NO;
    }

    if (other->_isNightModeActive != _isNightModeActive) {
        return NO;
    }

    if (other->_isPortraitModeActive != _isPortraitModeActive) {
        return NO;
    }

    if (other->_lowLightCondition != _lowLightCondition) {
        return NO;
    }

    if (other->_adjustingExposure != _adjustingExposure) {
        return NO;
    }

    if (other->_devicePosition != _devicePosition) {
        return NO;
    }

    if (other->_zoomFactor != _zoomFactor) {
        return NO;
    }

    if (other->_flashSupported != _flashSupported) {
        return NO;
    }

    if (other->_torchSupported != _torchSupported) {
        return NO;
    }

    if (other->_flashActive != _flashActive) {
        return NO;
    }

    if (other->_torchActive != _torchActive) {
        return NO;
    }

    if (other->_lensesActive != _lensesActive) {
        return NO;
    }

    if (other->_arSessionActive != _arSessionActive) {
        return NO;
    }

    if (other->_liveVideoStreaming != _liveVideoStreaming) {
        return NO;
    }

    if (other->_lensProcessorReady != _lensProcessorReady) {
        return NO;
    }

    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {
        (NSUInteger)_isRunning,       (NSUInteger)_isNightModeActive, (NSUInteger)_isPortraitModeActive,
        (NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure, (NSUInteger)_devicePosition,
        (NSUInteger)_zoomFactor,      (NSUInteger)_flashSupported,    (NSUInteger)_torchSupported,
        (NSUInteger)_flashActive,     (NSUInteger)_torchActive,       (NSUInteger)_lensesActive,
        (NSUInteger)_arSessionActive, (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady};
    NSUInteger result = subhashes[0];
    for (int i = 1; i < 15; i++) {
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\tisRunning:%@\n", [@(_isRunning) description]];
    [desc appendFormat:@"\tisNightModeActive:%@\n", [@(_isNightModeActive) description]];
    [desc appendFormat:@"\tisPortraitModeActive:%@\n", [@(_isPortraitModeActive) description]];
    [desc appendFormat:@"\tlowLightCondition:%@\n", [@(_lowLightCondition) description]];
    [desc appendFormat:@"\tadjustingExposure:%@\n", [@(_adjustingExposure) description]];
    [desc appendFormat:@"\tdevicePosition:%@\n", [@(_devicePosition) description]];
    [desc appendFormat:@"\tzoomFactor:%@\n", [@(_zoomFactor) description]];
    [desc appendFormat:@"\tflashSupported:%@\n", [@(_flashSupported) description]];
    [desc appendFormat:@"\ttorchSupported:%@\n", [@(_torchSupported) description]];
    [desc appendFormat:@"\tflashActive:%@\n", [@(_flashActive) description]];
    [desc appendFormat:@"\ttorchActive:%@\n", [@(_torchActive) description]];
    [desc appendFormat:@"\tlensesActive:%@\n", [@(_lensesActive) description]];
    [desc appendFormat:@"\tarSessionActive:%@\n", [@(_arSessionActive) description]];
    [desc appendFormat:@"\tliveVideoStreaming:%@\n", [@(_liveVideoStreaming) description]];
    [desc appendFormat:@"\tlensProcessorReady:%@\n", [@(_lensProcessorReady) description]];
    [desc appendString:@"}\n"];

    return [desc copy];
}

@end
@@ -0,0 +1,20 @@
#import <CoreGraphics/CoreGraphics.h>
#import "SCManagedCaptureDevice.h"

interface SCManagedCapturerState
    BOOL isRunning
    BOOL isNightModeActive
    BOOL isPortraitModeActive
    BOOL lowLightCondition
    BOOL adjustingExposure
    enum SCManagedCaptureDevicePosition devicePosition
    CGFloat zoomFactor
    BOOL flashSupported
    BOOL torchSupported
    BOOL flashActive
    BOOL torchActive
    BOOL lensesActive
    BOOL arSessionActive
    BOOL liveVideoStreaming
    BOOL lensProcessorReady
end
@@ -0,0 +1,46 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedCapturerState.h"

#import <AvailabilityMacros.h>

#import <Foundation/Foundation.h>

@interface SCManagedCapturerStateBuilder : NSObject

+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState;

- (SCManagedCapturerState *)build;

- (instancetype)setIsRunning:(BOOL)isRunning;

- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive;

- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive;

- (instancetype)setLowLightCondition:(BOOL)lowLightCondition;

- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure;

- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition;

- (instancetype)setZoomFactor:(CGFloat)zoomFactor;

- (instancetype)setFlashSupported:(BOOL)flashSupported;

- (instancetype)setTorchSupported:(BOOL)torchSupported;

- (instancetype)setFlashActive:(BOOL)flashActive;

- (instancetype)setTorchActive:(BOOL)torchActive;

- (instancetype)setLensesActive:(BOOL)lensesActive;

- (instancetype)setArSessionActive:(BOOL)arSessionActive;

- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming;

- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady;

@end
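
// Usage sketch: deriving a new immutable state with one field changed, using
// only the methods declared above (`oldState` is an assumed existing
// SCManagedCapturerState instance):
//
//   SCManagedCapturerState *newState =
//       [[[SCManagedCapturerStateBuilder withManagedCapturerState:oldState]
//           setTorchActive:YES] build];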
@@ -0,0 +1,158 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedCapturerStateBuilder.h"

#import <SCFoundation/SCValueObjectHelpers.h>

#import <FastCoding/FastCoder.h>

@implementation SCManagedCapturerStateBuilder {
    BOOL _isRunning;
    BOOL _isNightModeActive;
    BOOL _isPortraitModeActive;
    BOOL _lowLightCondition;
    BOOL _adjustingExposure;
    SCManagedCaptureDevicePosition _devicePosition;
    CGFloat _zoomFactor;
    BOOL _flashSupported;
    BOOL _torchSupported;
    BOOL _flashActive;
    BOOL _torchActive;
    BOOL _lensesActive;
    BOOL _arSessionActive;
    BOOL _liveVideoStreaming;
    BOOL _lensProcessorReady;
}

+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState
{
    SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init];
    builder->_isRunning = managedCapturerState.isRunning;
    builder->_isNightModeActive = managedCapturerState.isNightModeActive;
    builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive;
    builder->_lowLightCondition = managedCapturerState.lowLightCondition;
    builder->_adjustingExposure = managedCapturerState.adjustingExposure;
    builder->_devicePosition = managedCapturerState.devicePosition;
    builder->_zoomFactor = managedCapturerState.zoomFactor;
    builder->_flashSupported = managedCapturerState.flashSupported;
    builder->_torchSupported = managedCapturerState.torchSupported;
    builder->_flashActive = managedCapturerState.flashActive;
    builder->_torchActive = managedCapturerState.torchActive;
    builder->_lensesActive = managedCapturerState.lensesActive;
    builder->_arSessionActive = managedCapturerState.arSessionActive;
    builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming;
    builder->_lensProcessorReady = managedCapturerState.lensProcessorReady;
    return builder;
}

- (SCManagedCapturerState *)build
{
    return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning
                                           isNightModeActive:_isNightModeActive
                                        isPortraitModeActive:_isPortraitModeActive
                                           lowLightCondition:_lowLightCondition
                                           adjustingExposure:_adjustingExposure
                                              devicePosition:_devicePosition
                                                  zoomFactor:_zoomFactor
                                              flashSupported:_flashSupported
                                              torchSupported:_torchSupported
                                                 flashActive:_flashActive
                                                 torchActive:_torchActive
                                                lensesActive:_lensesActive
                                             arSessionActive:_arSessionActive
                                          liveVideoStreaming:_liveVideoStreaming
                                          lensProcessorReady:_lensProcessorReady];
}

- (instancetype)setIsRunning:(BOOL)isRunning
{
    _isRunning = isRunning;
    return self;
}

- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive
{
    _isNightModeActive = isNightModeActive;
    return self;
}

- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive
{
    _isPortraitModeActive = isPortraitModeActive;
    return self;
}

- (instancetype)setLowLightCondition:(BOOL)lowLightCondition
{
    _lowLightCondition = lowLightCondition;
    return self;
}

- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure
{
    _adjustingExposure = adjustingExposure;
    return self;
}

- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _devicePosition = devicePosition;
    return self;
}

- (instancetype)setZoomFactor:(CGFloat)zoomFactor
{
    _zoomFactor = zoomFactor;
    return self;
}

- (instancetype)setFlashSupported:(BOOL)flashSupported
{
    _flashSupported = flashSupported;
    return self;
}

- (instancetype)setTorchSupported:(BOOL)torchSupported
{
    _torchSupported = torchSupported;
    return self;
}

- (instancetype)setFlashActive:(BOOL)flashActive
{
    _flashActive = flashActive;
    return self;
}

- (instancetype)setTorchActive:(BOOL)torchActive
{
    _torchActive = torchActive;
    return self;
}

- (instancetype)setLensesActive:(BOOL)lensesActive
{
    _lensesActive = lensesActive;
    return self;
}

- (instancetype)setArSessionActive:(BOOL)arSessionActive
{
    _arSessionActive = arSessionActive;
    return self;
}

- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming
{
    _liveVideoStreaming = liveVideoStreaming;
    return self;
}

- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady
{
    _lensProcessorReady = lensProcessorReady;
    return self;
}

@end
@@ -0,0 +1,36 @@
//
//  SCManagedCapturerUtils.h
//  Snapchat
//
//  Created by Chao Pang on 10/4/17.
//

#import <SCBase/SCMacros.h>

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

SC_EXTERN_C_BEGIN

extern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio;

extern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void);

extern CGSize SCManagedCapturerAllScreenSize(void);

extern CGSize SCAsyncImageCapturePlaceholderViewSize(void);

extern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio);

extern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio);

extern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,
                                         CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight);

extern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio);

extern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight);

extern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation,
                                                    CGFloat aspectRatio);
SC_EXTERN_C_END
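
// Usage sketch: cropping a captured image to the on-screen aspect ratio with
// the functions declared above (`capturedImage` is an assumed input):
//
//   CGFloat targetAspectRatio = SCManagedCapturedImageAndVideoAspectRatio();
//   UIImage *croppedImage = SCCropImageToTargetAspectRatio(capturedImage, targetAspectRatio);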
@@ -0,0 +1,153 @@
//
//  SCManagedCapturerUtils.m
//  Snapchat
//
//  Created by Chao Pang on 10/4/17.
//

#import "SCManagedCapturerUtils.h"

#import "SCCaptureCommon.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>

// This is the crop ratio used for generating the image shown in the Preview page.
// Check https://snapchat.quip.com/lU3kAoDxaAFG for our design.
const CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0);
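// Numerically: (397 * 739) / (375 * 812) = 293383 / 304500 ≈ 0.9635. In
// SCCreateCroppedImageToAspectRatio below, each dimension of the captured
// image is scaled by this factor before the aspect-ratio crop is computed.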

CGFloat SCManagedCapturedImageAndVideoAspectRatio(void)
{
    static dispatch_once_t onceToken;
    static CGFloat aspectRatio;
    dispatch_once(&onceToken, ^{
        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
        aspectRatio = SCSizeGetAspectRatio(
            CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom));
    });
    return aspectRatio;
}

CGSize SCManagedCapturerAllScreenSize(void)
{
    static CGSize size;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
        // This logic is complicated because we need to handle iPhone X properly.
        // See https://snapchat.quip.com/lU3kAoDxaAFG for our design.
        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
        UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];
        // This really is just a coordinate computation:
        // In preview, our size is (screenWidth, screenHeight - topInset - bottomInset).
        // When the preview image is shown on the camera screen, its height is
        // screenHeight - visualTopInset, so the bled-over width on the camera screen is
        // screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset).
        size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) /
                                 (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),
                          screenSize.height);
    });
    return size;
}
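
// Worked example (all numbers are assumptions for illustration): on an
// iPhone X in portrait, screenSize is 375x812 points and safeAreaInsets are
// (top 44, bottom 34). Assuming visualSafeInsets.top == 0, the width above is
// round(375 * 812 / (812 - 44 - 34)) = round(414.85) = 415, so the result is
// (415, 812) and the preview bleeds roughly 20 points past each horizontal edge.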

CGSize SCAsyncImageCapturePlaceholderViewSize(void)
{
    static CGSize size;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
        UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];
        size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width /
                                 (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),
                          screenSize.height - visualSafeInsets.top);
    });
    return size;
}

CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio)
{
    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
    switch (orientation) {
    case UIImageOrientationLeft:
    case UIImageOrientationRight:
    case UIImageOrientationLeftMirrored:
    case UIImageOrientationRightMirrored:
        return 1.0 / aspectRatio;
    default:
        return aspectRatio;
    }
}

UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio)
{
    if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) {
        CGImageRef croppedImageRef =
            SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio);
        UIImage *croppedImage =
            [UIImage imageWithCGImage:croppedImageRef scale:image.scale orientation:image.imageOrientation];
        CGImageRelease(croppedImageRef);
        return croppedImage;
    } else {
        return image;
    }
}

void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,
                                  CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight)
{
    SCCAssert(outputWidth != NULL && outputHeight != NULL, @"");
    aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);
    if (inputWidth > roundf(inputHeight * aspectRatio)) {
        *outputHeight = inputHeight;
        *outputWidth = roundf(*outputHeight * aspectRatio);
    } else {
        *outputWidth = inputWidth;
        *outputHeight = roundf(*outputWidth / aspectRatio);
    }
}

BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
{
    if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {
        return NO;
    }
    aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    return (width != roundf(height * aspectRatio));
}

CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight)
{
    if ([SCDeviceName isIphoneX]) {
        // On iPhone X, x is pushed all the way over so that we crop out the top section of the
        // image but none of the bottom.
        CGFloat x = (imageWidth - croppedWidth);
        // Crop y symmetrically.
        CGFloat y = roundf((imageHeight - croppedHeight) / 2.0);

        return CGRectMake(x, y, croppedWidth, croppedHeight);
    }
    return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight);
}

CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
{
    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    size_t croppedWidth, croppedHeight;
    if ([SCDeviceName isIphoneX]) {
        size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio);
        size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio);
        SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth,
                                     &croppedHeight);
    } else {
        SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight);
    }
    CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight);
    return CGImageCreateWithImageInRect(image, cropRect);
}
@@ -0,0 +1,57 @@
//
//  SCManagedCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 4/20/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCaptureCommon.h"
#import "SCCapturer.h"

#import <SCFoundation/SCTraceODPCompatible.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

/**
 * Manage AVCaptureSession with SCManagedCapturerV1
 *
 * In phantom, we use AVCaptureSession in a lot of places. However, since only one session can run
 * at a time in each app, we need some kind of management for the capture session.
 *
 * SCManagedCapturerV1 manages the state of the capture session in the following ways:
 *
 * All operations in SCManagedCapturerV1 are handled on a serial queue to ensure their ordering.
 * All callbacks (either on the listener or the completion handler) are delivered on the main
 * thread. The state of SCManagedCapturerV1 is conveniently maintained in an SCManagedCapturerState
 * object, which is immutable and can be passed across threads, and which provides a consistent,
 * possibly slightly delayed, view of the capture session. (The state delivered on the main thread
 * may say the current active device is the back camera while, on the serial queue, the active
 * device has already switched to the front camera. This is OK, because state.devicePosition was
 * the back camera, with all of its setup, at that time. Note that it is impossible to have an
 * up-to-the-moment view of the state across threads without the threads blocking each other.)
 *
 * For the main use cases, you set up the capturer, add the preview layer, and then capture a still
 * image or record a video; SCManagedCapturerV1 does the rest (making sure it actually captures the
 * image / video, recovering from errors, and setting up our more advanced image / video
 * post-processing).
 *
 * The key classes that drive the recording flow are SCManagedVideoStreamer and
 * SCManagedVideoFileStreamer, which conform to SCManagedVideoDataSource. They stream images to
 * consumers conforming to SCManagedVideoDataSourceListener, such as SCManagedLensesProcessor,
 * SCManagedDeviceCapacityAnalyzer, SCManagedVideoScanner, and ultimately SCManagedVideoCapturer
 * and SCManagedStillImageCapturer, which record the final output.
 */
@class SCCaptureResource;

extern NSString *const kSCLensesTweaksDidChangeFileInput;

@interface SCManagedCapturerV1 : NSObject <SCCapturer, SCTimeProfilable>

+ (SCManagedCapturerV1 *)sharedInstance;

/*
 The following APIs are reserved to be used only by SCCaptureCore, aka managedCapturerV2.
 */
- (instancetype)initWithResource:(SCCaptureResource *)resource;

@end
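
// Usage sketch: consuming state changes. The exact listener callback shape
// below is an assumption, inferred from how the announcer is invoked elsewhere
// in this diff; the state snapshot arrives on the main thread and, being
// immutable, is safe to read there.
//
//   - (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state
//   {
//       self.flashButton.enabled = state.flashSupported && !state.arSessionActive;
//   }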
2165
ManagedCapturer/SCManagedCapturerV1.m
File diff suppressed because it is too large
@@ -0,0 +1,20 @@
//
//  SCManagedCapturerV1_Private.h
//  Snapchat
//
//  Created by Jingtian Yang on 20/12/2017.
//

#import "SCManagedCapturerV1.h"

@interface SCManagedCapturerV1 ()

- (SCCaptureResource *)captureResource;

- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
              completionHandler:(dispatch_block_t)completionHandler;

- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token
                  completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                            context:(NSString *)context;
@end
@@ -0,0 +1,32 @@
//
//  SCManagedDeviceCapacityAnalyzer.h
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@class SCManagedCaptureDevice;
@protocol SCPerforming;

extern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh;

@interface SCManagedDeviceCapacityAnalyzer : NSObject <SCManagedVideoDataSourceListener>

@property (nonatomic, assign) BOOL lowLightConditionEnabled;

- (instancetype)initWithPerformer:(id<SCPerforming>)performer;

- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;

- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice;
- (void)removeFocusListener;

@end
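
// Usage sketch: wiring the analyzer into the capture pipeline. `performer`,
// `handler` and `device` are assumed to come from the surrounding capture
// resource; the methods used are exactly the ones declared above.
//
//   SCManagedDeviceCapacityAnalyzer *analyzer =
//       [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:performer];
//   [analyzer addListener:handler];
//   [analyzer setAsFocusListenerForDevice:device];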
@@ -0,0 +1,294 @@
//
//  SCManagedDeviceCapacityAnalyzer.m
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedDeviceCapacityAnalyzer.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCTrace.h>

#import <FBKVOController/FBKVOController.h>

@import ImageIO;
@import QuartzCore;

NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500;

NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800;

NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640;

NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800;

// After this many frames without a change in exposure time or ISO, we assume that the exposure
// adjustment has ended.
static NSInteger const kExposureUnchangedHighWatermark = 5;
// If the deadline is reached and we still haven't hit the high watermark, we consult the low
// watermark instead and at least give the system a chance to take not-so-great pictures.
static NSInteger const kExposureUnchangedLowWatermark = 1;
static NSTimeInterval const kExposureUnchangedDeadline = 0.2;

// It seems that between ISO 500 and 640, the brightness value is always somewhere around -0.4 to -0.5.
// Therefore, this threshold probably will work fine.
static float const kBrightnessValueThreshold = -2.25;
// Give some margin between "recognized as bright enough" and "not enough light".
// If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval,
// we count the frame as a low light frame. Only if the brightness is higher than
// kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval do we consider that we
// have enough light, and reset the low light frame count to 0. 0.5 was chosen because in a dark
// environment, the brightness value changes by +-0.3 with minor orientation changes.
static float const kBrightnessValueThresholdConfidenceInterval = 0.5;
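// Concretely, with the values above: a frame counts as low light only when
// brightness < -2.25 - 0.5 = -2.75, and the low light count is reset only when
// brightness >= -2.25 + 0.5 = -1.75; readings in between leave the counters
// untouched, which is what gives the detection its hysteresis.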
// If we have been in a good light condition for this many frames, we are ready to change back.
static NSInteger const kLowLightBoostUnchangedLowWatermark = 7;
// Requires that we stay in a low light condition for roughly a second (assuming 20~30fps).
static NSInteger const kLowLightBoostUnchangedHighWatermark = 25;

static NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second
static float const kSCLightingConditionNormalThreshold = 0;
static float const kSCLightingConditionDarkThreshold = -3;

@implementation SCManagedDeviceCapacityAnalyzer {
    float _lastExposureTime;
    int _lastISOSpeedRating;
    NSTimeInterval _lastAdjustingExposureStartTime;

    NSInteger _lowLightBoostLowLightCount;
    NSInteger _lowLightBoostEnoughLightCount;
    NSInteger _exposureUnchangedCount;
    NSInteger _maxISOPresetHigh;

    NSInteger _normalLightingConditionCount;
    NSInteger _darkLightingConditionCount;
    NSInteger _extremeDarkLightingConditionCount;
    SCCapturerLightingConditionType _lightingCondition;

    BOOL _lowLightCondition;
    BOOL _adjustingExposure;

    SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer;
    FBKVOController *_observeController;
    id<SCPerforming> _performer;

    // Remember the last logged brightness; only log again if it changes by more than a threshold.
    float _lastBrightnessToLog;
}

- (instancetype)initWithPerformer:(id<SCPerforming>)performer
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _performer = performer;
        _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI;
        if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) {
            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8;
        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) {
            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7;
        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) {
            // The iPhone 6S supports a higher ISO rate for video recording; accommodate that.
            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S;
        }
        _announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init];
        _observeController = [[FBKVOController alloc] initWithObserver:self];
    }
    return self;
}

- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceStart();
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceStart();
    [_announcer removeListener:listener];
}

- (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled
{
    SCTraceStart();
    if (_lowLightConditionEnabled != lowLightConditionEnabled) {
        _lowLightConditionEnabled = lowLightConditionEnabled;
        if (!lowLightConditionEnabled) {
            _lowLightBoostLowLightCount = 0;
            _lowLightBoostEnoughLightCount = 0;
            _lowLightCondition = NO;
            [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
        }
    }
}

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    SampleBufferMetadata metadata = {
        .isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime,
    };
    retrieveSampleBufferMetadata(sampleBuffer, &metadata);
    if ((SCIsDebugBuild() || SCIsMasterBuild())
        // Enable this on internal builds only (excluding alpha)
        && fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) {
        // Log only when the brightness change is greater than 0.5
        _lastBrightnessToLog = metadata.brightness;
        SCLogCoreCameraInfo(@"ExposureTime: %f, ISO: %ld, Brightness: %f", metadata.exposureTime,
                            (long)metadata.isoSpeedRating, metadata.brightness);
    }
    [self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating];
    _lastExposureTime = metadata.exposureTime;
    _lastISOSpeedRating = metadata.isoSpeedRating;
    if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh &&
        _lowLightConditionEnabled) { // If we are not recording, we are not at a higher ISO than we need
        [self _automaticallyDetectLowLightCondition:metadata.brightness];
    }
    [self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness];
    [_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness];
}

- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice
{
    SCTraceStart();
    [_observeController observe:captureDevice.device
                        keyPath:@keypath(captureDevice.device, adjustingFocus)
                        options:NSKeyValueObservingOptionNew
                         action:@selector(_adjustingFocusingChanged:)];
}

- (void)removeFocusListener
{
    SCTraceStart();
    [_observeController unobserveAll];
}

#pragma mark - Private methods

- (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating
{
    SCTraceStart();
    if (currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) {
        _exposureUnchangedCount = 0;
    } else {
        ++_exposureUnchangedCount;
    }
    NSTimeInterval currentTime = CACurrentMediaTime();
    if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark ||
        (currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline &&
         _exposureUnchangedCount >= kExposureUnchangedLowWatermark)) {
        // The exposure values haven't changed for kExposureUnchangedHighWatermark frames, so we
        // consider the adjustment done. Alternatively, if we have waited long enough and the
        // exposure-unchanged count has at least reached the low watermark, we call it done and
        // give it a shot.
        if (_adjustingExposure) {
            _adjustingExposure = NO;
            SCLogGeneralInfo(@"Adjusting exposure is done, unchanged count: %zd", _exposureUnchangedCount);
            [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];
        }
    } else {
        // Otherwise, signal that exposure is being adjusted.
        if (!_adjustingExposure) {
            _adjustingExposure = YES;
            _lastAdjustingExposureStartTime = currentTime;
            [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];
        }
    }
}

- (void)_automaticallyDetectLowLightCondition:(float)brightness
{
    SCTraceStart();
    if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) {
        // We are at the stage where we need to use a higher ISO (because the current ISO is maxed
        // out) and the brightness is lower than the threshold.
        if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) {
            // Count how many consecutive frames like this we have encountered; once we reach the
            // watermark, flip the low light boost mode.
            if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) {
                _lowLightCondition = YES;
                [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
            } else {
                ++_lowLightBoostLowLightCount;
            }
        } else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {
            // The brightness is consistently good again; reset the low light count to 0.
            _lowLightBoostLowLightCount = 0;
        }
    } else if (_lowLightCondition) {
        // Check the current ISO to see if we can disable low light boost.
        if (_lastISOSpeedRating <= _maxISOPresetHigh &&
            brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {
            if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) {
                _lowLightCondition = NO;
                [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
                _lowLightBoostEnoughLightCount = 0;
            } else {
                ++_lowLightBoostEnoughLightCount;
            }
        }
    }
}

- (void)_adjustingFocusingChanged:(NSDictionary *)change
{
    SCTraceStart();
    BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue];
    [_performer perform:^{
        [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus];
    }];
}

- (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness
{
    if (brightness >= kSCLightingConditionNormalThreshold) {
        if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) {
            if (_lightingCondition != SCCapturerLightingConditionTypeNormal) {
                _lightingCondition = SCCapturerLightingConditionTypeNormal;
                [_announcer managedDeviceCapacityAnalyzer:self
                               didChangeLightingCondition:SCCapturerLightingConditionTypeNormal];
            }
        } else {
            _normalLightingConditionCount++;
        }
        _darkLightingConditionCount = 0;
        _extremeDarkLightingConditionCount = 0;
    } else if (brightness >= kSCLightingConditionDarkThreshold) {
        if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) {
            if (_lightingCondition != SCCapturerLightingConditionTypeDark) {
                _lightingCondition = SCCapturerLightingConditionTypeDark;
                [_announcer managedDeviceCapacityAnalyzer:self
                               didChangeLightingCondition:SCCapturerLightingConditionTypeDark];
            }
        } else {
            _darkLightingConditionCount++;
        }
        _normalLightingConditionCount = 0;
        _extremeDarkLightingConditionCount = 0;
    } else {
        if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) {
            if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) {
                _lightingCondition = SCCapturerLightingConditionTypeExtremeDark;
                [_announcer managedDeviceCapacityAnalyzer:self
                               didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark];
            }
        } else {
            _extremeDarkLightingConditionCount++;
        }
        _normalLightingConditionCount = 0;
        _darkLightingConditionCount = 0;
    }
}

@end
@@ -0,0 +1,20 @@
//
//  SCManagedDeviceCapacityAnalyzerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;

@interface SCManagedDeviceCapacityAnalyzerHandler : NSObject <SCManagedDeviceCapacityAnalyzerListener>

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end
@@ -0,0 +1,72 @@
//
//  SCManagedDeviceCapacityAnalyzerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedDeviceCapacityAnalyzerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedDeviceCapacityAnalyzerHandler () {
    __weak SCCaptureResource *_captureResource;
}
@end

@implementation SCManagedDeviceCapacityAnalyzerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLowLightCondition:(BOOL)lowLightCondition
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Change Low Light Condition %d", lowLightCondition);
    [_captureResource.queuePerformer perform:^{
        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
            setLowLightCondition:lowLightCondition] build];
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                             didChangeLowLightCondition:state];
        });
    }];
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Capacity Analyzer Changes adjustExposure %d", adjustingExposure);
    [_captureResource.queuePerformer perform:^{
        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
            setAdjustingExposure:adjustingExposure] build];
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                             didChangeAdjustingExposure:state];
        });
    }];
}

@end
@@ -0,0 +1,35 @@
//#!announcer.rb
// SCManagedDeviceCapacityAnalyzerListener.h
// Snapchat
//
// Created by Liu Liu on 5/4/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCapturerDefines.h"

#import <Foundation/Foundation.h>

@class SCManagedDeviceCapacityAnalyzer;

@protocol SCManagedDeviceCapacityAnalyzerListener <NSObject>

@optional

// These callbacks happen on an internal queue
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLowLightCondition:(BOOL)lowLightCondition;

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure;

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
              didChangeAdjustingFocus:(BOOL)adjustingFocus;

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
                  didChangeBrightness:(float)adjustingBrightness;

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition;

@end
@@ -0,0 +1,12 @@
// Generated by the announcer.rb DO NOT EDIT!!

#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <Foundation/Foundation.h>

@interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject <SCManagedDeviceCapacityAnalyzerListener>

- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;

@end
@@ -0,0 +1,146 @@
// Generated by the announcer.rb DO NOT EDIT!!

#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h"

#include <mutex>
using std::lock_guard;
using std::mutex;
#include <vector>
using std::find;
using std::make_shared;
using std::shared_ptr;
using std::vector;

@implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer {
    mutex _mutex;
    shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>> _listeners;
}
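
// Copy-on-write listener list: add/remove serialize on _mutex and publish a
// fresh vector via atomic_store, while the announce methods below only
// atomic_load the shared_ptr; announcements therefore never block on, or get
// torn by, concurrent listener mutation.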

- (NSString *)description
{
    auto listeners = atomic_load(&self->_listeners);
    NSMutableString *desc = [NSMutableString string];
    [desc appendFormat:@"<SCManagedDeviceCapacityAnalyzerListenerAnnouncer %p>: [", self];
    if (listeners) {
        for (size_t i = 0; i < listeners->size(); ++i) {
            [desc appendFormat:@"%@", (*listeners)[i]];
            if (i != listeners->size() - 1) {
                [desc appendString:@", "];
            }
        }
    }
    [desc appendString:@"]"];
    return desc;
}

- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    lock_guard<mutex> lock(_mutex);
    auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();
    if (_listeners != nil) {
        // The listener we want to add already exists
        if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {
            return;
        }
        for (auto &one : *_listeners) {
            if (one != nil) {
                listeners->push_back(one);
            }
        }
        listeners->push_back(listener);
        atomic_store(&self->_listeners, listeners);
    } else {
        listeners->push_back(listener);
        atomic_store(&self->_listeners, listeners);
    }
}

- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    lock_guard<mutex> lock(_mutex);
    if (_listeners == nil) {
        return;
    }
    // If the only item in the listener list is the one we want to remove, store nil back again
    if (_listeners->size() == 1 && (*_listeners)[0] == listener) {
        atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>());
        return;
    }
    auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();
    for (auto &one : *_listeners) {
        if (one != nil && one != listener) {
            listeners->push_back(one);
        }
    }
    atomic_store(&self->_listeners, listeners);
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLowLightCondition:(BOOL)lowLightCondition
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) {
                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
                             didChangeLowLightCondition:lowLightCondition];
            }
        }
    }
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) {
                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
                             didChangeAdjustingExposure:adjustingExposure];
            }
        }
    }
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
              didChangeAdjustingFocus:(BOOL)adjustingFocus
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) {
                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
                                didChangeAdjustingFocus:adjustingFocus];
            }
        }
    }
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
                  didChangeBrightness:(float)adjustingBrightness
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) {
                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
                                    didChangeBrightness:adjustingBrightness];
            }
        }
    }
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) {
                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
                             didChangeLightingCondition:lightingCondition];
            }
        }
    }
}

@end
@@ -0,0 +1,25 @@
//
//  SCManagedDroppedFramesReporter.h
//  Snapchat
//
//  Created by Michel Loenngren on 3/21/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

/*
 Conforms to SCManagedVideoDataSourceListener and records frame rate statistics
 during recording.
 */
@interface SCManagedDroppedFramesReporter : NSObject <SCManagedVideoDataSourceListener, SCManagedCapturerListener>

- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied;

- (void)didChangeCaptureDevicePosition;

@end
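
// Usage sketch: once a recording session ends, flush the collected statistics.
// The reporter is assumed to have been registered as a video data source
// listener for the duration of the recording; the flag values are assumptions.
//
//   [reporter reportWithKeepLateFrames:YES lensesApplied:NO];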
@@ -0,0 +1,86 @@
//
//  SCManagedDroppedFramesReporter.m
//  Snapchat
//
//  Created by Michel Loenngren on 3/21/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedDroppedFramesReporter.h"

#import "SCCameraTweaks.h"
#import "SCManagedCapturerState.h"

#import <SCFoundation/SCBackgroundTaskMonitor.h>
#import <SCFoundation/SCLog.h>
#import <SCFrameRate/SCFrameRateEntry.h>
#import <SCFrameRate/SCVideoFrameDropCounter.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>

CGFloat const kSCCaptureTargetFramerate = 30;

@interface SCManagedDroppedFramesReporter ()

@property (nonatomic) SCVideoFrameDropCounter *frameDropCounter;

@end

@implementation SCManagedDroppedFramesReporter {
    SCVideoFrameDropCounter *_frameDropCounter;
    NSUInteger _droppedFrames;
}

- (SCVideoFrameDropCounter *)frameDropCounter
{
    if (_frameDropCounter == nil) {
        _frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate];
        _droppedFrames = 0;
    }
    return _frameDropCounter;
}

- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied
{
    if (_frameDropCounter == nil) {
        return;
    }

    NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy];
    eventDict[@"total_frame_drop_measured"] = @(_droppedFrames);
    eventDict[@"keep_late_frames"] = @(keepLateFrames);
    // Even if the user selects none of the lenses when activating the lens scroll view, we still
    // enable keepLateFrames.
    eventDict[@"lenses_applied"] = @(lensesApplied);

    [[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict];

    // Reset
    _frameDropCounter = nil;
    _droppedFrames = 0;
}

- (void)didChangeCaptureDevicePosition
{
    [_frameDropCounter didChangeCaptureDevicePosition];
}

#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    [self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
}

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
           didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _droppedFrames += 1;
    NSDictionary<NSString *, NSNumber *> *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport();
    SCLogCoreCameraInfo(@"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@",
                        backgroundTaskScreenshot);
}

@end
@@ -0,0 +1,57 @@
//
//  SCManagedFrameHealthChecker.h
//  Snapchat
//
//  Created by Pinlin Chen on 30/08/2017.
//

#import <SCBase/SCMacros.h>
#import <SCFeatureGating/SCExperimentManager.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@interface SCManagedFrameHealthChecker : NSObject

+ (SCManagedFrameHealthChecker *)sharedInstance;
/*! @abstract Use sharedInstance instead. */
SC_INIT_AND_NEW_UNAVAILABLE;

/* Utility methods */
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo;
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
                            photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                     lensEnabled:(BOOL)lensesEnabled
                                          lensID:(NSString *)lensID;
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
                        photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                 lensEnabled:(BOOL)lensesEnabled
                                      lensID:(NSString *)lensID;
- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset;

/* Image snap */
- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
                             captureSettings:(NSDictionary *)captureSettings
                            captureSessionID:(NSString *)captureSessionID;
- (void)checkImageHealthForPreTranscoding:(UIImage *)image
                                 metadata:(NSDictionary *)metadata
                         captureSessionID:(NSString *)captureSessionID;
- (void)checkImageHealthForPostTranscoding:(NSData *)imageData
                                  metadata:(NSDictionary *)metadata
                          captureSessionID:(NSString *)captureSessionID;

/* Video snap */
- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
                                    metedata:(NSDictionary *)metadata
                            captureSessionID:(NSString *)captureSessionID;
- (void)checkVideoHealthForOverlayImage:(UIImage *)image
                               metedata:(NSDictionary *)metadata
                       captureSessionID:(NSString *)captureSessionID;
- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
                                           metedata:(NSDictionary *)metadata
                                         properties:(NSDictionary *)properties
                                   captureSessionID:(NSString *)captureSessionID;

- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID;

@end
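
// Usage sketch: checks are fire-and-forget against the shared checker, and the
// results accumulate per captureSessionID until the report call flushes them.
// `image`, `settings` and `sessionID` are assumed inputs.
//
//   [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:image
//                                                                      captureSettings:settings
//                                                                     captureSessionID:sessionID];
//   // ... later, once the snap is finalized:
//   [[SCManagedFrameHealthChecker sharedInstance] reportFrameHealthCheckForCaptureSessionID:sessionID];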
@@ -0,0 +1,709 @@
//
//  SCManagedFrameHealthChecker.m
//  Snapchat
//
//  Created by Pinlin Chen on 30/08/2017.
//

#import "SCManagedFrameHealthChecker.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"

#import <SCFoundation/AVAsset+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCLogHelper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/UIImage+Helpers.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Stats.h>
#import <SCWebP/UIImage+WebP.h>

#import <ImageIO/CGImageProperties.h>
@import Accelerate;

static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker";
static const int kSCManagedFrameHealthCheckerMaxSamples = 2304;
static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0;
static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0;
static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0;
// Assume we process at most two 2304x4096 RGBA images at a time
// (2 * 2304 * 4096 * 4 bytes = 72MB).
static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0;

typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) {
    SCManagedFrameHealthCheck_ImageCapture = 0,
    SCManagedFrameHealthCheck_ImagePreTranscoding,
    SCManagedFrameHealthCheck_ImagePostTranscoding,
    SCManagedFrameHealthCheck_VideoCapture,
    SCManagedFrameHealthCheck_VideoOverlayImage,
    SCManagedFrameHealthCheck_VideoPostTranscoding,
};

typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) {
    SCManagedFrameHealthCheckError_None = 0,
    SCManagedFrameHealthCheckError_Invalid_Bitmap,
    SCManagedFrameHealthCheckError_Frame_Possibly_Black,
    SCManagedFrameHealthCheckError_Frame_Totally_Black,
    SCManagedFrameHealthCheckError_Execution_Error,
};

typedef struct {
    float R;
    float G;
    float B;
    float A;
} FloatRGBA;

@class SCManagedFrameHealthCheckerTask;
typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task);

float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength)
{
    float sum = 0;
    float colorArray[bufferLength];
    // Convert the bytes to floats for vDSP
    vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength);
    // Calculate the sum of the color elements
    vDSP_sve(colorArray, 1, &sum, bufferLength);
    return sum;
}
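
// Usage sketch: summing one channel of a tightly packed RGBA8 buffer with the
// helper above. `rgbaBytes`, `width` and `height` are assumed inputs; the
// stride of 4 selects one byte per pixel for the chosen channel.
//
//   NSInteger pixelCount = width * height;
//   float redSum = vDspColorElementSum(rgbaBytes, 4, pixelCount);      // R channel
//   float greenSum = vDspColorElementSum(rgbaBytes + 1, 4, pixelCount); // G channel
//   float meanRed = redSum / pixelCount;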
|||

@interface SCManagedFrameHealthCheckerTask : NSObject

@property (nonatomic, assign) SCManagedFrameHealthCheckType type;
@property (nonatomic, strong) id targetObject;
@property (nonatomic, assign) CGSize sourceImageSize;
@property (nonatomic, strong) UIImage *unifiedImage;
@property (nonatomic, strong) NSDictionary *metadata;
@property (nonatomic, strong) NSDictionary *videoProperties;
@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType;

+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata
                                  videoProperties:(NSDictionary *)videoProperties;

+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata;

@end

@implementation SCManagedFrameHealthCheckerTask

+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata
{
    return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil];
}

+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata
                                  videoProperties:(NSDictionary *)videoProperties
{
    SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init];
    task.type = type;
    task.targetObject = targetObject;
    task.metadata = metadata;
    task.videoProperties = videoProperties;
    return task;
}

- (NSString *)textForSnapType
{
    switch (self.type) {
    case SCManagedFrameHealthCheck_ImageCapture:
    case SCManagedFrameHealthCheck_ImagePreTranscoding:
    case SCManagedFrameHealthCheck_ImagePostTranscoding:
        return @"IMAGE";
    case SCManagedFrameHealthCheck_VideoCapture:
    case SCManagedFrameHealthCheck_VideoOverlayImage:
    case SCManagedFrameHealthCheck_VideoPostTranscoding:
        return @"VIDEO";
    }
}

- (NSString *)textForSource
{
    switch (self.type) {
    case SCManagedFrameHealthCheck_ImageCapture:
        return @"CAPTURE";
    case SCManagedFrameHealthCheck_ImagePreTranscoding:
        return @"PRE_TRANSCODING";
    case SCManagedFrameHealthCheck_ImagePostTranscoding:
        return @"POST_TRANSCODING";
    case SCManagedFrameHealthCheck_VideoCapture:
        return @"CAPTURE";
    case SCManagedFrameHealthCheck_VideoOverlayImage:
        return @"OVERLAY_IMAGE";
    case SCManagedFrameHealthCheck_VideoPostTranscoding:
        return @"POST_TRANSCODING";
    }
}

- (NSString *)textForErrorType
{
    switch (self.errorType) {
    case SCManagedFrameHealthCheckError_None:
        return nil;
    case SCManagedFrameHealthCheckError_Invalid_Bitmap:
        return @"Invalid_Bitmap";
    case SCManagedFrameHealthCheckError_Frame_Possibly_Black:
        return @"Frame_Possibly_Black";
    case SCManagedFrameHealthCheckError_Frame_Totally_Black:
        return @"Frame_Totally_Black";
    case SCManagedFrameHealthCheckError_Execution_Error:
        return @"Execution_Error";
    }
}

@end

@interface SCManagedFrameHealthChecker () {
    id<SCPerforming> _performer;
    // Dictionary structure
    // Key - NSString, captureSessionID
    // Value - NSMutableArray<SCManagedFrameHealthCheckerTask>
    NSMutableDictionary *_frameCheckTasks;
}

@end

@implementation SCManagedFrameHealthChecker

+ (SCManagedFrameHealthChecker *)sharedInstance
{
    SCTraceODPCompatibleStart(2);
    static SCManagedFrameHealthChecker *checker;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        checker = [[SCManagedFrameHealthChecker alloc] _init];
    });
    return checker;
}

- (instancetype)_init
{
    SCTraceODPCompatibleStart(2);
    if (self = [super init]) {
        // Use a low (utility) QoS level so health checks never compete with capture work
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel
                                            qualityOfService:QOS_CLASS_UTILITY
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextCamera];
        _frameCheckTasks = [NSMutableDictionary dictionary];
    }
    return self;
}

- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    SCTraceODPCompatibleStart(2);
    // Add exposure, ISO, brightness
    NSMutableDictionary *metadata = [NSMutableDictionary dictionary];
    if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) {
        return metadata;
    }
    CFDictionaryRef exifAttachments =
        (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
    NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
    if (exposureTimeNum) {
        metadata[@"exposure"] = exposureTimeNum;
    }
    NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
    if (isoSpeedRatingNum) {
        metadata[@"iso"] = isoSpeedRatingNum;
    }
    NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
    if (brightnessNum) {
        float brightness = [brightnessNum floatValue];
        metadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
    }

    return metadata;
}
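
// Shape of the result (editor's note, illustrative values only):
//   @{ @"exposure" : @(0.033), @"iso" : @400, @"brightness" : @(2.5) }
// Keys are omitted when the corresponding EXIF attachment is missing, and a
// non-finite brightness is clamped to @(0) so the dictionary stays loggable.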

- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
{
    SCTraceODPCompatibleStart(2);
    // Add exposure, ISO, brightness
    NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary];
    CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata;
    NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
    if (exposureTimeNum) {
        newMetadata[@"exposure"] = exposureTimeNum;
    }
    NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
    if (isoSpeedRatingNum) {
        newMetadata[@"iso"] = isoSpeedRatingNum;
    }
    NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
    if (brightnessNum) {
        float brightness = [brightnessNum floatValue];
        newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
    }

    return newMetadata;
}

- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
    [metadata addEntriesFromDictionary:extraInfo];
    return metadata;
}

- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
                            photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                     lensEnabled:(BOOL)lensesEnabled
                                          lensID:(NSString *)lensID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
    metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);

    metadata[@"lens_enabled"] = @(lensesEnabled);
    if (lensesEnabled) {
        metadata[@"lens_id"] = lensID ?: @"";
    }

    return metadata;
}

- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
                        photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                 lensEnabled:(BOOL)lensesEnabled
                                      lensID:(NSString *)lensID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata];
    newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);

    newMetadata[@"lens_enabled"] = @(lensesEnabled);
    if (lensesEnabled) {
        newMetadata[@"lens_id"] = lensID ?: @"";
    }

    return newMetadata;
}

- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil);
    NSMutableDictionary *properties = [NSMutableDictionary dictionary];
    // File size
    properties[@"file_size"] = @([asset fileSize]);
    // Duration
    properties[@"duration"] = @(CMTimeGetSeconds(asset.duration));
    // Video track count
    NSArray<AVAssetTrack *> *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    properties[@"video_track_count"] = @(videoTracks.count);
    if (videoTracks.count > 0) {
        // Video bitrate
        properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]);
        // Frame rate
        properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]);
    }
    // Audio track count
    NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    properties[@"audio_track_count"] = @(audioTracks.count);
    if (audioTracks.count > 0) {
        // Audio bitrate
        properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]);
    }
    // Playable
    properties[@"playable"] = @(asset.isPlayable);
    return properties;
}
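
// Usage sketch (editor's illustration; `outputURL` is a hypothetical recording URL):
//
//   AVURLAsset *asset = [AVURLAsset assetWithURL:outputURL];
//   NSDictionary *props = [[SCManagedFrameHealthChecker sharedInstance] getPropertiesFromAsset:asset];
//   // => file_size, duration, video/audio track counts and bitrates, playable flag,
//   //    later merged into the frame health report as videoProperties.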

#pragma mark - Image snap

- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
                             captureSettings:(NSDictionary *)captureSettings
                            captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture
                                         targetObject:image
                                             metadata:captureSettings];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

- (void)checkImageHealthForPreTranscoding:(UIImage *)image
                                 metadata:(NSDictionary *)metadata
                         captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_TRANSCODING - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

- (void)checkImageHealthForPostTranscoding:(NSData *)imageData
                                  metadata:(NSDictionary *)metadata
                          captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_TRANSCODING - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding
                                         targetObject:imageData
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}
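
// Typical call sequence for one image snap (editor's sketch using the methods in
// this file; `checker`, `settings`, `meta`, and `sessionID` are hypothetical):
//
//   [checker checkImageHealthForCaptureFrameImage:capturedImage
//                                 captureSettings:settings
//                                captureSessionID:sessionID];     // at capture
//   [checker checkImageHealthForPreTranscoding:editedImage
//                                     metadata:meta
//                             captureSessionID:sessionID];        // before transcoding
//   [checker checkImageHealthForPostTranscoding:jpegData
//                                      metadata:meta
//                              captureSessionID:sessionID];       // after transcoding
//   [checker reportFrameHealthCheckForCaptureSessionID:sessionID]; // flush & log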

#pragma mark - Video snap
- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
                                    metedata:(NSDictionary *)metadata
                            captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

- (void)checkVideoHealthForOverlayImage:(UIImage *)image
                               metedata:(NSDictionary *)metadata
                       captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty");
        return;
    }
    // Overlay image could be nil
    if (!image) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil.");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
                                           metedata:(NSDictionary *)metadata
                                         properties:(NSDictionary *)properties
                                   captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding
                                         targetObject:image
                                             metadata:metadata
                                      videoProperties:properties];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

#pragma mark - Task management
- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (!captureSessionID) {
        SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil");
        return;
    }
    [self _asynchronouslyCheckForCaptureSessionID:captureSessionID];
}

#pragma mark - Private functions

/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength.
- (UIImage *)_unifyImage:(UIImage *)sourceImage
{
    CGFloat sourceWidth = sourceImage.size.width;
    CGFloat sourceHeight = sourceImage.size.height;

    if (sourceWidth == 0.0 || sourceHeight == 0.0) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size");
        return sourceImage;
    }

    CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength;

    CGFloat widthScalingFactor = maxEdgeLength / sourceWidth;
    CGFloat heightScalingFactor = maxEdgeLength / sourceHeight;

    CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor);

    if (scalingFactor >= 1) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image.");
        return sourceImage;
    }

    CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor);

    SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size),
                        NSStringFromCGSize(targetSize));
    return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale];
}
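
// Worked example (editor's note; the max edge constant's real value is defined
// elsewhere, 240 here is illustrative): a 1080x1920 source gives scaling factors
// 240/1080 ≈ 0.222 and 240/1920 = 0.125; MIN picks 0.125, so the target size is
// 135x240 and the longer edge lands exactly on the cap.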

- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        return;
    }
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);

        CFTimeInterval beforeScaling = CACurrentMediaTime();
        if (newTask.targetObject) {
            if ([newTask.targetObject isKindOfClass:[UIImage class]]) {
                UIImage *sourceImage = (UIImage *)newTask.targetObject;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else if ([newTask.targetObject isKindOfClass:[NSData class]]) {
                UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject];
                CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime();
                SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f",
                                    betweenDecodingAndScaling - beforeScaling);
                beforeScaling = betweenDecodingAndScaling;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else {
                SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@",
                                     NSStringFromClass([newTask.targetObject class]));
            }
            newTask.targetObject = nil;
        }
        SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling);

        NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID];
        if (!taskQueue) {
            taskQueue = [NSMutableArray array];
            _frameCheckTasks[captureSessionID] = taskQueue;
        }
        // Remove any previous task of the same type to avoid redundant work -
        // for example, repeatedly tapping the Send button and then the Back button
        // would otherwise pile up PRE_TRANSCODING and POST_TRANSCODING tasks.
        // (Look up the index first rather than removing inside a for-in loop, which
        // would mutate the array while it is being fast-enumerated.)
        NSUInteger duplicateIndex =
            [taskQueue indexOfObjectPassingTest:^BOOL(SCManagedFrameHealthCheckerTask *task, NSUInteger idx,
                                                      BOOL *stop) {
                return task.type == newTask.type;
            }];
        if (duplicateIndex != NSNotFound) {
            [taskQueue removeObjectAtIndex:duplicateIndex];
        }

        [taskQueue addObject:newTask];
    }];
}

- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
        if (!tasksQueue) {
            return;
        }

        // Check the free memory; if it is too low, drop these tasks
        double memFree = [SCLogger memoryFreeMB];
        if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
            SCLogCoreCameraWarning(
                @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree,
                captureSessionID);
            [_frameCheckTasks removeObjectForKey:captureSessionID];
            return;
        }

        NSMutableArray *frameHealthInfoArray = [NSMutableArray array];
        // Execute all tasks and wait for completion
        [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
            SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj;
            NSMutableDictionary *frameHealthInfo;
            UIImage *image = task.unifiedImage;

            if (image) {
                // Get frame health info
                frameHealthInfo = [self _getFrameHealthInfoForImage:image
                                                             source:[task textForSource]
                                                           snapType:[task textForSnapType]
                                                           metadata:task.metadata
                                                    sourceImageSize:task.sourceImageSize
                                                   captureSessionID:captureSessionID];
                NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"];
                NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"];
                NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"];
                if ([isTotallyBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black;
                } else if ([isPossibleBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black;
                } else if ([hasExecutionError boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Execution_Error;
                }
            } else {
                frameHealthInfo = [NSMutableDictionary dictionary];
                task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap;
            }

            if (frameHealthInfo) {
                frameHealthInfo[@"frame_source"] = [task textForSource];
                frameHealthInfo[@"snap_type"] = [task textForSnapType];
                frameHealthInfo[@"error_type"] = [task textForErrorType];
                frameHealthInfo[@"capture_session_id"] = captureSessionID;
                frameHealthInfo[@"metadata"] = task.metadata;
                if (task.videoProperties.count > 0) {
                    [frameHealthInfo addEntriesFromDictionary:task.videoProperties];
                }
                [frameHealthInfoArray addObject:frameHealthInfo];
            }

            // Release the image as soon as possible to mitigate the memory pressure
            task.unifiedImage = nil;
        }];

        for (NSDictionary *frameHealthInfo in frameHealthInfoArray) {
            if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) {
                // TODO: Zi Kai Chen - add this back. Normally we use id<SCManiphestTicketCreator> for
                // this, but as this is a shared instance we cannot easily inject it. The work would
                // involve making this not a shared instance.
                // SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
                //                     JSONStringSerializeObjectForLogging(frameHealthInfo));
            }

            [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
                                                           parameters:frameHealthInfo
                                                     secretParameters:nil
                                                              metrics:nil];
        }

        [_frameCheckTasks removeObjectForKey:captureSessionID];
    }];
}

- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
                                              source:(NSString *)source
                                            snapType:(NSString *)snapType
                                            metadata:(NSDictionary *)metadata
                                     sourceImageSize:(CGSize)sourceImageSize
                                    captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
    size_t samplesCount = 0;
    CFTimeInterval start = CACurrentMediaTime();
    CGImageRef imageRef = image.CGImage;
    size_t imageWidth = CGImageGetWidth(imageRef);
    size_t imageHeight = CGImageGetHeight(imageRef);
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
    CFTimeInterval getImageDataTime = CACurrentMediaTime();
    if (pixelData) {
        const Byte *imageData = CFDataGetBytePtr(pixelData);
        NSInteger stripLength = 0;
        NSInteger bufferLength = 0;
        NSInteger imagePixels = imageWidth * imageHeight;
        // Limit the max sampled frames
        if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
            stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
            bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
        } else {
            stripLength = 4;
            bufferLength = imagePixels;
        }
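        // Worked example (editor's note; the real max-samples constant is defined
        // elsewhere, 100000 here is illustrative): a 3000x4000 image has 12,000,000
        // pixels, so stripLength = 12000000 / 100000 * 4 = 480 bytes - one RGBA
        // pixel sampled out of every 120 - and bufferLength caps at 100000 samples.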
        samplesCount = bufferLength;

        // Avoid dividing by zero
        if (samplesCount != 0) {
            FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
                                              stripLength:stripLength
                                             bufferLength:bufferLength
                                               bitmapInfo:CGImageGetBitmapInfo(imageRef)];
            float averageR = sumRGBA.R / samplesCount;
            float averageG = sumRGBA.G / samplesCount;
            float averageB = sumRGBA.B / samplesCount;
            float averageA = sumRGBA.A / samplesCount;
            parameters[@"average_sampled_rgba_r"] = @(averageR);
            parameters[@"average_sampled_rgba_g"] = @(averageG);
            parameters[@"average_sampled_rgba_b"] = @(averageB);
            parameters[@"average_sampled_rgba_a"] = @(averageA);
            parameters[@"origin_frame_width"] = @(sourceImageSize.width);
            parameters[@"origin_frame_height"] = @(sourceImageSize.height);
            // Also report "possible black" to identify intentionally black snaps taken by covering the camera.
            // Normally averageA is very near 255, but for the video overlay image it is very small,
            // so we require averageA > 250 to avoid treating the video overlay image as possibly black.
            if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
                parameters[@"is_possible_black"] = @(YES);
                // Use this parameter for BigQuery conditions in Grafana
                if (averageR == 0 && averageG == 0 && averageB == 0) {
                    parameters[@"is_total_black"] = @(YES);
                }
            }
        } else {
            SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
                                 source, captureSessionID);
            parameters[@"execution_error"] = @(YES);
        }
        CFRelease(pixelData);
    } else {
        SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
                             captureSessionID);
        parameters[@"execution_error"] = @(YES);
    }
    parameters[@"sample_size"] = @(samplesCount);

    CFTimeInterval end = CACurrentMediaTime();
    SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f",
                        snapType, source, getImageDataTime - start, end - getImageDataTime, end - start);
    return parameters;
}

- (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData
                     stripLength:(NSInteger)stripLength
                    bufferLength:(NSInteger)bufferLength
                      bitmapInfo:(CGBitmapInfo)bitmapInfo
{
    SCTraceODPCompatibleStart(2);
    FloatRGBA sumRGBA;
    // The alpha info and byte order are packed fields inside CGBitmapInfo, so mask
    // them out before comparing; a plain bitwise AND against the enum values can
    // spuriously match unrelated configurations.
    if ((bitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaPremultipliedFirst &&
        (bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder32Little) {
        // BGRA
        sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength);
        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
        sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
    } else {
        // TODO: support other pixel layouts besides RGBA
        sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength);
        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
        sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
    }
    return sumRGBA;
}
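
// Layout reminder (editor's note): for a premultiplied-first, 32-bit little-endian
// bitmap each pixel is stored B,G,R,A in memory, which is why the byte offsets
// 0..3 above map to B, G, R, A in the first branch and to R, G, B, A otherwise.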

@end
@@ -0,0 +1,18 @@
//
//  SCManagedFrontFlashController.h
//  Snapchat
//
//  Created by Liu Liu on 5/4/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import <Foundation/Foundation.h>

// This object is only accessed on the SCManagedCapturer thread
@interface SCManagedFrontFlashController : NSObject

@property (nonatomic, assign) BOOL flashActive;

@property (nonatomic, assign) BOOL torchActive;

@end
@@ -0,0 +1,105 @@
//
//  SCManagedFrontFlashController.m
//  Snapchat
//
//  Created by Liu Liu on 5/4/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedFrontFlashController.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTrace.h>

@import UIKit;

@implementation SCManagedFrontFlashController {
    BOOL _active;
    UIView *_brightView;
    CGFloat _brightnessWhenFlashAndTorchOff;
}

- (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive
{
    SCTraceStart();
    SCAssertMainThread();
    BOOL wasActive = _active;
    _active = flashActive || torchActive;
    if (!wasActive && _active) {
        [self _activateFlash:flashActive];
    } else if (wasActive && !_active) {
        [self _deactivateFlash];
    }
}

- (void)_activateFlash:(BOOL)flashActive
{
    UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow];
    if (!_brightView) {
        CGRect frame = [mainWindow bounds];
        CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame));
        frame.size = CGSizeMake(maxLength, maxLength);
        // Using the max length on either side to be compatible with different orientations
        _brightView = [[UIView alloc] initWithFrame:frame];
        _brightView.userInteractionEnabled = NO;
        _brightView.backgroundColor = [UIColor whiteColor];
    }
    _brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness;
    SCLogGeneralInfo(@"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0",
                     _brightnessWhenFlashAndTorchOff);
    [self _brightenLoop];
    _brightView.alpha = flashActive ? 1.0 : 0.75;
    [mainWindow addSubview:_brightView];
}

- (void)_deactivateFlash
{
    SCLogGeneralInfo(@"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f",
                     [UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff);
    [UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff;
    if (_brightView) {
        [_brightView removeFromSuperview];
    }
}

- (void)_brightenLoop
{
    if (_active) {
        SCLogGeneralInfo(@"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0",
                         [UIScreen mainScreen].brightness);
        [UIScreen mainScreen].brightness = 1.0;
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) {
            [self _brightenLoop];
        });
    } else {
        SCLogGeneralInfo(@"[SCManagedFrontFlashController] Recording is done, brighten loop ends");
    }
}
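
// Design note (editor's inference from the code above): the loop re-asserts full
// brightness every half second while the fake front flash is active because the
// system or the user can lower [UIScreen mainScreen].brightness at any time; the
// white _brightView overlay at alpha 1.0 (flash) or 0.75 (torch) supplies the
// illumination, and the loop stops itself once _active flips to NO.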

- (void)setFlashActive:(BOOL)flashActive
{
    SCTraceStart();
    if (_flashActive != flashActive) {
        _flashActive = flashActive;
        BOOL torchActive = _torchActive;
        runOnMainThreadAsynchronously(^{
            [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];
        });
    }
}

- (void)setTorchActive:(BOOL)torchActive
{
    SCTraceStart();
    if (_torchActive != torchActive) {
        _torchActive = torchActive;
        BOOL flashActive = _flashActive;
        runOnMainThreadAsynchronously(^{
            [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];
        });
    }
}

@end
@@ -0,0 +1,13 @@
//
//  SCManagedLegacyStillImageCapturer.h
//  Snapchat
//
//  Created by Chao Pang on 10/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

@interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer

@end
@@ -0,0 +1,460 @@
//
//  SCManagedLegacyStillImageCapturer.m
//  Snapchat
//
//  Created by Chao Pang on 10/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedLegacyStillImageCapturer.h"

#import "AVCaptureConnection+InputDevice.h"
#import "SCCameraTweaks.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturer.h"
#import "SCManagedStillImageCapturer_Protected.h"
#import "SCStillImageCaptureVideoInputMethod.h"

#import <SCCrashLogger/SCCrashLogger.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCWebP/UIImage+WebP.h>

@import ImageIO;

static NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain =
    @"kSCLegacyStillImageCaptureDefaultMethodErrorDomain";
static NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain =
    @"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain";

static NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000;
static NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001;

@implementation SCManagedLegacyStillImageCapturer {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVCaptureStillImageOutput *_stillImageOutput;
#pragma clang diagnostic pop

    BOOL _shouldCapture;
    NSUInteger _retries;

    SCStillImageCaptureVideoInputMethod *_videoFileMethod;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
    SCTraceStart();
    self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
    if (self) {
        [self setupWithSession:session];
    }
    return self;
}

- (void)setupWithSession:(AVCaptureSession *)session
{
    SCTraceStart();
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
#pragma clang diagnostic pop
    _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    [self setAsOutput:session];
}

- (void)setAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    if ([session canAddOutput:_stillImageOutput]) {
        [session addOutput:_stillImageOutput];
    }
}

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
    SCTraceStart();
    if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) {
        _stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled;
    }
}

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    // The legacy capturer is only used on devices running iOS versions below 10.2, which don't
    // support depth data, so this method is never called and needs no implementation.
}

- (void)enableStillImageStabilization
{
    SCTraceStart();
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) {
        _stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES;
    }
#pragma clang diagnostic pop
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    [session removeOutput:_stillImageOutput];
}

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    SCTraceStart();
    SCAssert(completionHandler, @"completionHandler shouldn't be nil");
    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
    _aspectRatio = aspectRatio;
    _zoomFactor = zoomFactor;
    _fieldOfView = fieldOfView;
    _state = state;
    _captureSessionID = captureSessionID;
    _shouldCaptureFromVideo = shouldCaptureFromVideo;
    SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point, otherwise we are destroying "
                                  @"the current completion handler.");
    _completionHandler = [completionHandler copy];
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];
    if (!_adjustingExposureManualDetect) {
        SCLogCoreCameraInfo(@"Capturing still image now");
        [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
        _shouldCapture = NO;
    } else {
        SCLogCoreCameraInfo(@"Waiting for exposure to adjust (at most 0.4 seconds), then capturing still image");
        _shouldCapture = YES;
        [self _deadlineCaptureStillImage];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        // Since this is handled on a different thread, dispatch back to the queue we operate on.
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        self->_adjustingExposureManualDetect = adjustingExposure;
        [self _didChangeAdjustingExposure:adjustingExposure
                             withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
    }];
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        self->_lightingConditionType = lightingCondition;
    }];
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, dispatch back to the queue we operate on.
        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
    }];
}

#pragma mark - Private methods

- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
{
    if (!adjustingExposure && self->_shouldCapture) {
        SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
        [self _captureStillImageWithExposureAdjustmentStrategy:strategy];
        self->_shouldCapture = NO;
    }
}

- (void)_deadlineCaptureStillImage
{
    SCTraceStart();
    // Use the SCManagedCapturer's private queue.
    [_performer perform:^{
        if (_shouldCapture) {
            [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
            _shouldCapture = NO;
        }
    }
            after:SCCameraTweaksExposureDeadline()];
}

- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy
{
    SCTraceStart();
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
    if (_shouldCaptureFromVideo) {
        [self captureStillImageFromVideoBuffer];
        return;
    }
    SCAssert(_stillImageOutput, @"stillImageOutput shouldn't be nil");
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput;
#pragma clang diagnostic pop
    AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput];
    SCManagedCapturerState *state = [_state copy];
    dispatch_block_t legacyStillImageCaptureBlock = ^{
        SCCAssertMainThread();
        // If the application is not in the background and we have a still image connection, do the
        // capture. Otherwise fail.
        if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
            [_performer performImmediatelyIfCurrentPerformer:^{
                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
                    _completionHandler;
                _completionHandler = nil;
                completionHandler(nil, nil,
                                  [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                      code:kSCManagedStillImageCapturerApplicationStateBackground
                                                  userInfo:nil]);
            }];
            return;
        }
#if !TARGET_IPHONE_SIMULATOR
        if (!captureConnection) {
            [_performer performImmediatelyIfCurrentPerformer:^{
                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
                    _completionHandler;
                _completionHandler = nil;
                completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                                code:kSCManagedStillImageCapturerNoStillImageConnection
                                                            userInfo:nil]);
            }];
            return;
        }
#endif
        // Select the appropriate image capture method
        if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
            if (!_videoFileMethod) {
                _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
            }
            [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCapture"];
            [[SCCoreCameraLogger sharedInstance]
                logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCapture"];
            [_videoFileMethod captureStillImageWithCapturerState:state
                successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
                    [self _legacyStillImageCaptureDidSucceedWithImageData:imageData
                                                             sampleBuffer:nil
                                                               cameraInfo:cameraInfo
                                                                    error:error];
                }
                failureBlock:^(NSError *error) {
                    [self _legacyStillImageCaptureDidFailWithError:error];
                }];
        } else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) {
                [self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput
                                                     captureConnection:captureConnection
                                                         capturerState:state];
            } else {
                [self _captureStillImageWithStillImageOutput:stillImageOutput
                                           captureConnection:captureConnection
                                               capturerState:state];
            }
#pragma clang diagnostic pop
        }
    };
    // We need to call this on the main thread, and blocking.
    [[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock];
}

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
                             captureConnection:(AVCaptureConnection *)captureConnection
                                 capturerState:(SCManagedCapturerState *)state
{
    [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"];
    @try {
        [stillImageOutput
            captureStillImageAsynchronouslyFromConnection:captureConnection
                                        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
                                            if (imageDataSampleBuffer) {
                                                NSData *imageData = [AVCaptureStillImageOutput
                                                    jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                                                [self _legacyStillImageCaptureDidSucceedWithImageData:imageData
                                                                                         sampleBuffer:imageDataSampleBuffer
                                                                                           cameraInfo:cameraInfoForBuffer(
                                                                                                          imageDataSampleBuffer)
                                                                                                error:error];
                                            } else {
                                                if ([error.domain isEqualToString:AVFoundationErrorDomain] &&
                                                    error.code == AVErrorUnknown) {
                                                    // iOS 7 "unknown error" (-11800); works if we retry
                                                    [self _legacyStillImageCaptureWillRetryWithError:error];
                                                } else {
                                                    [self _legacyStillImageCaptureDidFailWithError:error];
                                                }
                                            }
                                        }];
    } @catch (NSException *e) {
        [SCCrashLogger logHandledException:e];
        [self _legacyStillImageCaptureDidFailWithError:
                  [NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain
                                      code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException
                                  userInfo:@{
                                      @"exception" : e
                                  }]];
    }
}

- (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
                                       captureConnection:(AVCaptureConnection *)captureConnection
                                           capturerState:(SCManagedCapturerState *)state
{
    [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"];
    NSArray *bracketArray = [self _bracketSettingsArray:captureConnection];
    @try {
        [stillImageOutput
            captureStillImageBracketAsynchronouslyFromConnection:captureConnection
                                               withSettingsArray:bracketArray
                                               completionHandler:^(CMSampleBufferRef imageDataSampleBuffer,
                                                                   AVCaptureBracketedStillImageSettings *settings,
                                                                   NSError *err) {
                                                   if (!imageDataSampleBuffer) {
                                                       [self _legacyStillImageCaptureDidFailWithError:err];
                                                       return;
                                                   }
                                                   NSData *jpegData = [AVCaptureStillImageOutput
                                                       jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                                                   [self _legacyStillImageCaptureDidSucceedWithImageData:jpegData
                                                                                            sampleBuffer:imageDataSampleBuffer
                                                                                              cameraInfo:cameraInfoForBuffer(
                                                                                                             imageDataSampleBuffer)
                                                                                                   error:nil];
                                               }];
    } @catch (NSException *e) {
        [SCCrashLogger logHandledException:e];
        [self _legacyStillImageCaptureDidFailWithError:
                  [NSError errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain
                                      code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException
                                  userInfo:@{
                                      @"exception" : e
                                  }]];
    }
}
#pragma clang diagnostic pop

- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
{
    NSInteger const stillCount = 1;
    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
    AVCaptureDevice *device = [stillImageConnection inputDevice];
    AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
        manualExposureSettingsWithExposureDuration:device.exposureDuration
                                               ISO:AVCaptureISOCurrent];
    for (NSInteger i = 0; i < stillCount; i++) {
        [bracketSettingsArray addObject:settings];
    }
    return [bracketSettingsArray copy];
}
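
// Design note (editor's reading of the two methods above): the bracket holds a
// single shot at the device's current exposure duration and ISO, so bracketed
// capture is not used for HDR here - it is the vehicle that lets the deprecated
// AVCaptureStillImageOutput apply lens stabilization, which plain
// captureStillImageAsynchronouslyFromConnection: does not offer.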

- (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData
                                           sampleBuffer:(CMSampleBufferRef)sampleBuffer
                                             cameraInfo:(NSDictionary *)cameraInfo
                                                  error:(NSError *)error
{
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    if (sampleBuffer) {
        CFRetain(sampleBuffer);
    }
    [_performer performImmediatelyIfCurrentPerformer:^{
        UIImage *fullScreenImage = [self imageFromData:imageData
                                     currentZoomFactor:_zoomFactor
                                     targetAspectRatio:_aspectRatio
                                           fieldOfView:_fieldOfView
                                                 state:_state
                                          sampleBuffer:sampleBuffer];

        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
        _completionHandler = nil;
        completionHandler(fullScreenImage, cameraInfo, error);
        if (sampleBuffer) {
            CFRelease(sampleBuffer);
        }
    }];
}

- (void)_legacyStillImageCaptureDidFailWithError:(NSError *)error
{
    [_performer performImmediatelyIfCurrentPerformer:^{
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
        _completionHandler = nil;
        completionHandler(nil, nil, error);
    }];
}

- (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error
{
    if (_retries-- > 0) {
        [_performer perform:^{
            [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
        }
                after:kSCCameraRetryInterval];
    } else {
        [self _legacyStillImageCaptureDidFailWithError:error];
    }
}

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
#pragma clang diagnostic pop
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    NSArray *connections = [stillImageOutput.connections copy];
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                return connection;
            }
        }
    }
    return nil;
}

@end
@@ -0,0 +1,13 @@
//
//  SCManagedPhotoCapturer.h
//  Snapchat
//
//  Created by Chao Pang on 10/5/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

@interface SCManagedPhotoCapturer : SCManagedStillImageCapturer

@end
@@ -0,0 +1,667 @@
//
//  SCManagedPhotoCapturer.m
//  Snapchat
//
//  Created by Chao Pang on 10/5/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedPhotoCapturer.h"

#import "AVCaptureConnection+InputDevice.h"
#import "SCCameraTweaks.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturer.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedStillImageCapturer_Protected.h"
#import "SCStillImageCaptureVideoInputMethod.h"
#import "SCStillImageDepthBlurFilter.h"

#import <SCCrashLogger/SCCrashLogger.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SClogger+Performance.h>
#import <SCWebP/UIImage+WebP.h>

@import ImageIO;

static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain";

static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000;
static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001;

typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) {
    SCManagedPhotoCapturerStatusPrepareToCapture,
    SCManagedPhotoCapturerStatusWillCapture,
    SCManagedPhotoCapturerStatusDidFinishProcess,
};

@interface SCManagedPhotoCapturer () <AVCapturePhotoCaptureDelegate>
@end

@implementation SCManagedPhotoCapturer {
    AVCapturePhotoOutput *_photoOutput;

    BOOL _shouldCapture;
    BOOL _shouldEnableHRSI;
    BOOL _portraitModeCaptureEnabled;
    NSUInteger _retries;

    CGPoint _portraitModePointOfInterest;
    SCStillImageDepthBlurFilter *_depthBlurFilter;
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock;

    SCStillImageCaptureVideoInputMethod *_videoFileMethod;

    SCManagedPhotoCapturerStatus _status;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
    SCTraceStart();
    self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
    if (self) {
        [self setupWithSession:session];
        _portraitModePointOfInterest = CGPointMake(0.5, 0.5);
    }
    return self;
}

- (void)setupWithSession:(AVCaptureSession *)session
{
    SCTraceStart();
    _photoOutput = [[AVCapturePhotoOutput alloc] init];
    _photoOutput.highResolutionCaptureEnabled = YES;
    [self setAsOutput:session];
}

- (void)setAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    if ([session canAddOutput:_photoOutput]) {
        [session addOutput:_photoOutput];
    }
}

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    // Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since doing so causes
    // a black-frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings.
    // https://ph.sc-corp.net/T96228
    _shouldEnableHRSI = highResolutionStillImageOutputEnabled;
}

- (void)enableStillImageStabilization
{
    // Lens stabilization is enabled when configuring AVCapturePhotoSettings
    // instead of AVCapturePhotoOutput
    SCTraceStart();
}

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    _portraitModeCaptureEnabled = enabled;
    if (@available(iOS 11.0, *)) {
        _photoOutput.depthDataDeliveryEnabled = enabled;
    }
    if (enabled && _depthBlurFilter == nil) {
        _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];
    }
}
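
// Editor's note: AVCapturePhotoOutput raises an exception if depthDataDeliveryEnabled
// is set to YES while isDepthDataDeliverySupported is NO for the current session
// configuration, so callers are expected to enable portrait mode only on
// depth-capable (e.g. dual-camera) configurations.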

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    _portraitModePointOfInterest = pointOfInterest;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    [session removeOutput:_photoOutput];
}

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    SCTraceStart();
    SCAssert(completionHandler, @"completionHandler shouldn't be nil");
    SCAssert([_performer isCurrentPerformer], @"");
    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
    _aspectRatio = aspectRatio;
    _zoomFactor = zoomFactor;
    _fieldOfView = fieldOfView;
    _state = state;
    _captureSessionID = captureSessionID;
    _shouldCaptureFromVideo = shouldCaptureFromVideo;
    SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point, otherwise we are destroying "
                                  @"the current completion handler.");

    // The purpose of these lines is to attach a strong reference to self to the completion handler.
    // This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which
    // acts as a completion handler.
    // If self is deallocated during the call to -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:],
    // which may happen if any AVFoundation error occurs, then its callback method,
    // captureOutput:didFinish..., will not be called and the completion handler would be forgotten.
    // This comes with the risk of a memory leak: if for whatever reason the completion handler field
    // is never used and then unset, we have a permanent retain cycle.
    _callbackBlock = completionHandler;
    __typeof(self) strongSelf = self;
    _completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
        strongSelf->_callbackBlock(fullScreenImage, metadata, error);
        strongSelf->_callbackBlock = nil;
    };
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];

    if (!_adjustingExposureManualDetect) {
        SCLogCoreCameraInfo(@"Capturing still image now");
        [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
        _shouldCapture = NO;
    } else {
        SCLogCoreCameraInfo(@"Waiting for exposure to adjust (at most 0.4 seconds), then capturing still image");
        _shouldCapture = YES;
        [self _deadlineCapturePhoto];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, dispatch back to the queue we operate on.
        self->_adjustingExposureManualDetect = adjustingExposure;
        [self _didChangeAdjustingExposure:adjustingExposure
                             withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
    }];
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        self->_lightingConditionType = lightingCondition;
    }];
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, dispatch back to the queue we operate on.
        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
    }];
}

#pragma mark - AVCapturePhotoCaptureDelegate |
|||
|
|||
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput |
|||
didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer |
|||
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer |
|||
resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings |
|||
bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings |
|||
error:(NSError *)error |
|||
{ |
|||
SCTraceStart(); |
    if (photoSampleBuffer) {
        CFRetain(photoSampleBuffer);
    }
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]);
        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
            NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
                                                                            previewPhotoSampleBuffer:nil];
            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                  uniqueId:@"IMAGE"
                                                splitPoint:@"DID_FINISH_PROCESSING"];
            [self _capturePhotoFinishedWithImageData:imageData
                                        sampleBuffer:photoSampleBuffer
                                          cameraInfo:cameraInfoForBuffer(photoSampleBuffer)
                                               error:error];
        } else {
            SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
                                [self _photoCapturerStatusToString:self->_status]);
            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                    code:kSCManagedPhotoCapturerInconsistentStatus
                                                                userInfo:nil]];
        }
        CFRelease(photoSampleBuffer);
    }];
}

- (void)captureOutput:(AVCapturePhotoOutput *)output
    didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo
                       error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0)
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        NSData *imageData = [photo fileDataRepresentation];
        SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]);
        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
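            // When portrait mode is enabled, re-render the captured photo data through the
            // depth blur filter using the depth map delivered alongside the AVCapturePhoto.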
            if (@available(iOS 11.0, *)) {
                if (_portraitModeCaptureEnabled) {
                    RenderData renderData = {
                        .depthDataMap = photo.depthData.depthDataMap,
                        .depthBlurPointOfInterest = &_portraitModePointOfInterest,
                    };
                    imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData];
                }
            }

            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                  uniqueId:@"IMAGE"
                                                splitPoint:@"DID_FINISH_PROCESSING"];
            [self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error];
        } else {
            SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
                                [self _photoCapturerStatusToString:self->_status]);
            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                    code:kSCManagedPhotoCapturerInconsistentStatus
                                                                userInfo:nil]];
        }
    }];
}

- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
    willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
            if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) {
                self->_status = SCManagedPhotoCapturerStatusWillCapture;
                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                      uniqueId:@"IMAGE"
                                                    splitPoint:@"WILL_BEGIN_CAPTURE"];
                [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
            } else {
                SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@",
                                    [self _photoCapturerStatusToString:self->_status]);
            }
        }
    }];
}

- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
    didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
            if (self->_status == SCManagedPhotoCapturerStatusWillCapture ||
                self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) {
                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                      uniqueId:@"IMAGE"
                                                    splitPoint:@"DID_CAPTURE_PHOTO"];
                [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
            } else {
                SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@",
                                    [self _photoCapturerStatusToString:self->_status]);
            }
        }
    }];
}

#pragma mark - Private methods

- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
{
    if (!adjustingExposure && self->_shouldCapture) {
        SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
        [self _capturePhotoWithExposureAdjustmentStrategy:strategy];
        self->_shouldCapture = NO;
    }
}

- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData
                              sampleBuffer:(CMSampleBufferRef)sampleBuffer
                                cameraInfo:(NSDictionary *)cameraInfo
                                     error:(NSError *)error
{
    [self _photoCaptureDidSucceedWithImageData:imageData
                                  sampleBuffer:sampleBuffer
                                    cameraInfo:cameraInfo
                                         error:error];
    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
}

- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error
{
    [self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error];
    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
}

- (void)_deadlineCapturePhoto
{
    SCTraceStart();
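    // If exposure is still adjusting when the deadline fires, capture anyway using the
    // "deadline" strategy so the shutter can never hang while the device keeps hunting.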
    // Use the SCManagedCapturer's private queue.
    @weakify(self);
    [_performer perform:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if (self->_shouldCapture) {
            [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
            self->_shouldCapture = NO;
        }
    }
            after:SCCameraTweaksExposureDeadline()];
}

- (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy
{
    SCTraceStart();
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
    if (_shouldCaptureFromVideo) {
        [self captureStillImageFromVideoBuffer];
        return;
    }
    SCAssert([_performer isCurrentPerformer], @"");
    SCAssert(_photoOutput, @"_photoOutput shouldn't be nil");
    _status = SCManagedPhotoCapturerStatusPrepareToCapture;
    AVCapturePhotoOutput *photoOutput = _photoOutput;
    AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput];
    SCManagedCapturerState *state = [_state copy];
#if !TARGET_IPHONE_SIMULATOR
    if (!captureConnection) {
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
            _completionHandler;
        _completionHandler = nil;
        completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                        code:kSCManagedStillImageCapturerNoStillImageConnection
                                                    userInfo:nil]);
        // Bail out early: without a still image connection the capture below cannot succeed.
        return;
    }
#endif
    AVCapturePhotoSettings *photoSettings =
        [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state];
    // Select the appropriate image capture method
    if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
        if (!_videoFileMethod) {
            _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
        }
        [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
        [[SCCoreCameraLogger sharedInstance]
            logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
        [_delegate managedStillImageCapturerWillCapturePhoto:self];
        [_videoFileMethod captureStillImageWithCapturerState:state
            successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
                [_performer performImmediatelyIfCurrentPerformer:^{
                    [self _photoCaptureDidSucceedWithImageData:imageData
                                                  sampleBuffer:nil
                                                    cameraInfo:cameraInfo
                                                         error:error];
                }];
            }
            failureBlock:^(NSError *error) {
                [_performer performImmediatelyIfCurrentPerformer:^{
                    [self _photoCaptureDidFailWithError:error];
                }];
            }];
    } else {
        [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"];
        [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"];
        @try {
            [photoOutput capturePhotoWithSettings:photoSettings delegate:self];
        } @catch (NSException *e) {
            [SCCrashLogger logHandledException:e];
            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                    code:kSCManagedPhotoCapturerErrorEncounteredException
                                                                userInfo:@{ @"exception" : e }]];
        }
    }
}

- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
                                sampleBuffer:(CMSampleBufferRef)sampleBuffer
                                  cameraInfo:(NSDictionary *)cameraInfo
                                       error:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];

    UIImage *fullScreenImage = [self imageFromData:imageData
                                 currentZoomFactor:_zoomFactor
                                 targetAspectRatio:_aspectRatio
                                       fieldOfView:_fieldOfView
                                             state:_state
                                      sampleBuffer:sampleBuffer];
    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                          uniqueId:@"IMAGE"
                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, cameraInfo, error);
    }
}

- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
                                    metadata:(NSDictionary *)metadata
                                       error:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];

    UIImage *fullScreenImage = [self imageFromData:imageData
                                 currentZoomFactor:_zoomFactor
                                 targetAspectRatio:_aspectRatio
                                       fieldOfView:_fieldOfView
                                             state:_state
                                          metadata:metadata];
    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                          uniqueId:@"IMAGE"
                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, metadata, error);
    }
}

- (void)_photoCaptureDidFailWithError:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(nil, nil, error);
    }
}

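// Scans the photo output's connections for the one carrying video, which is the
// connection used for still image capture.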
- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    NSArray *connections = [photoOutput.connections copy];
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                return connection;
            }
        }
    }
    return nil;
}

- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                        captureConnection:(AVCaptureConnection *)captureConnection
                                             captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
                                        captureConnection:captureConnection
                                             captureState:state];
    } else {
        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
    }
}

- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
{
    // According to Apple documentation, AVCapturePhotoBracketSettings does not support flashMode,
    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
    // Besides, we only use AVCapturePhotoBracketSettings if capture settings need to be set manually.
    return !state.flashActive && !_portraitModeCaptureEnabled &&
           (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||
            [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);
}

- (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    // Specify the output file format
    AVCapturePhotoSettings *photoSettings =
        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];

    // Enable HRSI if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // Turn on flash if active and supported by device
    if (state.flashActive && state.flashSupported) {
        photoSettings.flashMode = AVCaptureFlashModeOn;
    }

    // Turn on stabilization if available.
    // Setting autoStillImageStabilizationEnabled doesn't seem to work during a video capture
    // session, but we enable it anyway since it is harmless.
    if (photoSettings.isAutoStillImageStabilizationEnabled) {
        photoSettings.autoStillImageStabilizationEnabled = YES;
    }

    if (_portraitModeCaptureEnabled) {
        if (@available(iOS 11.0, *)) {
            photoSettings.depthDataDeliveryEnabled = YES;
        }
    }

    return photoSettings;
}

- (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                               captureConnection:(AVCaptureConnection *)captureConnection
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];
    NSArray<AVCaptureBracketedStillImageSettings *> *bracketedSettings =
        [self _bracketSettingsArray:captureConnection withCaptureState:state];
    SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,
             @"Bracket photo count cannot exceed maximum count");
    // Specify the output file format and raw pixel format
    AVCapturePhotoBracketSettings *photoSettings =
        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType
                                                                  processedFormat:@{
                                                                      AVVideoCodecKey : AVVideoCodecJPEG
                                                                  }
                                                                bracketedSettings:bracketedSettings];

    // Enable HRSI if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // If lens stabilization is supported, enable it while the device is moving
    if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&
        [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {
        photoSettings.lensStabilizationEnabled = YES;
    }
    return photoSettings;
}

- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
                  withCaptureState:(SCManagedCapturerState *)state
{
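    // Build a single-shot manual-exposure "bracket": one settings entry at the device's
    // current exposure duration (stretched first if enhanced night mode is active) and the
    // current ISO.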
    NSInteger const stillCount = 1;
    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
    AVCaptureDevice *device = [stillImageConnection inputDevice];
    CMTime exposureDuration = device.exposureDuration;
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {
        exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];
    }
    AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
        manualExposureSettingsWithExposureDuration:exposureDuration
                                               ISO:AVCaptureISOCurrent];
    for (NSInteger i = 0; i < stillCount; i++) {
        [bracketSettingsArray addObject:settings];
    }
    return [bracketSettingsArray copy];
}

- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status
{
    switch (status) {
        case SCManagedPhotoCapturerStatusPrepareToCapture:
            return @"PhotoCapturerStatusPrepareToCapture";
        case SCManagedPhotoCapturerStatusWillCapture:
            return @"PhotoCapturerStatusWillCapture";
        case SCManagedPhotoCapturerStatusDidFinishProcess:
            return @"PhotoCapturerStatusDidFinishProcess";
    }
}

@end
@@ -0,0 +1,36 @@
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import <AvailabilityMacros.h>

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@protocol SCManagedRecordedVideo <NSObject, NSCoding, NSCopying>

@property (nonatomic, copy, readonly) NSURL *videoURL;

@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;

@property (nonatomic, copy, readonly) UIImage *placeholderImage;

@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

@end

@interface SCManagedRecordedVideo : NSObject <SCManagedRecordedVideo>

@property (nonatomic, copy, readonly) NSURL *videoURL;

@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;

@property (nonatomic, copy, readonly) UIImage *placeholderImage;

@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera;

@end
@@ -0,0 +1,180 @@
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedRecordedVideo.h"

#import <FastCoding/FastCoder.h>

@implementation SCManagedRecordedVideo

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera
{
    self = [super init];
    if (self) {
        _videoURL = [(NSObject *)videoURL copy];
        _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy];
        _placeholderImage = [(NSObject *)placeholderImage copy];
        _isFrontFacingCamera = isFrontFacingCamera;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _videoURL = [aDecoder decodeObjectForKey:@"videoURL"];
        _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@"rawVideoDataFileURL"];
        _placeholderImage = [aDecoder decodeObjectForKey:@"placeholderImage"];
        _isFrontFacingCamera = [aDecoder decodeBoolForKey:@"isFrontFacingCamera"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeObject:_videoURL forKey:@"videoURL"];
    [aCoder encodeObject:_rawVideoDataFileURL forKey:@"rawVideoDataFileURL"];
    [aCoder encodeObject:_placeholderImage forKey:@"placeholderImage"];
    [aCoder encodeBool:_isFrontFacingCamera forKey:@"isFrontFacingCamera"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
    [fasterCoder encodeBool:_isFrontFacingCamera];
    [fasterCoder encodeObject:_placeholderImage];
    [fasterCoder encodeObject:_rawVideoDataFileURL];
    [fasterCoder encodeObject:_videoURL];
}

- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
    _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool];
    _placeholderImage = (UIImage *)[fasterDecoder decodeObject];
    _rawVideoDataFileURL = (NSURL *)[fasterDecoder decodeObject];
    _videoURL = (NSURL *)[fasterDecoder decodeObject];
}

- (void)setObject:(id)val forUInt64Key:(uint64_t)key
{
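    // The uint64 cases below are field-name hashes emitted by the value-object.rb generator;
    // they must stay in sync with the keys listed in +fasterCodingKeys.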
    switch (key) {
        case 50783861721184594ULL:
            _placeholderImage = (UIImage *)val;
            break;
        case 13152167848358790ULL:
            _rawVideoDataFileURL = (NSURL *)val;
            break;
        case 48945309622713334ULL:
            _videoURL = (NSURL *)val;
            break;
    }
}

- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 11924284868025312ULL:
            _isFrontFacingCamera = (BOOL)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 17435789727352013688ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        4 /* Total */,
        FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (self == object) {
        return YES;
    }
    if (![object isMemberOfClass:[self class]]) {
        return NO;
    }
    SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object;
    if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) {
        return NO;
    }
    if (other.rawVideoDataFileURL != _rawVideoDataFileURL &&
        ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) {
        return NO;
    }
    if (other.placeholderImage != _placeholderImage &&
        ![(NSObject *)other.placeholderImage isEqual:_placeholderImage]) {
        return NO;
    }
    if (other.isFrontFacingCamera != _isFrontFacingCamera) {
        return NO;
    }
    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash],
                              (NSUInteger)_isFrontFacingCamera};
    NSUInteger result = subhashes[0];
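    // Fold the remaining subhashes together with a 64-bit shift-xor-add avalanche (similar
    // in spirit to Thomas Wang's integer hash) so every field perturbs the final value.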
    for (int i = 1; i < 4; i++) {
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\tvideoURL:%@\n", [_videoURL description]];
    [desc appendFormat:@"\trawVideoDataFileURL:%@\n", [_rawVideoDataFileURL description]];
    [desc appendFormat:@"\tplaceholderImage:%@\n", [_placeholderImage description]];
    [desc appendFormat:@"\tisFrontFacingCamera:%@\n", [@(_isFrontFacingCamera) description]];
    [desc appendString:@"}\n"];

    return [desc copy];
}

@end
@@ -0,0 +1,6 @@
interface SCManagedRecordedVideo
NSURL *videoURL;
NSURL *rawVideoDataFileURL;
UIImage *placeholderImage;
BOOL isFrontFacingCamera;
end
@@ -0,0 +1,92 @@
//
//  SCManagedStillImageCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCoreCameraLogger.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <SCLogger/SCCameraMetrics+ExposureAdjustment.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

SC_EXTERN_C_BEGIN

extern BOOL SCPhotoCapturerIsEnabled(void);

SC_EXTERN_C_END

@protocol SCPerforming;
@protocol SCManagedStillImageCapturerDelegate;
@class SCCaptureResource;

typedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,
                                                                                          NSDictionary *metadata,
                                                                                          NSError *error);

@interface SCManagedStillImageCapturer
    : NSObject <SCManagedDeviceCapacityAnalyzerListener, SCManagedCapturerListener, SCManagedVideoDataSourceListener> {
    SCManagedCapturerState *_state;
    BOOL _shouldCaptureFromVideo;
    BOOL _captureImageFromVideoImmediately;
    CGFloat _aspectRatio;
    float _zoomFactor;
    float _fieldOfView;
    BOOL _adjustingExposureManualDetect;
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler;
}

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource;

SC_INIT_AND_NEW_UNAVAILABLE;

@property (nonatomic, weak) id<SCManagedStillImageCapturerDelegate> delegate;

- (void)setupWithSession:(AVCaptureSession *)session;

- (void)setAsOutput:(AVCaptureSession *)session;

- (void)removeAsOutput:(AVCaptureSession *)session;

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled;

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled;

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest;

- (void)enableStillImageStabilization;

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler;

- (void)captureStillImageFromVideoBuffer;

@end

@protocol SCManagedStillImageCapturerDelegate <NSObject>

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer;

- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@optional

- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@end
@@ -0,0 +1,399 @@
//
//  SCManagedStillImageCapturer.m
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCLogger+Camera.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedLegacyStillImageCapturer.h"
#import "SCManagedPhotoCapturer.h"
#import "SCManagedStillImageCapturerHandler.h"
#import "SCManagedStillImageCapturer_Protected.h"

#import <SCFoundation/NSException+Exceptions.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCWebP/UIImage+WebP.h>

#import <ImageIO/ImageIO.h>

NSString *const kSCManagedStillImageCapturerErrorDomain = @"kSCManagedStillImageCapturerErrorDomain";

NSInteger const kSCCameraShutterSoundID = 1108;

#if !TARGET_IPHONE_SIMULATOR
NSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101;
#endif
NSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102;

// We will do the image capture after 0.4 seconds regardless of whether camera adjustment is still in progress.
NSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4;
NSTimeInterval const kSCCameraRetryInterval = 0.1;

BOOL SCPhotoCapturerIsEnabled(void)
{
    // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2
    return SC_AT_LEAST_IOS_10_2;
}

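// Extracts the EXIF attachments from a captured sample buffer and reduces them to the two
// fields the capture pipeline logs: the ISO speed rating and the brightness value.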
NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer)
{
    CFDictionaryRef exifAttachments =
        (CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL);
    float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue];
    NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue];
    return @{
        (__bridge NSString *)kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating),
        (__bridge NSString *)kCGImagePropertyExifBrightnessValue : @(brightness)
    };
}

@implementation SCManagedStillImageCapturer

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource
{
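    // Route to the modern AVCapturePhotoOutput-based capturer where available (iOS >= 10.2),
    // and fall back to the legacy still image capturer otherwise.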
    if (SCPhotoCapturerIsEnabled()) {
        return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                     performer:captureResource.queuePerformer
                                            lensProcessingCore:captureResource.lensProcessingCore
                                                      delegate:captureResource.stillImageCapturerHandler];
    } else {
        return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                                performer:captureResource.queuePerformer
                                                       lensProcessingCore:captureResource.lensProcessingCore
                                                                 delegate:captureResource.stillImageCapturerHandler];
    }
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensAPI
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
    self = [super init];
    if (self) {
        _session = session;
        _performer = performer;
        _lensAPI = lensAPI;
        _delegate = delegate;
    }
    return self;
}

- (void)setupWithSession:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setAsOutput:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
    UNIMPLEMENTED_METHOD;
}

- (void)enableStillImageStabilization
{
    UNIMPLEMENTED_METHOD;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    UNIMPLEMENTED_METHOD;
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    UNIMPLEMENTED_METHOD;
}

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    UNIMPLEMENTED_METHOD;
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    UNIMPLEMENTED_METHOD;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *metadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer
                                                         photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                                  lensEnabled:state.lensesActive
                                                                       lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:metadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *newMetadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata
                                                     photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                              lensEnabled:state.lensesActive
                                                                   lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:newMetadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state
{
    UIImage *fullScreenImage = image;
    if (state.lensesActive && _lensAPI.isLensApplied) {
        fullScreenImage = [_lensAPI processImage:fullScreenImage
                                    maxPixelSize:[_lensAPI maxPixelSize]
                                  devicePosition:state.devicePosition
                                     fieldOfView:fieldOfView];
    }
    // Resize and crop
    return [self resizeImage:fullScreenImage currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio];
}

- (UIImage *)resizeImage:(UIImage *)image
       currentZoomFactor:(float)currentZoomFactor
       targetAspectRatio:(CGFloat)targetAspectRatio
{
    SCTraceStart();
    if (currentZoomFactor == 1) {
        return SCCropImageToTargetAspectRatio(image, targetAspectRatio);
    } else {
        @autoreleasepool {
            return [self resizeImageUsingCG:image
                          currentZoomFactor:currentZoomFactor
                          targetAspectRatio:targetAspectRatio
                               maxPixelSize:[_lensAPI maxPixelSize]];
        }
    }
}

- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage
              currentZoomFactor:(float)currentZoomFactor
              targetAspectRatio:(CGFloat)targetAspectRatio
                   maxPixelSize:(CGFloat)maxPixelSize
{
    size_t imageWidth = CGImageGetWidth(inputImage.CGImage);
    size_t imageHeight = CGImageGetHeight(inputImage.CGImage);
    SCLogGeneralInfo(@"Captured still image at %dx%d", (int)imageWidth, (int)imageHeight);
    size_t targetWidth, targetHeight;
    float zoomFactor = currentZoomFactor;
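    // Scale the long edge down to maxPixelSize; the "+ imageWidth / 2" (or "+ imageHeight / 2")
    // term rounds the integer division to the nearest pixel instead of truncating.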
    if (imageWidth > imageHeight) {
        targetWidth = maxPixelSize;
        targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth;
        // Update zoom factor here
        zoomFactor *= (float)maxPixelSize / imageWidth;
    } else {
        targetHeight = maxPixelSize;
        targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight;
        zoomFactor *= (float)maxPixelSize / imageHeight;
    }
    if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) {
        SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio,
                                     &targetWidth, &targetHeight);
    }
    CGContextRef context =
        CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage),
                              CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8,
                              CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage));
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGContextDrawImage(context, CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor,
                                           targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor,
                                           imageWidth * zoomFactor, imageHeight * zoomFactor),
                       inputImage.CGImage);
    CGImageRef thumbnail = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    UIImage *image =
        [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation];
    CGImageRelease(thumbnail);
    return image;
}

- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration
{
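    // Lengthen the exposure in low light: 1.5x for dark scenes and 2.5x for extremely dark
    // ones, as classified by the device capacity analyzer's lighting condition.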
    CMTime adjustedExposureDuration = exposureDuration;
    if (_lightingConditionType == SCCapturerLightingConditionTypeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5);
    } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5);
    }
    return adjustedExposureDuration;
}

#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately);
    _captureImageFromVideoImmediately = NO;
    @weakify(self);
    CFRetain(sampleBuffer);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [self _didCapturePhotoFromVideoBuffer];
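        // The sensor delivers landscape frames: rotate right for the back camera, and use a
        // mirrored orientation for the front camera so the result matches the preview.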
        UIImageOrientation orientation = devicePosition == SCManagedCaptureDevicePositionBack
                                             ? UIImageOrientationRight
                                             : UIImageOrientationLeftMirrored;
        UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer)
                                                   backingType:UIImageBackingTypeCGImage
                                                   orientation:orientation
                                                       context:[CIContext contextWithOptions:nil]];
        UIImage *fullScreenImage = [self imageFromImage:videoImage
                                      currentZoomFactor:_zoomFactor
                                      targetAspectRatio:_aspectRatio
                                            fieldOfView:_fieldOfView
                                                  state:_state];
        NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy];
        cameraInfo[@"capture_image_from_video_buffer"] = @"enabled";
        [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo];
        CFRelease(sampleBuffer);
    }];
}

- (void)_willBeginCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
        }
    }];
}

- (void)_didCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
        }
    }];
}

- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        [[SCCoreCameraLogger sharedInstance]
            logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
            _completionHandler;
        _completionHandler = nil;
        if (completionHandler) {
            completionHandler(image, cameraInfo, nil);
        }
    }];
}

- (void)captureStillImageFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil);
        [self _willBeginCapturePhotoFromVideoBuffer];
        self->_captureImageFromVideoImmediately = YES;
    }];
}

@end
@@ -0,0 +1,21 @@
//
//  SCManagedStillImageCapturerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturer.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;
@protocol SCDeviceMotionProvider, SCFileInputDecider;

@interface SCManagedStillImageCapturerHandler : NSObject <SCManagedStillImageCapturerDelegate>

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end
@@ -0,0 +1,85 @@
//
//  SCManagedStillImageCapturerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedStillImageCapturerHandler () {
    __weak SCCaptureResource *_captureResource;
}

@end

@implementation SCManagedStillImageCapturerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Will capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc]
                initWithPresentationTimestamp:kCMTimeZero
                                  fieldOfView:_captureResource.device.fieldOfView];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           willCapturePhoto:state
                                             sampleMetadata:sampleMetadata];
            });
        }
    }];
}

- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCapturePhoto:state];
            });
        }
    }];
}

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.deviceMotionProvider.isUnderDeviceMotion;
}

- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.fileInputDecider.shouldProcessFileInput;
}

@end
@@ -0,0 +1,63 @@
//
//  SCManagedStillImageCapturer_Protected.h
//  Snapchat
//
//  Created by Chao Pang on 10/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

SC_EXTERN_C_BEGIN
extern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer);
SC_EXTERN_C_END

extern NSString *const kSCManagedStillImageCapturerErrorDomain;

#if !TARGET_IPHONE_SIMULATOR
extern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection;
#endif
extern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground;

// We will do the image capture after 0.4 seconds regardless of whether camera adjustment is still in progress.
extern NSTimeInterval const kSCManagedStillImageCapturerDeadline;
extern NSTimeInterval const kSCCameraRetryInterval;

@protocol SCManagedCapturerLensAPI;

@interface SCManagedStillImageCapturer () {
@protected
    id<SCManagedCapturerLensAPI> _lensAPI;
    id<SCPerforming> _performer;
    AVCaptureSession *_session;
    id<SCManagedStillImageCapturerDelegate> __weak _delegate;
    NSString *_captureSessionID;
    SCCapturerLightingConditionType _lightingConditionType;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata;

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state;

- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration;

@end
@@ -0,0 +1,24 @@
//
//  SCManagedVideoARDataSource.h
//  Snapchat
//
//  Created by Eyal Segal on 20/10/2017.
//

#import "SCCapturerDefines.h"

#import <SCCameraFoundation/SCManagedVideoDataSource.h>

#import <ARKit/ARKit.h>

@protocol SCManagedVideoARDataSource <SCManagedVideoDataSource>

@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0);

#ifdef SC_USE_ARKIT_FACE
@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0);
#endif

@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0);

@end
@@ -0,0 +1,102 @@
//
//  SCManagedVideoCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedRecordedVideo.h"
#import "SCManagedVideoCapturerOutputSettings.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCCameraFoundation/SCManagedAudioDataSource.h>
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <SCFoundation/SCFuture.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

typedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error);

@class SCManagedVideoCapturer, SCTimedTask;

@protocol SCManagedVideoCapturerDelegate <NSObject>

// All these callbacks are invoked on a private queue for video recording channels

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)videoProviderFuture
                          videoSize:(CGSize)videoSize
                   placeholderImage:(UIImage *)placeholderImage
                            session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
                        session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
            didFailWithError:(NSError *)error
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
                 didGetError:(NSError *)error
                     forType:(SCManagedVideoCapturerInfoType)type
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
       presentationTimestamp:(CMTime)presentationTimestamp;

@end

/**
 * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer
 * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output
 * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings.
 */
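/*
 * A minimal usage sketch (illustrative only; `performer`, `audioConfiguration`, `outputURL`,
 * `deviceFormat`, and `captureSessionID` are assumed to exist in the caller and are not part
 * of this header):
 *
 *   SCManagedVideoCapturer *capturer = [[SCManagedVideoCapturer alloc] initWithQueuePerformer:performer];
 *   capturer.delegate = self; // conforms to SCManagedVideoCapturerDelegate
 *   [capturer prepareForRecordingWithAudioConfiguration:audioConfiguration];
 *   [capturer startRecordingAsynchronouslyWithOutputSettings:nil // nil -> default output settings
 *                                         audioConfiguration:audioConfiguration
 *                                                maxDuration:10.0
 *                                                      toURL:outputURL
 *                                               deviceFormat:deviceFormat
 *                                                orientation:AVCaptureVideoOrientationPortrait
 *                                           captureSessionID:captureSessionID];
 *   // ... later, on any thread:
 *   [capturer stopRecordingAsynchronously];
 */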
@interface SCManagedVideoCapturer : NSObject <SCManagedVideoDataSourceListener, SCManagedAudioDataSource>

/**
 * Returns the output URL that was passed into the beginRecordingToURL method
 */
@property (nonatomic, copy, readonly) NSURL *outputURL;

@property (nonatomic, weak) id<SCManagedVideoCapturerDelegate> delegate;
@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession;
@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay;
@property (nonatomic, assign, readonly) BOOL audioQueueStarted;

- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer;

- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration;
- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                maxDuration:(NSTimeInterval)maxDuration
                                                                      toURL:(NSURL *)URL
                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                orientation:(AVCaptureVideoOrientation)videoOrientation
                                                           captureSessionID:(NSString *)captureSessionID;

- (void)stopRecordingAsynchronously;
- (void)cancelRecordingAsynchronously;

// Schedule a task to run; thread safe.
- (void)addTimedTask:(SCTimedTask *)task;

// Clear all tasks; thread safe.
- (void)clearTimedTasks;

@end
1107 ManagedCapturer/SCManagedVideoCapturer.m
File diff suppressed because it is too large.
@@ -0,0 +1,20 @@
//
//  SCManagedVideoCapturerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedVideoCapturer.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;

@interface SCManagedVideoCapturerHandler : NSObject <SCManagedVideoCapturerDelegate>

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end
@@ -0,0 +1,252 @@
//
//  SCManagedVideoCapturerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedVideoCapturerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzer.h"
#import "SCManagedFrontFlashController.h"
#import "SCManagedVideoFileStreamer.h"
#import "SCManagedVideoFrameSampler.h"
#import "SCManagedVideoStreamer.h"

#import <SCCameraFoundation/SCManagedDataSource.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedVideoCapturerHandler () {
    __weak SCCaptureResource *_captureResource;
}
@end

@implementation SCManagedVideoCapturerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did begin video recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                 didBeginVideoRecording:state
                                                session:sessionInfo];
        });
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        if ([_captureResource.fileInputDecider shouldProcessFileInput]) {
            [_captureResource.videoDataSource startStreaming];
        }
        SCTraceStart();
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                 didBeginAudioRecording:state
                                                session:sessionInfo];
        });
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
                          videoSize:(CGSize)videoSize
                   placeholderImage:(UIImage *)placeholderImage
                            session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Will stop recording. sessionId:%u placeholderImage:%@ videoSize:(%f, %f)",
                      sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            // Then, sync back to the main thread to announce that recording will finish
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                        willFinishRecording:state
                                                    session:sessionInfo
                                        recordedVideoFuture:recordedVideoFuture
                                                  videoSize:videoSize
                                           placeholderImage:placeholderImage];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
                        session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did succeed recording. sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            // Then, sync back to the main thread to announce that recording finished
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                         didFinishRecording:state
                                                    session:sessionInfo
                                              recordedVideo:recordedVideo];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
            didFailWithError:(NSError *)error
                     session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did fail recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           didFailRecording:state
                                                    session:sessionInfo
                                                      error:error];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                         didCancelRecording:state
                                                    session:sessionInfo];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
                 didGetError:(NSError *)error
                     forType:(SCManagedVideoCapturerInfoType)type
                     session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did get error. sessionId:%u errorType:%ld, error:%@", sessionInfo.sessionId, (long)type, error);
    [_captureResource.queuePerformer perform:^{
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                            didGetError:error
                                                forType:type
                                                session:sessionInfo];
        });
    }];
}

- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer
{
    SCTraceODPCompatibleStart(2);
    if (_captureResource.state.lensesActive) {
        return @{
            @"lens_active" : @(YES),
            @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null])
        };
    }
    return nil;
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
         presentationTimestamp:(CMTime)presentationTimestamp
{
    CFRetain(sampleBuffer);
    [_captureResource.queuePerformer perform:^{
        SCManagedCapturerSampleMetadata *sampleMetadata =
            [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp
                                                                       fieldOfView:_captureResource.device.fieldOfView];
        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                         didAppendVideoSampleBuffer:sampleBuffer
                                     sampleMetadata:sampleMetadata];
        CFRelease(sampleBuffer);
    }];
}

- (void)_videoRecordingCleanup
{
    SCTraceODPCompatibleStart(2);
    SCAssert(_captureResource.videoRecording, @"the cleanup function can only be called while "
                                              @"video recording is still in progress.");
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCLogCapturerInfo(@"Video recording cleanup. previous state:%@", _captureResource.state);
    [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];
    if (_captureResource.videoFrameSampler) {
        SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler;
        _captureResource.videoFrameSampler = nil;
        [_captureResource.announcer removeListener:sampler];
    }
    // Add back other listeners to the video streamer
    [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];
    if (!_captureResource.state.torchActive) {
        // Turn off the torch on the device that we specifically turned it on
        // for recording
        [_captureResource.device setTorchActive:NO];
        if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
            _captureResource.frontFlashController.torchActive = NO;
        }
    }

    // Unlock focus on both front and back cameras if they were locked.
    // Even if ARKit was being used during recording, it'll be shut down by the time we get here,
    // so DON'T match the ARKit check we use around [_ setRecording:YES]
    SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
    SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
    [front setRecording:NO];
    [back setRecording:NO];
    _captureResource.videoRecording = NO;
    if (_captureResource.state.lensesActive) {
        BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming;
        [_captureResource.lensProcessingCore setModifySource:modifySource];
    }
}

@end
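
Hypothetical wiring (not shown in this diff): the capture stack owns both objects and points the capturer's delegate at the handler, which re-dispatches every callback onto the queue performer and main thread as above.

SCManagedVideoCapturerHandler *handler =
    [[SCManagedVideoCapturerHandler alloc] initWithCaptureResource:captureResource];
videoCapturer.delegate = handler; // callbacks fan out via _captureResource.announcer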
@@ -0,0 +1,27 @@
//
//  SCManagedVideoCapturerLogger.h
//  Snapchat
//
//  Created by Pinlin on 12/04/2017.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

static NSString *const kSCCapturerStartingStepAudioSession = @"audio_session";
static NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @"transcoding_video_bitrate";
static NSString *const kSCCapturerStartingStepOutputSettings = @"output_settings";
static NSString *const kSCCapturerStartingStepVideoFrameRawData = @"video_frame_raw_data";
static NSString *const kSCCapturerStartingStepAudioRecording = @"audio_recording";
static NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @"asset_writer_config";
static NSString *const kSCCapturerStartingStepStartingWriting = @"start_writing";
static NSString *const kCapturerStartingTotalDelay = @"total_delay";

@interface SCManagedVideoCapturerLogger : NSObject

- (void)prepareForStartingLog;
- (void)logStartingStep:(NSString *)stepName;
- (void)endLoggingForStarting;
- (void)logEventIfStartingTooSlow;

@end
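
The constants and method names above suggest the intended instrumentation sequence. A hedged sketch (not from this diff) of how the video capturer presumably drives the logger while starting a recording:

SCManagedVideoCapturerLogger *logger = [[SCManagedVideoCapturerLogger alloc] init];
[logger prepareForStartingLog];                                  // reset checkpoints when setup begins
[logger logStartingStep:kSCCapturerStartingStepAudioSession];    // after the audio session is ready
[logger logStartingStep:kSCCapturerStartingStepOutputSettings];  // after output settings are resolved
[logger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration];
[logger endLoggingForStarting];                                  // records start_writing and total_delay
[logger logEventIfStartingTooSlow];                              // emits the metric once and clears state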
@@ -0,0 +1,77 @@
//
//  SCManagedVideoCapturerLogger.m
//  Snapchat
//
//  Created by Pinlin on 12/04/2017.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoCapturerLogger.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>

@import QuartzCore;

@interface SCManagedVideoCapturerLogger () {
    // For time-profile metrics during start of recording
    NSMutableDictionary *_startingStepsDelayTime;
    NSTimeInterval _beginStartTime;
    NSTimeInterval _lastCheckpointTime;
    NSTimeInterval _startedTime;
}

@end

@implementation SCManagedVideoCapturerLogger

- (instancetype)init
{
    self = [super init];
    if (self) {
        _startingStepsDelayTime = [NSMutableDictionary dictionary];
    }
    return self;
}

- (void)prepareForStartingLog
{
    _beginStartTime = CACurrentMediaTime();
    _lastCheckpointTime = _beginStartTime;
    [_startingStepsDelayTime removeAllObjects];
}

- (void)logStartingStep:(NSString *)stepName
{
    SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog first");
    NSTimeInterval currentCheckpointTime = CACurrentMediaTime();
    _startingStepsDelayTime[stepName] = @(currentCheckpointTime - _lastCheckpointTime);
    _lastCheckpointTime = currentCheckpointTime;
}

- (void)endLoggingForStarting
{
    SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog first");
    _startedTime = CACurrentMediaTime();
    [self logStartingStep:kSCCapturerStartingStepStartingWriting];
    _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime);
}

- (void)logEventIfStartingTooSlow
{
    if (_beginStartTime > 0) {
        if (_startingStepsDelayTime.count == 0) {
            // Already logged and cleared; we only need to log once.
            return;
        }
        SCLogGeneralWarning(@"Capturer starting delay (in seconds):%f", _startedTime - _beginStartTime);
        [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime];
        // Clear all delay times after logging
        [_startingStepsDelayTime removeAllObjects];
        _beginStartTime = 0;
    }
}

@end
@@ -0,0 +1,48 @@
// 42f6113daff3eebf06d809a073c99651867c42ea
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedVideoCapturerOutputType.h"

#import <AvailabilityMacros.h>

#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>

@protocol SCManagedVideoCapturerOutputSettings <NSObject, NSCoding, NSCopying>

@property (nonatomic, assign, readonly) CGFloat width;

@property (nonatomic, assign, readonly) CGFloat height;

@property (nonatomic, assign, readonly) CGFloat videoBitRate;

@property (nonatomic, assign, readonly) CGFloat audioBitRate;

@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;

@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;

@end

@interface SCManagedVideoCapturerOutputSettings : NSObject <SCManagedVideoCapturerOutputSettings>

@property (nonatomic, assign, readonly) CGFloat width;

@property (nonatomic, assign, readonly) CGFloat height;

@property (nonatomic, assign, readonly) CGFloat videoBitRate;

@property (nonatomic, assign, readonly) CGFloat audioBitRate;

@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;

@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;

- (instancetype)initWithWidth:(CGFloat)width
                       height:(CGFloat)height
                 videoBitRate:(CGFloat)videoBitRate
                 audioBitRate:(CGFloat)audioBitRate
             keyFrameInterval:(NSUInteger)keyFrameInterval
                   outputType:(SCManagedVideoCapturerOutputType)outputType;

@end
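
Construction goes through the single designated initializer above. A sketch with illustrative numbers (these are assumptions, not Snapchat's production values):

// Hypothetical 720x1280 video-snap settings; all rates are illustrative.
SCManagedVideoCapturerOutputSettings *settings =
    [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:720
                                                         height:1280
                                                   videoBitRate:5000000  // 5 Mbps, assumed
                                                   audioBitRate:128000   // 128 kbps, assumed
                                               keyFrameInterval:30
                                                     outputType:SCManagedVideoCapturerOutputTypeVideoSnap];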
@@ -0,0 +1,221 @@
// 42f6113daff3eebf06d809a073c99651867c42ea
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedVideoCapturerOutputSettings.h"

#import <SCFoundation/SCValueObjectHelpers.h>

#import <FastCoding/FastCoder.h>

@implementation SCManagedVideoCapturerOutputSettings

static ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0];
static BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets;

- (instancetype)initWithWidth:(CGFloat)width
                       height:(CGFloat)height
                 videoBitRate:(CGFloat)videoBitRate
                 audioBitRate:(CGFloat)audioBitRate
             keyFrameInterval:(NSUInteger)keyFrameInterval
                   outputType:(SCManagedVideoCapturerOutputType)outputType
{
    self = [super init];
    if (self) {
        _width = width;
        _height = height;
        _videoBitRate = videoBitRate;
        _audioBitRate = audioBitRate;
        _keyFrameInterval = keyFrameInterval;
        _outputType = outputType;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _width = [aDecoder decodeFloatForKey:@"width"];
        _height = [aDecoder decodeFloatForKey:@"height"];
        _videoBitRate = [aDecoder decodeFloatForKey:@"videoBitRate"];
        _audioBitRate = [aDecoder decodeFloatForKey:@"audioBitRate"];
        _keyFrameInterval = [[aDecoder decodeObjectForKey:@"keyFrameInterval"] unsignedIntegerValue];
        _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@"outputType"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeFloat:_width forKey:@"width"];
    [aCoder encodeFloat:_height forKey:@"height"];
    [aCoder encodeFloat:_videoBitRate forKey:@"videoBitRate"];
    [aCoder encodeFloat:_audioBitRate forKey:@"audioBitRate"];
    [aCoder encodeObject:@(_keyFrameInterval) forKey:@"keyFrameInterval"];
    [aCoder encodeInteger:(NSInteger)_outputType forKey:@"outputType"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
    [fasterCoder encodeFloat64:_audioBitRate];
    [fasterCoder encodeFloat64:_height];
    [fasterCoder encodeUInt64:_keyFrameInterval];
    [fasterCoder encodeSInt32:_outputType];
    [fasterCoder encodeFloat64:_videoBitRate];
    [fasterCoder encodeFloat64:_width];
}

- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
    _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64];
    _height = (CGFloat)[fasterDecoder decodeFloat64];
    _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64];
    _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32];
    _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64];
    _width = (CGFloat)[fasterDecoder decodeFloat64];
}

- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 54425104364133881ULL:
            _outputType = (SCManagedVideoCapturerOutputType)val;
            break;
    }
}

- (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 47327990652274883ULL:
            _keyFrameInterval = (NSUInteger)val;
            break;
    }
}

- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 50995534680662654ULL:
            _audioBitRate = (CGFloat)val;
            break;
        case 11656660716170763ULL:
            _height = (CGFloat)val;
            break;
        case 29034524155663716ULL:
            _videoBitRate = (CGFloat)val;
            break;
        case 30689178641753681ULL:
            _width = (CGFloat)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 14709152111692666517ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        6 /* Total */,
        FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(11656660716170763, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64),
        FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32),
        FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets,
                          sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) {
        return NO;
    }
    SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object;
    if (other->_width != _width) {
        return NO;
    }

    if (other->_height != _height) {
        return NO;
    }

    if (other->_videoBitRate != _videoBitRate) {
        return NO;
    }

    if (other->_audioBitRate != _audioBitRate) {
        return NO;
    }

    if (other->_keyFrameInterval != _keyFrameInterval) {
        return NO;
    }

    if (other->_outputType != _outputType) {
        return NO;
    }

    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {(NSUInteger)_width,        (NSUInteger)_height,           (NSUInteger)_videoBitRate,
                              (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType};
    NSUInteger result = subhashes[0];
    for (int i = 1; i < 6; i++) {
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\twidth:%@\n", [@(_width) description]];
    [desc appendFormat:@"\theight:%@\n", [@(_height) description]];
    [desc appendFormat:@"\tvideoBitRate:%@\n", [@(_videoBitRate) description]];
    [desc appendFormat:@"\taudioBitRate:%@\n", [@(_audioBitRate) description]];
    [desc appendFormat:@"\tkeyFrameInterval:%@\n", [@(_keyFrameInterval) description]];
    [desc appendFormat:@"\toutputType:%@\n", [@(_outputType) description]];
    [desc appendString:@"}\n"];

    return [desc copy];
}

@end
@@ -0,0 +1,10 @@
#import "SCManagedVideoCapturerOutputType.h"

interface SCManagedVideoCapturerOutputSettings
CGFloat width
CGFloat height
CGFloat videoBitRate
CGFloat audioBitRate
NSUInteger keyFrameInterval
enum SCManagedVideoCapturerOutputType outputType
end
@@ -0,0 +1,14 @@
//
//  SCManagedVideoCapturerOutputType.h
//  Snapchat
//
//  Created by Chao Pang on 8/8/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

typedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) {
    SCManagedVideoCapturerOutputTypeVideoSnap = 0,
    SCManagedVideoCapturerOutputTypeVideoNote,
};
@@ -0,0 +1,25 @@
//
//  SCManagedVideoCapturerTimeObserver.h
//  Snapchat
//
//  Created by Michel Loenngren on 4/3/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

@class SCTimedTask;

/*
 Class keeping track of SCTimedTasks and firing them on the main thread
 when needed.
 */
@interface SCManagedVideoCapturerTimeObserver : NSObject

- (void)addTimedTask:(SCTimedTask *_Nonnull)task;

- (void)processTime:(CMTime)relativePresentationTime
    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond;

@end
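
SCTimedTask itself is not included in this section, so the initializer in the sketch below is assumed for illustration only; the block signature is inferred from how the implementation that follows invokes tasks.

// Hypothetical usage; -initWithTargetTime:task: is an assumed SCTimedTask initializer.
SCTimedTask *task =
    [[SCTimedTask alloc] initWithTargetTime:CMTimeMakeWithSeconds(5.0, 600)
                                       task:^(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) {
                                           // Fires on the main thread once recording passes the 5 s mark.
                                       }];
[timeObserver addTimedTask:task];

Note in the implementation below that tasks are kept sorted in descending target-time order, so the next task to fire is always lastObject and can be removed in O(1).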
@@ -0,0 +1,61 @@
//
//  SCManagedVideoCapturerTimeObserver.m
//  Snapchat
//
//  Created by Michel Loenngren on 4/3/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoCapturerTimeObserver.h"

#import "SCTimedTask.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCThreadHelpers.h>

@implementation SCManagedVideoCapturerTimeObserver {
    NSMutableArray<SCTimedTask *> *_tasks;
    BOOL _isProcessing;
}

- (instancetype)init
{
    if (self = [super init]) {
        _tasks = [NSMutableArray new];
        _isProcessing = NO;
    }
    return self;
}

- (void)addTimedTask:(SCTimedTask *_Nonnull)task
{
    SCAssert(!_isProcessing,
             @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started.");
    SCAssert(CMTIME_IS_VALID(task.targetTime),
             @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time.");
    [_tasks addObject:task];
    [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) {
        return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime);
    }];
    SCLogGeneralInfo(@"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu", task,
                     (unsigned long)_tasks.count);
}

- (void)processTime:(CMTime)relativePresentationTime
    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond
{
    _isProcessing = YES;
    SCTimedTask *last = _tasks.lastObject;
    while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) {
        [_tasks removeLastObject];
        void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task;
        last.task = nil;
        runOnMainThreadAsynchronously(^{
            task(relativePresentationTime, sessionStartTimeDelayInSecond);
        });
        last = _tasks.lastObject;
    }
}

@end
@@ -0,0 +1,26 @@
//
//  SCManagedVideoFileStreamer.h
//  Snapchat
//
//  Created by Alexander Grytsiuk on 3/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import <SCCameraFoundation/SCManagedVideoDataSource.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

typedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer);

/**
 * SCManagedVideoFileStreamer reads a video file from the provided NSURL to create
 * and publish video output frames. SCManagedVideoFileStreamer also conforms
 * to SCManagedVideoDataSource, allowing chained consumption of video frames.
 */
@interface SCManagedVideoFileStreamer : NSObject <SCManagedVideoDataSource>

- (instancetype)initWithPlaybackForURL:(NSURL *)URL;
- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion;

@end
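
A hedged sketch of feeding a local file through the streamer as if it were camera input; `fileURL` and `listener` are placeholders, not names from this diff:

SCManagedVideoFileStreamer *streamer =
    [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:fileURL];
[streamer addListener:listener]; // any id<SCManagedVideoDataSourceListener>
[streamer startStreaming];       // frames now flow to listeners at display-link rate
[streamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {
    // One-shot grab of the next decoded frame; the buffer is only valid inside this block.
}];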
@@ -0,0 +1,299 @@
//
//  SCManagedVideoFileStreamer.m
//  Snapchat
//
//  Created by Alexander Grytsiuk on 3/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoFileStreamer.h"

#import "SCManagedCapturePreviewLayerController.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPlayer.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>

@import AVFoundation;
@import CoreMedia;

static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer";

@interface SCManagedVideoFileStreamer () <AVPlayerItemOutputPullDelegate>
@end

@implementation SCManagedVideoFileStreamer {
    SCManagedVideoDataSourceListenerAnnouncer *_announcer;
    SCManagedCaptureDevicePosition _devicePosition;
    sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler;

    id _notificationToken;
    id<SCPerforming> _performer;
    dispatch_semaphore_t _semaphore;

    CADisplayLink *_displayLink;
    AVPlayerItemVideoOutput *_videoOutput;
    AVPlayer *_player;

    BOOL _sampleBufferDisplayEnabled;
    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;
}

@synthesize isStreaming = _isStreaming;
@synthesize performer = _performer;
@synthesize videoOrientation = _videoOrientation;

- (instancetype)initWithPlaybackForURL:(NSURL *)URL
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];
        _semaphore = dispatch_semaphore_create(1);
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel
                                            qualityOfService:QOS_CLASS_UNSPECIFIED
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextStories];

        // Set up a CADisplayLink which calls back into displayLinkCallback: at every vsync.
        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
        [_displayLink setPaused:YES];

        // Prepare player
        _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL];
#if TARGET_IPHONE_SIMULATOR
        _player.volume = 0.0;
#endif
        // Configure output
        [self configureOutput];
    }
    return self;
}

- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
{
    _sampleBufferDisplayController = sampleBufferDisplayController;
}

- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
{
    _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
    SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
}

- (void)setKeepLateFrames:(BOOL)keepLateFrames
{
    // Do nothing
}

- (BOOL)getKeepLateFrames
{
    // Return the default NO value
    return NO;
}

- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
{
    SCAssert(queue, @"callback queue must be provided");
    SCAssert(completionHandler, @"completion handler must be provided");
    dispatch_async(queue, completionHandler);
}

- (void)startStreaming
{
    SCTraceStart();
    if (!_isStreaming) {
        _isStreaming = YES;
        [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem];
        [_player play];
    }
}

- (void)stopStreaming
{
    SCTraceStart();
    if (_isStreaming) {
        _isStreaming = NO;
        [_player pause];
        [self removePlayerObservers];
    }
}

- (void)pauseStreaming
{
    [self stopStreaming];
}

- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer removeListener:listener];
}

- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _devicePosition = devicePosition;
}

- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _devicePosition = devicePosition;
}

- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
{
    _videoOrientation = videoOrientation;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    // Ignored
}

- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
{
    // Ignored
}

- (void)beginConfiguration
{
    // Ignored
}

- (void)commitConfiguration
{
    // Ignored
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    // Ignored
}

#pragma mark - AVPlayerItemOutputPullDelegate

- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) {
        [self configureOutput];
    }
    [_displayLink setPaused:NO];
}

#pragma mark - Internal

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval nextVSync = [sender timestamp] + [sender duration];

    CMTime time = [_videoOutput itemTimeForHostTime:nextVSync];
    if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) {
        [_performer perform:^{
            if ([_videoOutput hasNewPixelBufferForItemTime:time]) {
                CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL];
                if (pixelBuffer != NULL) {
                    if (_nextPixelBufferHandler) {
                        _nextPixelBufferHandler(pixelBuffer);
                        _nextPixelBufferHandler = nil;
                    } else {
                        CMSampleBufferRef sampleBuffer =
                            [self createSampleBufferFromPixelBuffer:pixelBuffer
                                                   presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)];
                        if (sampleBuffer) {
                            if (_sampleBufferDisplayEnabled) {
                                [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
                            }
                            [_announcer managedVideoDataSource:self
                                         didOutputSampleBuffer:sampleBuffer
                                                devicePosition:_devicePosition];
                            CFRelease(sampleBuffer);
                        }
                    }
                    CVBufferRelease(pixelBuffer);
                }
            }
            dispatch_semaphore_signal(_semaphore);
        }];
    }
}

- (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time
{
    CMSampleBufferRef sampleBuffer = NULL;
    CMVideoFormatDescriptionRef formatDesc = NULL;

    OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);
    if (err != noErr) {
        return NULL;
    }

    CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid};
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc,
                                       &sampleTimingInfo, &sampleBuffer);

    CFRelease(formatDesc);

    return sampleBuffer;
}

- (void)configureOutput
{
    // Remove the old output
    if (_videoOutput) {
        [[_player currentItem] removeOutput:_videoOutput];
    }

    // Set up AVPlayerItemVideoOutput with the required pixel buffer attributes.
    _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    }];
    _videoOutput.suppressesPlayerRendering = YES;
    [_videoOutput setDelegate:self queue:_performer.queue];

    // Add the new output
    [[_player currentItem] addOutput:_videoOutput];
    [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0];
}

- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion
{
    _nextPixelBufferHandler = completion;
}

- (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item
{
    if (_notificationToken) {
        _notificationToken = nil;
    }

    _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    _notificationToken =
        [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                                          object:item
                                                           queue:[NSOperationQueue mainQueue]
                                                      usingBlock:^(NSNotification *note) {
                                                          [[_player currentItem] seekToTime:kCMTimeZero];
                                                      }];
}

- (void)removePlayerObservers
{
    if (_notificationToken) {
        [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:_player.currentItem];
        _notificationToken = nil;
    }
}

@end
@@ -0,0 +1,22 @@
//
//  SCManagedVideoFrameSampler.h
//  Snapchat
//
//  Created by Michel Loenngren on 3/10/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturerListener.h"

#import <Foundation/Foundation.h>

/**
 Allows a consumer to register a block that samples the next CMSampleBufferRef and
 automatically leverages Core Image to convert the pixel buffer to a UIImage.
 The returned image is a copy.
 */
@interface SCManagedVideoFrameSampler : NSObject <SCManagedCapturerListener>

- (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock;

@end
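
A short sketch of the one-shot sampling flow. Registration with the capturer's announcer happens elsewhere in this diff (SCManagedVideoCapturerHandler removes the sampler from the announcer during recording cleanup):

SCManagedVideoFrameSampler *sampler = [[SCManagedVideoFrameSampler alloc] init];
[sampler sampleNextFrame:^(UIImage *frame, CMTime presentationTime) {
    // Invoked once, on the main thread, with the next appended video frame.
}];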
@@ -0,0 +1,65 @@
//
//  SCManagedVideoFrameSampler.m
//  Snapchat
//
//  Created by Michel Loenngren on 3/10/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoFrameSampler.h"

#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>

@import CoreImage;
@import ImageIO;

@interface SCManagedVideoFrameSampler ()

@property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime);
@property (nonatomic, strong) CIContext *ciContext;

@end

@implementation SCManagedVideoFrameSampler

- (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock
{
    _frameSampleBlock = completeBlock;
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    void (^block)(UIImage *, CMTime) = _frameSampleBlock;
    _frameSampleBlock = nil;

    if (!block) {
        return;
    }

    CVImageBufferRef cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    UIImage *image;
    if (cvImageBuffer) {
        CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(cvImageBuffer);
        image = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];
        CGImageRelease(cgImage);
    }
    runOnMainThreadAsynchronously(^{
        block(image, presentationTime);
    });
}

- (CIContext *)ciContext
{
    if (!_ciContext) {
        _ciContext = [CIContext context];
    }
    return _ciContext;
}

@end
@@ -0,0 +1,44 @@
//
//  SCManagedVideoNoSoundLogger.h
//  Snapchat
//
//  Created by Pinlin Chen on 15/07/2017.
//

#import <SCBase/SCMacros.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@protocol SCManiphestTicketCreator;

@interface SCManagedVideoNoSoundLogger : NSObject

@property (nonatomic, strong) NSError *audioSessionError;
@property (nonatomic, strong) NSError *audioQueueError;
@property (nonatomic, strong) NSError *assetWriterError;
@property (nonatomic, assign) BOOL retryAudioQueueSuccess;
@property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource;
@property (nonatomic, strong) NSString *brokenMicCodeType;
@property (nonatomic, assign) BOOL lenseActiveWhileRecording;
@property (nonatomic, strong) NSString *activeLensId;
@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;
@property (nonatomic, assign) BOOL audioQueueStarted;

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;

/* Used to count how many no-sound issues we have fixed */
// Call at the place where the AVPlayer leak was previously fixed
+ (void)startCountingVideoNoSoundHaveBeenFixed;

/* Used to report the details of new no-sound issues */
// Reset all the recording-error properties
- (void)resetAll;
// Log if the audio track is empty
- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL;
// Called by AVCameraViewController when lenses resume audio
- (void)managedLensesProcessorDidCallResumeAllSounds;

@end
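
The expected call sequence around a single recording, inferred from the declarations above; `ticketCreator`, `sessionError`, and `outputURL` are placeholders:

SCManagedVideoNoSoundLogger *noSoundLogger =
    [[SCManagedVideoNoSoundLogger alloc] initWithTicketCreator:ticketCreator];
[noSoundLogger resetAll];                        // before recording starts
noSoundLogger.audioSessionError = sessionError;  // populate as errors surface during recording
// ...after the asset writer finishes:
[noSoundLogger checkVideoFileAndLogIfNeeded:outputURL];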
@@ -0,0 +1,283 @@
//
//  SCManagedVideoNoSoundLogger.m
//  Snapchat
//
//  Created by Pinlin Chen on 15/07/2017.
//

#import "SCManagedVideoNoSoundLogger.h"

#import "SCManagedCapturer.h"
#import "SCManiphestTicketCreator.h"

#import <SCAudio/SCAudioSession+Debug.h>
#import <SCAudio/SCAudioSession.h>
#import <SCFoundation/NSString+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCLogHelper.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCUUID.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>

@import AVFoundation;

static BOOL s_startCountingVideoNoSoundFixed;
// Count the number of no-sound errors for an app session
static NSUInteger s_noSoundCaseCount = 0;

@interface SCManagedVideoNoSoundLogger () {
    BOOL _isAudioSessionDeactivated;
    int _lenseResumeCount;
}

@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;

@end

@implementation SCManagedVideoNoSoundLogger

- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator
{
    if (self = [super init]) {
        _ticketCreator = ticketCreator;
    }
    return self;
}

+ (NSUInteger)noSoundCount
{
    return s_noSoundCaseCount;
}

+ (void)increaseNoSoundCount
{
    s_noSoundCaseCount += 1;
}

+ (void)startCountingVideoNoSoundHaveBeenFixed
{
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        s_startCountingVideoNoSoundFixed = YES;
        SCLogGeneralInfo(@"start counting video no-sound issues that have been fixed");
    });
}

+ (NSString *)appSessionIdForNoSound
{
    static dispatch_once_t onceToken;
    static NSString *s_AppSessionIdForNoSound = @"SCDefaultSession";
    dispatch_once(&onceToken, ^{
        s_AppSessionIdForNoSound = SCUUID();
    });
    return s_AppSessionIdForNoSound;
}

+ (void)logVideoNoSoundHaveBeenFixedIfNeeded
{
    if (s_startCountingVideoNoSoundFixed) {
        [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                          parameters:@{
                                              @"have_been_fixed" : @"true",
                                              @"fixed_type" : @"player_leak",
                                              @"asset_writer_success" : @"true",
                                              @"audio_session_success" : @"true",
                                              @"audio_queue_success" : @"true",
                                          }
                                    secretParameters:nil
                                             metrics:nil];
    }
}

+ (void)logAudioSessionCategoryHaveBeenFixed
{
    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                      parameters:@{
                                          @"have_been_fixed" : @"true",
                                          @"fixed_type" : @"audio_session_category_mismatch",
                                          @"asset_writer_success" : @"true",
                                          @"audio_session_success" : @"true",
                                          @"audio_queue_success" : @"true",
                                      }
                                secretParameters:nil
                                         metrics:nil];
}

+ (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type
{
    [[SCLogger sharedInstance]
        logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
               parameters:@{
                   @"have_been_fixed" : @"true",
                   @"fixed_type" : @"broken_microphone",
                   @"asset_writer_success" : @"true",
                   @"audio_session_success" : @"true",
                   @"audio_queue_success" : @"true",
                   @"mic_broken_type" : SC_NULL_STRING_IF_NIL(type),
                   @"audio_session_debug_info" :
                       [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
               }
         secretParameters:nil
                  metrics:nil];
}

- (instancetype)init
{
    if (self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionWillDeactivate)
                                                     name:SCAudioSessionWillDeactivateNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionDidActivate)
                                                     name:SCAudioSessionActivatedNotification
                                                   object:nil];
        _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    }
    return self;
}

- (void)resetAll
{
    _audioQueueError = nil;
    _audioSessionError = nil;
    _assetWriterError = nil;
    _retryAudioQueueSuccess = NO;
    _retryAudioQueueSuccessSetDataSource = NO;
    _brokenMicCodeType = nil;
    _lenseActiveWhileRecording = NO;
    _lenseResumeCount = 0;
    _activeLensId = nil;
    self.firstWrittenAudioBufferDelay = kCMTimeInvalid;
}

- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL
{
    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];

    __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);

    dispatch_block_t block = ^{
        // Log no-audio issues that have been fixed
        if (hasAudioTrack) {
            if (_retryAudioQueueSuccess) {
                [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed];
            } else if (_retryAudioQueueSuccessSetDataSource) {
                [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType];
            } else {
                [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded];
            }
        } else {
            // Log no-audio issues caused by missing permission under "wont_fix_type"; these won't show in Grafana
            BOOL isPermissionGranted =
                [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;
            if (!isPermissionGranted) {
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance]
                    logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                           parameters:@{
                               @"wont_fix_type" : @"no_permission",
                               @"no_sound_count" :
                                   [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                               @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                           }
                     secretParameters:nil
                              metrics:nil];
            }
            // Log no-audio issues caused by the microphone being occupied (for example, by a phone call)
            // under "wont_fix_type"; these won't show in Grafana
            // TODO: maybe we should prompt the user about these errors in the future
            else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority ||
                     _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) {
                NSDictionary *parameters = @{
                    @"wont_fix_type" : @"microphone_in_use",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };

                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            } else {
                // Log other new no-audio issues; use "have_been_fixed=false" to show them in Grafana
                NSDictionary *parameters = @{
                    @"have_been_fixed" : @"false",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"asset_writer_success" : [NSString stringWithBool:_assetWriterError == nil],
                    @"audio_session_success" : [NSString stringWithBool:_audioSessionError == nil],
                    @"audio_queue_success" : [NSString stringWithBool:_audioQueueError == nil],
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"video_duration" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(asset.duration)],
                    @"is_audio_session_nil" :
                        [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? @"true" : @"false",
                    @"lenses_active" : [NSString stringWithBool:self.lenseActiveWhileRecording],
                    @"active_lense_id" : self.activeLensId ?: @"(null)",
                    @"lense_audio_resume_count" : @(_lenseResumeCount),
                    @"first_audio_buffer_delay" :
                        [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)],
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"audio_queue_started" : [NSString stringWithBool:_audioQueueStarted],
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            }
        }
    };
    if (hasAudioTrack) {
        block();
    } else {
        // Wait for all tracks to be loaded so a load error doesn't skew the metric
        [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
                             completionHandler:^{
                                 // Return when the tracks couldn't be loaded
                                 NSError *error = nil;
                                 if ([asset statusOfValueForKey:@"tracks" error:&error] != AVKeyValueStatusLoaded ||
                                     error != nil) {
                                     return;
                                 }

                                 // Check the audio track again
                                 hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);
                                 runOnMainThreadAsynchronously(block);
                             }];
    }
}

- (void)_audioSessionWillDeactivate
{
    _isAudioSessionDeactivated = YES;
}

- (void)_audioSessionDidActivate
{
    _isAudioSessionDeactivated = NO;
}

- (void)managedLensesProcessorDidCallResumeAllSounds
{
    _lenseResumeCount += 1;
}

@end
@@ -0,0 +1,35 @@
//
//  SCManagedVideoScanner.h
//  Snapchat
//
//  Created by Liu Liu on 5/5/15.
//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturer.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@class SCScanConfiguration;

@interface SCManagedVideoScanner : NSObject <SCManagedVideoDataSourceListener, SCManagedDeviceCapacityAnalyzerListener>

/**
 * Call this method to start scanning; scanning stops automatically once a snapcode is detected.
 */
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration;

/**
 * Call this method to stop scanning immediately (a successful scan may still be delivered after this is
 * called).
 */
- (void)stopScanAsynchronously;

- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration
                        maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration
                                      restCycle:(float)restCycle;

@end
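
A hedged usage sketch; SCScanConfiguration's construction is not part of this section, so `scanConfiguration` and `videoDataSource` are placeholders, and the durations are illustrative:

SCManagedVideoScanner *scanner =
    [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:0.5
                                           maxFramePassiveDuration:1.0
                                                         restCycle:0.2];
[videoDataSource addListener:scanner]; // feed camera frames to the scanner
[scanner startScanAsynchronouslyWithScanConfiguration:scanConfiguration];
// ...
[scanner stopScanAsynchronously];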
@ -0,0 +1,299 @@ |
|||
// |
|||
// SCManagedVideoScanner.m |
|||
// Snapchat |
|||
// |
|||
// Created by Liu Liu on 5/5/15. |
|||
// Copyright (c) 2015 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedVideoScanner.h" |
|||
|
|||
#import "SCScanConfiguration.h" |
|||
|
|||
#import <SCFeatureSettings/SCFeatureSettingsManager+Property.h> |
|||
#import <SCFoundation/NSData+Base64.h> |
|||
#import <SCFoundation/NSString+SCFormat.h> |
|||
#import <SCFoundation/SCAssertWrapper.h> |
|||
#import <SCFoundation/SCLog.h> |
|||
#import <SCFoundation/SCQueuePerformer.h> |
|||
#import <SCFoundation/SCThreadHelpers.h> |
|||
#import <SCFoundation/SCTrace.h> |
|||
#import <SCFoundation/UIDevice+Filter.h> |
|||
#import <SCLogger/SCLogger.h> |
|||
#import <SCScanTweaks/SCScanTweaks.h> |
|||
#import <SCScanner/SCMachineReadableCodeResult.h> |
|||
#import <SCScanner/SCSnapScanner.h> |
|||
#import <SCVisualProductSearchTweaks/SCVisualProductSearchTweaks.h> |
|||
|
|||
// In seconds |
|||
static NSTimeInterval const kDefaultScanTimeout = 60; |
|||
|
|||
static const char *kSCManagedVideoScannerQueueLabel = "com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan"; |
|||
|
|||
@interface SCManagedVideoScanner () |
|||
|
|||
@end |
|||
|
|||
@implementation SCManagedVideoScanner { |
|||
SCSnapScanner *_snapScanner; |
|||
dispatch_semaphore_t _activeSemaphore; |
|||
NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes |
|||
NSTimeInterval _maxFrameDefaultDuration; |
|||
NSTimeInterval _maxFramePassiveDuration; |
|||
float _restCycleOfBusyCycle; |
|||
NSTimeInterval _scanStartTime; |
|||
BOOL _active; |
|||
BOOL _shouldEmitEvent; |
|||
dispatch_block_t _completionHandler; |
|||
NSTimeInterval _scanTimeout; |
|||
SCManagedCaptureDevicePosition _devicePosition; |
|||
SCQueuePerformer *_performer; |
|||
BOOL _adjustingFocus; |
|||
NSArray *_codeTypes; |
|||
NSArray *_codeTypesOld; |
|||
sc_managed_capturer_scan_results_handler_t _scanResultsHandler; |
|||
|
|||
SCUserSession *_userSession; |
|||
} |
|||
|
|||
- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration |
|||
maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration |
|||
restCycle:(float)restCycle |
|||
{ |
|||
SCTraceStart(); |
|||
self = [super init]; |
|||
if (self) { |
|||
_snapScanner = [SCSnapScanner sharedInstance]; |
|||
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel |
|||
qualityOfService:QOS_CLASS_UNSPECIFIED |
|||
queueType:DISPATCH_QUEUE_SERIAL |
|||
context:SCQueuePerformerContextCamera]; |
|||
_activeSemaphore = dispatch_semaphore_create(0); |
|||
SCAssert(restCycle >= 0 && restCycle < 1, @"rest cycle should be between 0 to 1"); |
|||
_maxFrameDefaultDuration = maxFrameDefaultDuration; |
|||
_maxFramePassiveDuration = maxFramePassiveDuration; |
|||
_restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest |
|||
} |
|||
return self; |
|||
} |
|||
#pragma mark - Public methods |
|||
|
|||
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration |
|||
{ |
|||
SCTraceStart(); |
|||
[_performer perform:^{ |
|||
_shouldEmitEvent = YES; |
|||
_completionHandler = nil; |
|||
_scanResultsHandler = configuration.scanResultsHandler; |
|||
_userSession = configuration.userSession; |
|||
_scanTimeout = kDefaultScanTimeout; |
|||
_maxFrameDuration = _maxFrameDefaultDuration; |
|||
_codeTypes = [self _scanCodeTypes]; |
|||
_codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ]; |
|||
|
|||
SCTraceStart(); |
|||
// Set the scan start time properly, if we call startScan multiple times while it is active, |
|||
// This makes sure we can scan long enough. |
|||
_scanStartTime = CACurrentMediaTime(); |
|||
// we are not active, need to send the semaphore to start the scan |
|||
if (!_active) { |
|||
_active = YES; |
|||
|
|||
// Signal the semaphore that we can start scan! |
|||
dispatch_semaphore_signal(_activeSemaphore); |
|||
} |
|||
}]; |
|||
} |
|||
|
|||
- (void)stopScanAsynchronously |
|||
{ |
|||
SCTraceStart(); |
|||
[_performer perform:^{ |
|||
SCTraceStart(); |
|||
if (_active) { |
|||
SCLogScanDebug(@"VideoScanner:stopScanAsynchronously turn off from active"); |
|||
_active = NO; |
|||
_scanStartTime = 0; |
|||
_scanResultsHandler = nil; |
|||
_userSession = nil; |
|||
} else { |
|||
SCLogScanDebug(@"VideoScanner:stopScanAsynchronously off already"); |
|||
} |
|||
}]; |
|||
} |
|||
|
|||
#pragma mark - Private Methods |
|||
|
|||
- (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData |
|||
{ |
|||
if (scannedData.hasScannedData) { |
|||
if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji || |
|||
scannedData.codeType == SCCodeTypeSnapcode18x18Old) { |
|||
NSString *data = [scannedData.rawData base64EncodedString]; |
|||
NSString *version = [NSString sc_stringWithFormat:@"%i", scannedData.codeTypeMeta]; |
|||
[[SCLogger sharedInstance] logEvent:@"SNAPCODE_18x18_SCANNED_FROM_CAMERA" |
|||
parameters:@{ |
|||
@"version" : version |
|||
} |
|||
secretParameters:@{ |
|||
@"data" : data |
|||
}]; |
|||
|
|||
if (_completionHandler != nil) { |
|||
runOnMainThreadAsynchronously(_completionHandler); |
|||
_completionHandler = nil; |
|||
} |
|||
} else if (scannedData.codeType == SCCodeTypeBarcode) { |
|||
if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) { |
|||
return; |
|||
} |
|||
NSString *data = scannedData.data; |
|||
NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta]; |
|||
[[SCLogger sharedInstance] logEvent:@"BARCODE_SCANNED_FROM_CAMERA" |
|||
parameters:@{ |
|||
@"type" : type |
|||
} |
|||
secretParameters:@{ |
|||
@"data" : data |
|||
}]; |
|||
} else if (scannedData.codeType == SCCodeTypeQRCode) { |
|||
if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) { |
|||
return; |
|||
} |
|||
NSURL *url = [NSURL URLWithString:scannedData.data]; |
|||
[[SCLogger sharedInstance] logEvent:@"QR_CODE_SCANNED_FROM_CAMERA" |
|||
parameters:@{ |
|||
@"type" : (url) ? @"url" : @"other" |
|||
} |
|||
secretParameters:@{}]; |
|||
} |
|||
|
|||
if (_shouldEmitEvent) { |
|||
sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler; |
|||
runOnMainThreadAsynchronously(^{ |
|||
if (scanResultsHandler != nil && scannedData) { |
|||
SCMachineReadableCodeResult *result = |
|||
[SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData]; |
|||
scanResultsHandler(result); |
|||
} |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
- (NSArray *)_scanCodeTypes |
|||
{ |
|||
// Scan types are defined by code types. SnapScan will scan the frame based on these code types. |
|||
NSMutableArray *codeTypes = [[NSMutableArray alloc] |
|||
initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil]; |
|||
if (SCSearchEnableBarcodeProductSearch()) { |
|||
[codeTypes addObject:@(SCCodeTypeBarcode)]; |
|||
} |
|||
return [codeTypes copy]; |
|||
} |
|||
|
|||
#pragma mark - SCManagedVideoDataSourceListener |
|||
|
|||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource |
|||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
SCTraceStart(); |
|||
_devicePosition = devicePosition; |
|||
|
|||
if (!_active) { |
|||
SCLogScanDebug(@"VideoScanner: Scanner is not active"); |
|||
return; |
|||
} |
|||
SCLogScanDebug(@"VideoScanner: Scanner is active"); |
|||
|
|||
// If we have the semaphore now, enqueue a new buffer, otherwise drop the buffer |
|||
if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) { |
|||
CFRetain(sampleBuffer); |
|||
NSTimeInterval startTime = CACurrentMediaTime(); |
|||
[_performer perform:^{ |
|||
SCTraceStart(); |
|||
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
|||
SCLogScanInfo(@"VideoScanner: Scanner will scan a frame"); |
|||
SCSnapScannedData *scannedData; |
|||
|
|||
SCLogScanInfo(@"VideoScanner:Use new scanner without false alarm check"); |
|||
scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes]; |
|||
|
|||
if ([UIDevice shouldLogPerfEvents]) { |
|||
NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000; |
|||
// Since there are too many unsuccessful scans, we will only log 1/10 of them for now. |
|||
if (scannedData.hasScannedData || arc4random() % 10 == 0) { |
|||
[[SCLogger sharedInstance] logEvent:@"SCAN_SINGLE_FRAME" |
|||
parameters:@{ |
|||
@"time_span" : @(loadingMs), |
|||
@"has_scanned_data" : @(scannedData.hasScannedData), |
|||
}]; |
|||
} |
|||
} |
|||
|
|||
[self _handleSnapScanResult:scannedData]; |
|||
// If scanning has not been turned off, we will continue to scan as long as no result is present |
|||
if (_active) { |
|||
_active = !scannedData.hasScannedData; |
|||
} |
|||
|
|||
// Clean up if result is reported for scan |
|||
if (!_active) { |
|||
_scanResultsHandler = nil; |
|||
_completionHandler = nil; |
|||
} |
|||
|
|||
CFRelease(sampleBuffer); |
|||
|
|||
NSTimeInterval currentTime = CACurrentMediaTime(); |
|||
SCLogScanInfo(@"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f", currentTime - startTime, |
|||
_maxFrameDuration, _scanTimeout); |
|||
// Haven't found the scanned data yet, haven't reached maximum scan timeout yet, haven't turned this off |
|||
// yet, ready for the next frame |
|||
if (_active && currentTime < _scanStartTime + _scanTimeout) { |
|||
// We've finished processing current sample buffer, ready for next one, but before that, we need to rest |
|||
// a bit (if possible) |
|||
if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) { |
|||
// If we already reached deadline (used too much time) and don't want to rest CPU, give the signal |
|||
// now to grab the next frame |
|||
SCLogScanInfo(@"VideoScanner:Signal to get next frame for snapcode scanner"); |
|||
dispatch_semaphore_signal(_activeSemaphore); |
|||
} else { |
|||
NSTimeInterval afterTime = MAX((currentTime - startTime) * _restCycleOfBusyCycle, |
|||
_maxFrameDuration - (currentTime - startTime)); |
|||
// If we need to wait more than 0 seconds, do that; otherwise grab the next frame immediately |
|||
if (afterTime > 0) { |
|||
[_performer perform:^{ |
|||
SCLogScanInfo( |
|||
@"VideoScanner:Waited and now signaling to get next frame for snapcode scanner"); |
|||
dispatch_semaphore_signal(_activeSemaphore); |
|||
} |
|||
after:afterTime]; |
|||
} else { |
|||
SCLogScanInfo(@"VideoScanner:Now signaling to get next frame for snapcode scanner"); |
|||
dispatch_semaphore_signal(_activeSemaphore); |
|||
} |
|||
} |
|||
} else { |
|||
// We are not active, and not going to be active any more. |
|||
SCLogScanInfo(@"VideoScanner:not active anymore"); |
|||
_active = NO; |
|||
_scanResultsHandler = nil; |
|||
_completionHandler = nil; |
|||
} |
|||
}]; |
|||
} |
|||
} |
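// Worked example of the pacing math above (illustrative numbers, not from the original source): |
// with restCycle = 0.2, _restCycleOfBusyCycle = 0.2 / 0.8 = 0.25. If scanning a frame took 40ms |
// and _maxFrameDuration is 100ms, afterTime = MAX(40 * 0.25, 100 - 40) = 60ms, so the scanner |
// rests 60ms before signaling for the next frame and a busy scan still leaves CPU headroom. |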
|||
|
|||
#pragma mark - SCManagedDeviceCapacityAnalyzerListener |
|||
|
|||
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer |
|||
didChangeAdjustingFocus:(BOOL)adjustingFocus |
|||
{ |
|||
[_performer perform:^{ |
|||
_adjustingFocus = adjustingFocus; |
|||
}]; |
|||
} |
|||
|
|||
@end |
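// Usage sketch (illustrative; the timing values and handler signature are assumptions based on |
// this file and SCScanConfiguration, not taken verbatim from the original source): |
// |
// SCManagedVideoScanner *scanner = |
//     [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:0.1 |
//                                            maxFramePassiveDuration:0.5 |
//                                                          restCycle:0.2]; |
// SCScanConfiguration *configuration = [[SCScanConfiguration alloc] init]; |
// configuration.scanResultsHandler = ^(SCMachineReadableCodeResult *result) { |
//     // Handle the decoded snapcode / QR code / barcode. |
// }; |
// configuration.userSession = userSession; |
// [scanner startScanAsynchronouslyWithScanConfiguration:configuration]; |
// ... |
// [scanner stopScanAsynchronously]; |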
@ -0,0 +1,15 @@ |
|||
// |
|||
// SCManagedVideoStreamReporter.h |
|||
// Snapchat |
|||
// |
|||
// Created by Liu Liu on 5/16/15. |
|||
// Copyright (c) 2015 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h> |
|||
|
|||
#import <Foundation/Foundation.h> |
|||
|
|||
@interface SCManagedVideoStreamReporter : NSObject <SCManagedVideoDataSourceListener> |
|||
|
|||
@end |
@ -0,0 +1,58 @@ |
|||
// |
|||
// SCManagedVideoStreamReporter.m |
|||
// Snapchat |
|||
// |
|||
// Created by Liu Liu on 5/16/15. |
|||
// Copyright (c) 2015 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedVideoStreamReporter.h" |
|||
|
|||
#import <SCFoundation/SCLog.h> |
|||
#import <SCLogger/SCLogger.h> |
|||
|
|||
static NSTimeInterval const SCManagedVideoStreamReporterInterval = 10; |
|||
|
|||
@implementation SCManagedVideoStreamReporter { |
|||
NSUInteger _droppedSampleBuffers; |
|||
NSUInteger _outputSampleBuffers; |
|||
NSTimeInterval _lastReportTime; |
|||
} |
|||
|
|||
- (instancetype)init |
|||
{ |
|||
self = [super init]; |
|||
if (self) { |
|||
_lastReportTime = CACurrentMediaTime(); |
|||
} |
|||
return self; |
|||
} |
|||
|
|||
- (void)_reportIfNeeded |
|||
{ |
|||
NSTimeInterval currentTime = CACurrentMediaTime(); |
|||
if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) { |
|||
SCLogGeneralInfo(@"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu", _lastReportTime, currentTime, |
|||
_droppedSampleBuffers, _outputSampleBuffers); |
|||
_droppedSampleBuffers = _outputSampleBuffers = 0; |
|||
_lastReportTime = currentTime; |
|||
} |
|||
} |
|||
|
|||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource |
|||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
++_outputSampleBuffers; |
|||
[self _reportIfNeeded]; |
|||
} |
|||
|
|||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource |
|||
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
++_droppedSampleBuffers; |
|||
[self _reportIfNeeded]; |
|||
} |
|||
|
|||
@end |
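// Usage sketch (illustrative): the reporter is purely passive, so it only needs to be registered |
// as a listener on a video data source; it then logs dropped vs. output counts at most once every |
// SCManagedVideoStreamReporterInterval seconds. |
// |
// SCManagedVideoStreamReporter *reporter = [[SCManagedVideoStreamReporter alloc] init]; |
// [videoDataSource addListener:reporter]; // videoDataSource: any id<SCManagedVideoDataSource> |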
@ -0,0 +1,36 @@ |
|||
// |
|||
// SCManagedVideoStreamer.h |
|||
// Snapchat |
|||
// |
|||
// Created by Liu Liu on 4/30/15. |
|||
// Copyright (c) 2015 Liu Liu. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedVideoARDataSource.h" |
|||
|
|||
#import <SCCameraFoundation/SCManagedVideoDataSource.h> |
|||
|
|||
#import <AVFoundation/AVFoundation.h> |
|||
#import <Foundation/Foundation.h> |
|||
|
|||
@class ARSession; |
|||
|
|||
/** |
|||
* SCManagedVideoStreamer uses the current AVCaptureSession to create |
|||
* and publish video output frames. SCManagedVideoStreamer also conforms |
|||
* to SCManagedVideoDataSource allowing chained consumption of video frames. |
|||
*/ |
|||
@interface SCManagedVideoStreamer : NSObject <SCManagedVideoDataSource, SCManagedVideoARDataSource> |
|||
|
|||
- (instancetype)initWithSession:(AVCaptureSession *)session |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition; |
|||
|
|||
- (instancetype)initWithSession:(AVCaptureSession *)session |
|||
arSession:(ARSession *)arSession |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0); |
|||
|
|||
- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition; |
|||
|
|||
- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0); |
|||
|
|||
@end |
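// Usage sketch (illustrative, assuming an already-configured AVCaptureSession; the front-camera |
// position is the enum value confirmed elsewhere in this file set): |
// |
// SCManagedVideoStreamer *streamer = |
//     [[SCManagedVideoStreamer alloc] initWithSession:captureSession |
//                                      devicePosition:SCManagedCaptureDevicePositionFront]; |
// [streamer addListener:frameConsumer]; // frameConsumer: any SCManagedVideoDataSourceListener |
// [streamer startStreaming]; |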
@ -0,0 +1,823 @@ |
|||
// |
|||
// SCManagedVideoStreamer.m |
|||
// Snapchat |
|||
// |
|||
// Created by Liu Liu on 4/30/15. |
|||
// Copyright (c) 2015 Liu Liu. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedVideoStreamer.h" |
|||
|
|||
#import "ARConfiguration+SCConfiguration.h" |
|||
#import "SCCameraTweaks.h" |
|||
#import "SCCapturerDefines.h" |
|||
#import "SCLogger+Camera.h" |
|||
#import "SCManagedCapturePreviewLayerController.h" |
|||
#import "SCMetalUtils.h" |
|||
#import "SCProcessingPipeline.h" |
|||
#import "SCProcessingPipelineBuilder.h" |
|||
|
|||
#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h> |
|||
#import <SCFoundation/NSString+SCFormat.h> |
|||
#import <SCFoundation/SCLog.h> |
|||
#import <SCFoundation/SCQueuePerformer.h> |
|||
#import <SCFoundation/SCTrace.h> |
|||
#import <SCLogger/SCCameraMetrics.h> |
|||
|
|||
#import <Looksery/Looksery.h> |
|||
|
|||
#import <libkern/OSAtomic.h> |
|||
#import <stdatomic.h> |
|||
|
|||
@import ARKit; |
|||
@import AVFoundation; |
|||
|
|||
#define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) |
|||
#define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) |
|||
#define SCLogVideoStreamerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) |
|||
|
|||
static NSInteger const kSCCaptureFrameRate = 30; |
|||
static CGFloat const kSCLogInterval = 3.0; |
|||
static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer"; |
|||
static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue"; |
|||
static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late. |
|||
|
|||
static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay = |
|||
5; // If the frame is not updated for 5 seconds, it is considered to be stalled. |
|||
|
|||
static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap = |
|||
1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps |
|||
static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15; |
|||
|
|||
@interface SCManagedVideoStreamer () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate, |
|||
AVCaptureDataOutputSynchronizerDelegate, ARSessionDelegate> |
|||
|
|||
@property (nonatomic, strong) AVCaptureSession *captureSession; |
|||
|
|||
@end |
|||
|
|||
@implementation SCManagedVideoStreamer { |
|||
AVCaptureVideoDataOutput *_videoDataOutput; |
|||
AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0); |
|||
AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0); |
|||
BOOL _performingConfigurations; |
|||
SCManagedCaptureDevicePosition _devicePosition; |
|||
BOOL _videoStabilizationEnabledIfSupported; |
|||
SCManagedVideoDataSourceListenerAnnouncer *_announcer; |
|||
|
|||
BOOL _sampleBufferDisplayEnabled; |
|||
id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController; |
|||
dispatch_block_t _flushOutdatedPreviewBlock; |
|||
NSMutableArray<NSArray *> *_waitUntilSampleBufferDisplayedBlocks; |
|||
SCProcessingPipeline *_processingPipeline; |
|||
|
|||
NSTimeInterval _lastDisplayedFrameTimestamp; |
|||
#ifdef SC_USE_ARKIT_FACE |
|||
NSTimeInterval _lastDisplayedDepthFrameTimestamp; |
|||
#endif |
|||
|
|||
BOOL _depthCaptureEnabled; |
|||
CGPoint _portraitModePointOfInterest; |
|||
|
|||
// For sticky video tweaks |
|||
BOOL _keepLateFrames; |
|||
SCQueuePerformer *_callbackPerformer; |
|||
atomic_int _processingBuffersCount; |
|||
} |
|||
|
|||
@synthesize isStreaming = _isStreaming; |
|||
@synthesize performer = _performer; |
|||
@synthesize currentFrame = _currentFrame; |
|||
@synthesize fieldOfView = _fieldOfView; |
|||
#ifdef SC_USE_ARKIT_FACE |
|||
@synthesize lastDepthData = _lastDepthData; |
|||
#endif |
|||
@synthesize videoOrientation = _videoOrientation; |
|||
|
|||
- (instancetype)initWithSession:(AVCaptureSession *)session |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
SCTraceStart(); |
|||
self = [super init]; |
|||
if (self) { |
|||
_sampleBufferDisplayEnabled = YES; |
|||
_announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; |
|||
// We discard frames to support lenses in real time |
|||
_keepLateFrames = NO; |
|||
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel |
|||
qualityOfService:QOS_CLASS_USER_INTERACTIVE |
|||
queueType:DISPATCH_QUEUE_SERIAL |
|||
context:SCQueuePerformerContextCamera]; |
|||
|
|||
_videoOrientation = AVCaptureVideoOrientationLandscapeRight; |
|||
|
|||
[self setupWithSession:session devicePosition:devicePosition]; |
|||
SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition); |
|||
} |
|||
return self; |
|||
} |
|||
|
|||
- (instancetype)initWithSession:(AVCaptureSession *)session |
|||
arSession:(ARSession *)arSession |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
self = [self initWithSession:session devicePosition:devicePosition]; |
|||
if (self) { |
|||
[self setupWithARSession:arSession]; |
|||
self.currentFrame = nil; |
|||
#ifdef SC_USE_ARKIT_FACE |
|||
self.lastDepthData = nil; |
|||
#endif |
|||
} |
|||
return self; |
|||
} |
|||
|
|||
- (AVCaptureVideoDataOutput *)_newVideoDataOutput |
|||
{ |
|||
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; |
|||
// All inbound frames are going to be in the native format of the camera, to avoid |
|||
// any need for transcoding. |
|||
output.videoSettings = |
|||
@{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; |
|||
return output; |
|||
} |
|||
|
|||
- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
[self stopStreaming]; |
|||
self.captureSession = session; |
|||
_devicePosition = devicePosition; |
|||
|
|||
_videoDataOutput = [self _newVideoDataOutput]; |
|||
if (SCDeviceSupportsMetal()) { |
|||
// We default to start streaming at startup time if Metal is supported. |
|||
_isStreaming = YES; |
|||
// Set the sample buffer delegate before starting it. |
|||
[_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; |
|||
} |
|||
|
|||
if ([session canAddOutput:_videoDataOutput]) { |
|||
[session addOutput:_videoDataOutput]; |
|||
[self _enableVideoMirrorForDevicePosition:devicePosition]; |
|||
} |
|||
|
|||
if (SCCameraTweaksEnablePortraitModeButton()) { |
|||
if (@available(iOS 11.0, *)) { |
|||
_depthDataOutput = [[AVCaptureDepthDataOutput alloc] init]; |
|||
[[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; |
|||
if ([session canAddOutput:_depthDataOutput]) { |
|||
[session addOutput:_depthDataOutput]; |
|||
[_depthDataOutput setDelegate:self callbackQueue:_performer.queue]; |
|||
} |
|||
_depthCaptureEnabled = NO; |
|||
} |
|||
_portraitModePointOfInterest = CGPointMake(0.5, 0.5); |
|||
} |
|||
|
|||
[self setVideoStabilizationEnabledIfSupported:YES]; |
|||
} |
|||
|
|||
- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
arSession.delegateQueue = _performer.queue; |
|||
arSession.delegate = self; |
|||
} |
|||
|
|||
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController |
|||
{ |
|||
[_performer perform:^{ |
|||
_sampleBufferDisplayController = sampleBufferDisplayController; |
|||
SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController); |
|||
}]; |
|||
} |
|||
|
|||
- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled |
|||
{ |
|||
[_performer perform:^{ |
|||
_sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; |
|||
SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); |
|||
}]; |
|||
} |
|||
|
|||
- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler |
|||
{ |
|||
SCAssert(queue, @"callback queue must be provided"); |
|||
SCAssert(completionHandler, @"completion handler must be provided"); |
|||
SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue, |
|||
completionHandler, _isStreaming); |
|||
if (_isStreaming) { |
|||
[_performer perform:^{ |
|||
if (!_waitUntilSampleBufferDisplayedBlocks) { |
|||
_waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array]; |
|||
} |
|||
[_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]]; |
|||
SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler); |
|||
}]; |
|||
} else { |
|||
dispatch_async(queue, completionHandler); |
|||
} |
|||
} |
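// Usage sketch (illustrative): defer work until the next frame actually reaches the screen, |
// e.g. before removing a snapshot view that covers the preview. |
// |
// [streamer waitUntilSampleBufferDisplayed:dispatch_get_main_queue() |
//                        completionHandler:^{ |
//                            // The preview is live again; safe to remove overlays. |
//                        }]; |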
|||
|
|||
- (void)startStreaming |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"start streaming. _isStreaming:%d", _isStreaming); |
|||
if (!_isStreaming) { |
|||
_isStreaming = YES; |
|||
[self _cancelFlushOutdatedPreview]; |
|||
if (@available(iOS 11.0, *)) { |
|||
if (_depthCaptureEnabled) { |
|||
[[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES]; |
|||
} |
|||
} |
|||
[_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; |
|||
} |
|||
} |
|||
|
|||
- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
SCTraceStart(); |
|||
if ([session canAddOutput:_videoDataOutput]) { |
|||
SCLogVideoStreamerError(@"add videoDataOutput:%@", _videoDataOutput); |
|||
[session addOutput:_videoDataOutput]; |
|||
[self _enableVideoMirrorForDevicePosition:devicePosition]; |
|||
} else { |
|||
SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session); |
|||
} |
|||
[self _enableVideoStabilizationIfSupported]; |
|||
} |
|||
|
|||
- (void)removeAsOutput:(AVCaptureSession *)session |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session); |
|||
[session removeOutput:_videoDataOutput]; |
|||
} |
|||
|
|||
- (void)_cancelFlushOutdatedPreview |
|||
{ |
|||
SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock); |
|||
if (_flushOutdatedPreviewBlock) { |
|||
dispatch_block_cancel(_flushOutdatedPreviewBlock); |
|||
_flushOutdatedPreviewBlock = nil; |
|||
} |
|||
} |
|||
|
|||
- (SCQueuePerformer *)callbackPerformer |
|||
{ |
|||
// If the sticky video tweak is on, use a separate performer queue |
|||
if (_keepLateFrames) { |
|||
if (!_callbackPerformer) { |
|||
_callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel |
|||
qualityOfService:QOS_CLASS_USER_INTERACTIVE |
|||
queueType:DISPATCH_QUEUE_SERIAL |
|||
context:SCQueuePerformerContextCamera]; |
|||
} |
|||
return _callbackPerformer; |
|||
} |
|||
return _performer; |
|||
} |
|||
|
|||
- (void)pauseStreaming |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming); |
|||
if (_isStreaming) { |
|||
_isStreaming = NO; |
|||
[_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; |
|||
if (@available(iOS 11.0, *)) { |
|||
if (_depthCaptureEnabled) { |
|||
[[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; |
|||
} |
|||
} |
|||
@weakify(self); |
|||
_flushOutdatedPreviewBlock = dispatch_block_create(0, ^{ |
|||
SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock"); |
|||
@strongify(self); |
|||
SC_GUARD_ELSE_RETURN(self); |
|||
[self->_sampleBufferDisplayController flushOutdatedPreview]; |
|||
}); |
|||
[_performer perform:_flushOutdatedPreviewBlock |
|||
after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? kSCManagedVideoStreamerStalledDisplay : 0]; |
|||
[_performer perform:^{ |
|||
[self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; |
|||
}]; |
|||
} |
|||
} |
|||
|
|||
- (void)stopStreaming |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming); |
|||
if (_isStreaming) { |
|||
_isStreaming = NO; |
|||
[_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; |
|||
if (@available(iOS 11.0, *)) { |
|||
if (_depthCaptureEnabled) { |
|||
[[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; |
|||
} |
|||
} |
|||
} |
|||
[self _cancelFlushOutdatedPreview]; |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"stopStreaming in perfome queue"); |
|||
[_sampleBufferDisplayController flushOutdatedPreview]; |
|||
[self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; |
|||
}]; |
|||
} |
|||
|
|||
- (void)beginConfiguration |
|||
{ |
|||
SCLogVideoStreamerInfo(@"enter beginConfiguration"); |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"performingConfigurations set to YES"); |
|||
_performingConfigurations = YES; |
|||
}]; |
|||
} |
|||
|
|||
- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition); |
|||
[self _enableVideoMirrorForDevicePosition:devicePosition]; |
|||
[self _enableVideoStabilizationIfSupported]; |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu", |
|||
(unsigned long)_devicePosition, (unsigned long)devicePosition); |
|||
if (_devicePosition != devicePosition) { |
|||
_devicePosition = devicePosition; |
|||
} |
|||
}]; |
|||
} |
|||
|
|||
- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation |
|||
{ |
|||
SCTraceStart(); |
|||
// It is not necessary to make these changes on the private queue, because this is only data output configuration. |
|||
// It should be called from the managed capturer queue to avoid locking the capture session in two different |
|||
// (private and managed capturer) queues, which would cause a deadlock. |
|||
SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu", |
|||
(unsigned long)_videoOrientation, (unsigned long)videoOrientation); |
|||
_videoOrientation = videoOrientation; |
|||
AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
|||
connection.videoOrientation = _videoOrientation; |
|||
} |
|||
|
|||
- (void)setKeepLateFrames:(BOOL)keepLateFrames |
|||
{ |
|||
SCTraceStart(); |
|||
[_performer perform:^{ |
|||
SCTraceStart(); |
|||
if (keepLateFrames != _keepLateFrames) { |
|||
_keepLateFrames = keepLateFrames; |
|||
// Get and set the corresponding queue based on keepLateFrames. |
|||
// We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames anymore, because it could |
|||
// cause a lenses regression, and we can use all 15 sample buffers by adding a separate callback |
|||
// queue. |
|||
[_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; |
|||
SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames); |
|||
} |
|||
}]; |
|||
} |
|||
|
|||
- (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
_depthCaptureEnabled = enabled; |
|||
[[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled]; |
|||
if (enabled) { |
|||
_dataOutputSynchronizer = |
|||
[[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]]; |
|||
[_dataOutputSynchronizer setDelegate:self queue:_performer.queue]; |
|||
} else { |
|||
_dataOutputSynchronizer = nil; |
|||
} |
|||
} |
|||
|
|||
- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest |
|||
{ |
|||
_portraitModePointOfInterest = pointOfInterest; |
|||
} |
|||
|
|||
- (BOOL)getKeepLateFrames |
|||
{ |
|||
return _keepLateFrames; |
|||
} |
|||
|
|||
- (void)commitConfiguration |
|||
{ |
|||
SCLogVideoStreamerInfo(@"enter commitConfiguration"); |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"performingConfigurations set to NO"); |
|||
_performingConfigurations = NO; |
|||
}]; |
|||
} |
|||
|
|||
- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"add listener:%@", listener); |
|||
[_announcer addListener:listener]; |
|||
} |
|||
|
|||
- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener |
|||
{ |
|||
SCTraceStart(); |
|||
SCLogVideoStreamerInfo(@"remove listener:%@", listener); |
|||
[_announcer removeListener:listener]; |
|||
} |
|||
|
|||
- (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline |
|||
{ |
|||
SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline); |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline); |
|||
_processingPipeline = processingPipeline; |
|||
}]; |
|||
} |
|||
|
|||
- (void)removeProcessingPipeline |
|||
{ |
|||
SCLogVideoStreamerInfo(@"enter removeProcessingPipeline"); |
|||
[_performer perform:^{ |
|||
SCLogVideoStreamerInfo(@"processingPipeline set to nil"); |
|||
_processingPipeline = nil; |
|||
}]; |
|||
} |
|||
|
|||
- (BOOL)isVideoMirrored |
|||
{ |
|||
SCTraceStart(); |
|||
AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
|||
return connection.isVideoMirrored; |
|||
} |
|||
|
|||
#pragma mark - Common Sample Buffer Handling |
|||
|
|||
- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
{ |
|||
return [self didOutputSampleBuffer:sampleBuffer depthData:nil]; |
|||
} |
|||
|
|||
- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap |
|||
{ |
|||
// Don't send the sample buffer if we are perform configurations |
|||
if (_performingConfigurations) { |
|||
SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES"); |
|||
return; |
|||
} |
|||
SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); |
|||
|
|||
// We can't set alwaysDiscardsLateVideoFrames to YES when a lens is activated because it will cause camera freezes. |
|||
// When alwaysDiscardsLateVideoFrames is set to NO, late frames are not dropped until they reach 15 frames, |
|||
// so we should simulate the dropping behaviour as AVFoundation does. |
|||
NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); |
|||
_lastDisplayedFrameTimestamp = presentationTime; |
|||
NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime; |
|||
// Log interval is defined by kSCLogInterval (currently 3.0s); at 30 fps this fires once every 90 frames |
|||
BOOL shouldLog = |
|||
(long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) % |
|||
((long)(kSCCaptureFrameRate * kSCLogInterval)) == |
|||
0; |
|||
if (shouldLog) { |
|||
SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer); |
|||
} |
|||
if (_processingPipeline) { |
|||
RenderData renderData = { |
|||
.sampleBuffer = sampleBuffer, |
|||
.depthDataMap = depthDataMap, |
|||
.depthBlurPointOfInterest = |
|||
SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus() |
|||
? &_portraitModePointOfInterest |
|||
: nil, |
|||
}; |
|||
// Ensure we are doing all render operations (i.e. accessing textures) on performer to prevent race condition |
|||
SCAssertPerformer(_performer); |
|||
sampleBuffer = [_processingPipeline render:renderData]; |
|||
|
|||
if (shouldLog) { |
|||
SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer, |
|||
_processingPipeline); |
|||
} |
|||
} |
|||
|
|||
if (sampleBuffer && _sampleBufferDisplayEnabled) { |
|||
// Send the buffer only if it is valid, and mark it to be displayed immediately (see the enqueueSampleBuffer |
|||
// method header: we need to get the attachments array and set the dictionary). |
|||
CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); |
|||
if (!attachmentsArray) { |
|||
SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer"); |
|||
} else if (CFArrayGetCount(attachmentsArray) > 0) { |
|||
CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0); |
|||
CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); |
|||
} |
|||
// Warn if the frame that went through is not recent enough. |
|||
if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) { |
|||
SCLogVideoStreamerWarning( |
|||
@"The sample buffer we received is too late, why? presentationTime:%lf frameLatency:%f", |
|||
presentationTime, frameLatency); |
|||
} |
|||
[_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; |
|||
if (shouldLog) { |
|||
SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer); |
|||
} |
|||
|
|||
[self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; |
|||
} |
|||
|
|||
if (shouldLog) { |
|||
SCLogVideoStreamerInfo(@"begin annoucing sampleBuffer:%p of devicePosition:%lu", sampleBuffer, |
|||
(unsigned long)_devicePosition); |
|||
} |
|||
[_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition]; |
|||
if (shouldLog) { |
|||
SCLogVideoStreamerInfo(@"end annoucing sampleBuffer:%p", sampleBuffer); |
|||
} |
|||
} |
|||
|
|||
- (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
{ |
|||
if (_performingConfigurations) { |
|||
return; |
|||
} |
|||
SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); |
|||
NSTimeInterval currentProcessingTime = CACurrentMediaTime(); |
|||
NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); |
|||
// Only log when the sticky tweak is on, which means frames were kept too long and AVFoundation had to drop the |
|||
// sampleBuffer |
|||
if (_keepLateFrames) { |
|||
SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentProcessingTime, |
|||
currentSampleTime); |
|||
} |
|||
[_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition]; |
|||
} |
|||
|
|||
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate |
|||
|
|||
- (void)captureOutput:(AVCaptureOutput *)captureOutput |
|||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
// Sticky video tweak is off, i.e. lenses are on: |
|||
// we use the same queue for callback and processing, and let AVFoundation decide which frames should be dropped |
|||
if (!_keepLateFrames) { |
|||
[self didOutputSampleBuffer:sampleBuffer]; |
|||
} |
|||
// Sticky video tweak is on |
|||
else { |
|||
if ([_performer isCurrentPerformer]) { |
|||
// Note: one frame might still be called back on the processing queue while switching callback queues; |
|||
// that should be fine. But if the following log appears too often, it is not working as designed. |
|||
SCLogVideoStreamerWarning(@"The callback queue should be a separated queue when sticky tweak is on"); |
|||
} |
|||
// TODO: In sticky video v2, we should consider checking free memory |
|||
if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) { |
|||
SCLogVideoStreamerWarning(@"processingBuffersCount reached to the max. current count:%d", |
|||
_processingBuffersCount); |
|||
[self didDropSampleBuffer:sampleBuffer]; |
|||
return; |
|||
} |
|||
atomic_fetch_add(&_processingBuffersCount, 1); |
|||
CFRetain(sampleBuffer); |
|||
// _performer should always be the processing queue |
|||
[_performer perform:^{ |
|||
[self didOutputSampleBuffer:sampleBuffer]; |
|||
CFRelease(sampleBuffer); |
|||
atomic_fetch_sub(&_processingBuffersCount, 1); |
|||
}]; |
|||
} |
|||
} |
|||
|
|||
- (void)captureOutput:(AVCaptureOutput *)captureOutput |
|||
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
fromConnection:(AVCaptureConnection *)connection |
|||
{ |
|||
[self didDropSampleBuffer:sampleBuffer]; |
|||
} |
|||
|
|||
#pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth) |
|||
|
|||
- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer |
|||
didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection |
|||
NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection |
|||
synchronizedDataForCaptureOutput:_depthDataOutput]; |
|||
AVDepthData *depthData = nil; |
|||
if (syncedDepthData && !syncedDepthData.depthDataWasDropped) { |
|||
depthData = syncedDepthData.depthData; |
|||
} |
|||
|
|||
AVCaptureSynchronizedSampleBufferData *syncedVideoData = |
|||
(AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection |
|||
synchronizedDataForCaptureOutput:_videoDataOutput]; |
|||
if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) { |
|||
CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer; |
|||
[self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil]; |
|||
} |
|||
} |
|||
|
|||
#pragma mark - ARSessionDelegate |
|||
|
|||
- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
NSString *state = nil; |
|||
NSString *reason = nil; |
|||
switch (camera.trackingState) { |
|||
case ARTrackingStateNormal: |
|||
state = @"Normal"; |
|||
break; |
|||
case ARTrackingStateLimited: |
|||
state = @"Limited"; |
|||
break; |
|||
case ARTrackingStateNotAvailable: |
|||
state = @"Not Available"; |
|||
break; |
|||
} |
|||
switch (camera.trackingStateReason) { |
|||
case ARTrackingStateReasonNone: |
|||
reason = @"None"; |
|||
break; |
|||
case ARTrackingStateReasonInitializing: |
|||
reason = @"Initializing"; |
|||
break; |
|||
case ARTrackingStateReasonExcessiveMotion: |
|||
reason = @"Excessive Motion"; |
|||
break; |
|||
case ARTrackingStateReasonInsufficientFeatures: |
|||
reason = @"Insufficient Features"; |
|||
break; |
|||
#if SC_AT_LEAST_SDK_11_3 |
|||
case ARTrackingStateReasonRelocalizing: |
|||
reason = @"Relocalizing"; |
|||
break; |
|||
#endif |
|||
} |
|||
SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason); |
|||
} |
|||
|
|||
- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
#ifdef SC_USE_ARKIT_FACE |
|||
// This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific |
|||
// devices there are ARFrame instances that don't respond to `capturedDepthData`. |
|||
// (note: this was discovered to be due to some people staying on iOS 11 betas). |
|||
AVDepthData *depth = nil; |
|||
if ([frame respondsToSelector:@selector(capturedDepthData)]) { |
|||
depth = frame.capturedDepthData; |
|||
} |
|||
#endif |
|||
|
|||
CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp; |
|||
// Don't deliver more than 30 frames per sec |
|||
BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; |
|||
|
|||
#ifdef SC_USE_ARKIT_FACE |
|||
if (depth) { |
|||
CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp; |
|||
framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; |
|||
} |
|||
|
|||
#endif |
|||
|
|||
SC_GUARD_ELSE_RETURN(framerateMinimumElapsed); |
|||
|
|||
#ifdef SC_USE_ARKIT_FACE |
|||
if (depth) { |
|||
self.lastDepthData = depth; |
|||
_lastDisplayedDepthFrameTimestamp = frame.timestamp; |
|||
} |
|||
#endif |
|||
|
|||
// Make sure that current frame is no longer being used, otherwise drop current frame. |
|||
SC_GUARD_ELSE_RETURN(self.currentFrame == nil); |
|||
|
|||
CVPixelBufferRef pixelBuffer = frame.capturedImage; |
|||
CVPixelBufferLockBaseAddress(pixelBuffer, 0); |
|||
CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000); |
|||
CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid}; |
|||
|
|||
CMVideoFormatDescriptionRef videoInfo; |
|||
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); |
|||
|
|||
CMSampleBufferRef buffer; |
|||
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer); |
|||
CFRelease(videoInfo); |
|||
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); |
|||
|
|||
self.currentFrame = frame; |
|||
[self didOutputSampleBuffer:buffer]; |
|||
[self _updateFieldOfViewWithARFrame:frame]; |
|||
|
|||
CFRelease(buffer); |
|||
} |
|||
|
|||
- (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
for (ARAnchor *anchor in anchors) { |
|||
if ([anchor isKindOfClass:[ARPlaneAnchor class]]) { |
|||
SCLogVideoStreamerInfo(@"ARKit added plane anchor"); |
|||
return; |
|||
} |
|||
} |
|||
} |
|||
|
|||
- (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error); |
|||
[session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]]; |
|||
} |
|||
|
|||
- (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
SCLogVideoStreamerWarning(@"ARKit session interrupted"); |
|||
} |
|||
|
|||
- (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
SCLogVideoStreamerInfo(@"ARKit interruption ended"); |
|||
} |
|||
|
|||
#pragma mark - Private methods |
|||
|
|||
- (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed |
|||
{ |
|||
for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) { |
|||
// Call the completion handlers. |
|||
dispatch_async(completion[0], completion[1]); |
|||
} |
|||
[_waitUntilSampleBufferDisplayedBlocks removeAllObjects]; |
|||
} |
|||
|
|||
// This is the magic that ensures the VideoDataOutput will have the correct |
|||
// orientation and mirroring. |
|||
- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition); |
|||
AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
|||
connection.videoOrientation = _videoOrientation; |
|||
if (devicePosition == SCManagedCaptureDevicePositionFront) { |
|||
connection.videoMirrored = YES; |
|||
} |
|||
} |
|||
|
|||
- (void)_enableVideoStabilizationIfSupported |
|||
{ |
|||
SCTraceStart(); |
|||
if (!SCCameraTweaksEnableVideoStabilization()) { |
|||
SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization"); |
|||
return; |
|||
} |
|||
|
|||
AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
|||
if (!videoConnection) { |
|||
SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection); |
|||
return; |
|||
} |
|||
// Set the preferred video stabilization mode (standard when enabled). Default is off. |
|||
if ([videoConnection isVideoStabilizationSupported]) { |
|||
videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported |
|||
? AVCaptureVideoStabilizationModeStandard |
|||
: AVCaptureVideoStabilizationModeOff; |
|||
NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) }; |
|||
[[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params]; |
|||
SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@", |
|||
(long)videoConnection.preferredVideoStabilizationMode, videoConnection); |
|||
} else { |
|||
SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection); |
|||
} |
|||
} |
|||
|
|||
- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported |
|||
{ |
|||
SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported); |
|||
_videoStabilizationEnabledIfSupported = videoStabilizationIfSupported; |
|||
[self _enableVideoStabilizationIfSupported]; |
|||
} |
|||
|
|||
- (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) |
|||
{ |
|||
SC_GUARD_ELSE_RETURN(frame.camera); |
|||
CGSize imageResolution = frame.camera.imageResolution; |
|||
matrix_float3x3 intrinsics = frame.camera.intrinsics; |
|||
float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI; |
|||
if (_fieldOfView != xFovDegrees) { |
|||
self.fieldOfView = xFovDegrees; |
|||
} |
|||
} |
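// Worked example of the field-of-view formula above (illustrative numbers): for a 1920-pixel-wide |
// image with focal length intrinsics.columns[0][0] = 1500 px, |
// xFovDegrees = 2 * atan(1920 / 3000) * 180 / pi ≈ 65.2 degrees. |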
|||
|
|||
- (NSString *)description |
|||
{ |
|||
return [self debugDescription]; |
|||
} |
|||
|
|||
- (NSString *)debugDescription |
|||
{ |
|||
NSDictionary *debugDict = @{ |
|||
@"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No", |
|||
@"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No", |
|||
@"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No", |
|||
@"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No" |
|||
}; |
|||
return [NSString sc_stringWithFormat:@"%@", debugDict]; |
|||
} |
|||
|
|||
@end |
@ -0,0 +1,63 @@ |
|||
// |
|||
// SCMetalUtils.h |
|||
// Snapchat |
|||
// |
|||
// Created by Michel Loenngren on 7/11/17. |
|||
// |
|||
// Utility class for metal related helpers. |
|||
|
|||
#import <Foundation/Foundation.h> |
|||
#if !TARGET_IPHONE_SIMULATOR |
|||
#import <Metal/Metal.h> |
|||
#endif |
|||
#import <AVFoundation/AVFoundation.h> |
|||
|
|||
#import <SCBase/SCMacros.h> |
|||
|
|||
SC_EXTERN_C_BEGIN |
|||
|
|||
#if !TARGET_IPHONE_SIMULATOR |
|||
extern id<MTLDevice> SCGetManagedCaptureMetalDevice(void); |
|||
#endif |
|||
|
|||
static SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void) |
|||
{ |
|||
#if TARGET_CPU_ARM64 |
|||
return YES; // All 64-bit systems support Metal. |
|||
#else |
|||
return NO; |
|||
#endif |
|||
} |
|||
|
|||
#if !TARGET_IPHONE_SIMULATOR |
|||
static inline id<MTLTexture> SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex, |
|||
MTLPixelFormat pixelFormat, |
|||
CVMetalTextureCacheRef textureCache) |
|||
{ |
|||
size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); |
|||
size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); |
|||
CVMetalTextureRef textureRef; |
|||
if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, |
|||
nil, pixelFormat, width, height, planeIndex, |
|||
&textureRef)) { |
|||
return nil; |
|||
} |
|||
id<MTLTexture> texture = CVMetalTextureGetTexture(textureRef); |
|||
CVBufferRelease(textureRef); |
|||
return texture; |
|||
} |
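// Usage sketch (illustrative; creating the texture cache is the caller's job, and the pixelBuffer |
// variable is assumed in scope): for the 420f bi-planar buffers produced by SCManagedVideoStreamer, |
// plane 0 is luma (r8Unorm) and plane 1 is interleaved chroma (rg8Unorm). |
// |
// CVMetalTextureCacheRef textureCache; |
// CVMetalTextureCacheCreate(kCFAllocatorDefault, NULL, SCGetManagedCaptureMetalDevice(), NULL, &textureCache); |
// id<MTLTexture> yTexture = |
//     SCMetalTextureFromPixelBuffer(pixelBuffer, 0, MTLPixelFormatR8Unorm, textureCache); |
// id<MTLTexture> cbcrTexture = |
//     SCMetalTextureFromPixelBuffer(pixelBuffer, 1, MTLPixelFormatRG8Unorm, textureCache); |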
|||
|
|||
static inline void SCMetalCopyTexture(id<MTLTexture> texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex) |
|||
{ |
|||
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); |
|||
void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex); |
|||
NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex); |
|||
MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex), |
|||
CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)); |
|||
|
|||
[texture getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0]; |
|||
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); |
|||
} |
|||
#endif |
|||
|
|||
SC_EXTERN_C_END |
@ -0,0 +1,25 @@ |
|||
// |
|||
// SCMetalUtils.m |
|||
// Snapchat |
|||
// |
|||
// Created by Michel Loenngren on 8/16/17. |
|||
// |
|||
// |
|||
|
|||
#import "SCMetalUtils.h" |
|||
|
|||
#import <SCFoundation/SCTrace.h> |
|||
|
|||
id<MTLDevice> SCGetManagedCaptureMetalDevice(void) |
|||
{ |
|||
#if !TARGET_IPHONE_SIMULATOR |
|||
SCTraceStart(); |
|||
static dispatch_once_t onceToken; |
|||
static id<MTLDevice> device; |
|||
dispatch_once(&onceToken, ^{ |
|||
device = MTLCreateSystemDefaultDevice(); |
|||
}); |
|||
return device; |
|||
#endif |
|||
return nil; |
|||
} |
@ -0,0 +1,18 @@ |
|||
// |
|||
// SCScanConfiguration.h |
|||
// Snapchat |
|||
// |
|||
// Created by Yang Dai on 3/7/17. |
|||
// Copyright © 2017 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedCapturer.h" |
|||
|
|||
#import <SCSession/SCUserSession.h> |
|||
|
|||
@interface SCScanConfiguration : NSObject |
|||
|
|||
@property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler; |
|||
@property (nonatomic, strong) SCUserSession *userSession; |
|||
|
|||
@end |
@ -0,0 +1,13 @@ |
|||
// |
|||
// SCScanConfiguration.m |
|||
// Snapchat |
|||
// |
|||
// Created by Yang Dai on 3/7/17. |
|||
// Copyright © 2017 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCScanConfiguration.h" |
|||
|
|||
@implementation SCScanConfiguration |
|||
|
|||
@end |
@ -0,0 +1,17 @@ |
|||
// |
|||
// SCSingleFrameStreamCapturer.h |
|||
// Snapchat |
|||
// |
|||
// Created by Benjamin Hollis on 5/3/16. |
|||
// Copyright © 2016 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCCaptureCommon.h" |
|||
|
|||
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h> |
|||
|
|||
#import <Foundation/Foundation.h> |
|||
|
|||
@interface SCSingleFrameStreamCapturer : NSObject <SCManagedVideoDataSourceListener> |
|||
- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler; |
|||
@end |
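// Usage sketch (illustrative; the UIImage parameter of the completion handler matches the |
// callback invocation in the implementation below): |
// |
// SCSingleFrameStreamCapturer *capturer = [[SCSingleFrameStreamCapturer alloc] |
//     initWithCompletion:^(UIImage *image) { |
//         // One-shot: the capturer nils out its callback after the first delivered frame. |
//     }]; |
// [videoDataSource addListener:capturer]; |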
@ -0,0 +1,103 @@ |
|||
// |
|||
// SCSingleFrameStreamCapturer.m |
|||
// Snapchat |
|||
// |
|||
// Created by Benjamin Hollis on 5/3/16. |
|||
// Copyright © 2016 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCSingleFrameStreamCapturer.h" |
|||
|
|||
#import "SCManagedCapturer.h" |
|||
|
|||
@implementation SCSingleFrameStreamCapturer { |
|||
sc_managed_capturer_capture_video_frame_completion_handler_t _callback; |
|||
} |
|||
|
|||
- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler |
|||
{ |
|||
self = [super init]; |
|||
if (self) { |
|||
_callback = completionHandler; |
|||
} |
|||
return self; |
|||
} |
|||
|
|||
#pragma mark - SCManagedVideoDataSourceListener |
|||
|
|||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource |
|||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition |
|||
{ |
|||
if (_callback) { |
|||
UIImage *image = [self imageFromSampleBuffer:sampleBuffer]; |
|||
_callback(image); |
|||
} |
|||
_callback = nil; |
|||
} |
|||
|
|||
/** |
|||
* Decode a CMSampleBufferRef in our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, |
|||
* as set in SCManagedVideoStreamer) into a UIImage. |
|||
* |
|||
* Code from http://stackoverflow.com/a/31553521/11284 |
|||
*/ |
|||
#define clamp(a) (a > 255 ? 255 : (a < 0 ? 0 : a)) |
|||
// TODO: Use the transform code from SCImageProcessIdentityYUVCommand |
|||
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer |
|||
{ |
|||
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
|||
CVPixelBufferLockBaseAddress(imageBuffer, 0); |
|||
|
|||
size_t width = CVPixelBufferGetWidth(imageBuffer); |
|||
size_t height = CVPixelBufferGetHeight(imageBuffer); |
|||
uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); |
|||
size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0); |
|||
uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1); |
|||
size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1); |
|||
|
|||
int bytesPerPixel = 4; |
|||
uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel); |
|||
|
|||
for (int y = 0; y < height; y++) { |
|||
uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel]; |
|||
uint8_t *yBufferLine = &yBuffer[y * yPitch]; |
|||
uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch]; |
|||
|
|||
for (int x = 0; x < width; x++) { |
|||
int16_t y = yBufferLine[x]; |
|||
int16_t cb = cbCrBufferLine[x & ~1] - 128; |
|||
int16_t cr = cbCrBufferLine[x | 1] - 128; |
|||
|
|||
uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel]; |
|||
|
|||
int16_t r = (int16_t)roundf(y + cr * 1.4); |
|||
int16_t g = (int16_t)roundf(y + cb * -0.343 + cr * -0.711); |
|||
int16_t b = (int16_t)roundf(y + cb * 1.765); |
|||
|
|||
rgbOutput[0] = 0xff; |
|||
rgbOutput[1] = clamp(b); |
|||
rgbOutput[2] = clamp(g); |
|||
rgbOutput[3] = clamp(r); |
|||
} |
|||
} |
|||
|
|||
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); |
|||
CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, |
|||
kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast); |
|||
CGImageRef quartzImage = CGBitmapContextCreateImage(context); |
|||
|
|||
// TODO: Hardcoding UIImageOrientationRight seems cheesy |
|||
UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight]; |
|||
|
|||
CGContextRelease(context); |
|||
CGColorSpaceRelease(colorSpace); |
|||
CGImageRelease(quartzImage); |
|||
free(rgbBuffer); |
|||
|
|||
CVPixelBufferUnlockBaseAddress(imageBuffer, 0); |
|||
|
|||
return image; |
|||
} |
|||
|
|||
@end |
@ -0,0 +1,19 @@ |
|||
// |
|||
// SCStillImageCaptureVideoInputMethod.h |
|||
// Snapchat |
|||
// |
|||
// Created by Alexander Grytsiuk on 3/16/16. |
|||
// Copyright © 2016 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCManagedCapturerState.h" |
|||
|
|||
#import <AVFoundation/AVFoundation.h> |
|||
|
|||
@interface SCStillImageCaptureVideoInputMethod : NSObject |
|||
|
|||
- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state |
|||
successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, |
|||
NSError *error))successBlock |
|||
failureBlock:(void (^)(NSError *error))failureBlock; |
|||
@end |
@ -0,0 +1,140 @@ |
|||
// |
|||
// SCStillImageCaptureVideoInputMethod.m |
|||
// Snapchat |
|||
// |
|||
// Created by Alexander Grytsiuk on 3/16/16. |
|||
// Copyright © 2016 Snapchat, Inc. All rights reserved. |
|||
// |
|||
|
|||
#import "SCStillImageCaptureVideoInputMethod.h" |
|||
|
|||
#import "SCManagedCapturer.h" |
|||
#import "SCManagedVideoFileStreamer.h" |
|||
|
|||
typedef unsigned char uchar_t; |
|||
int clamp(int val, int low, int high) |
|||
{ |
|||
if (val < low) |
|||
val = low; |
|||
if (val > high) |
|||
val = high; |
|||
return val; |
|||
} |
|||
|
|||
void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b) |
|||
{ |
|||
double red = yValue + (1.370705 * (vValue - 128)); |
|||
double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128)); |
|||
double blue = yValue + (1.732446 * (uValue - 128)); |
|||
*r = clamp(red, 0, 255); |
|||
*g = clamp(green, 0, 255); |
|||
*b = clamp(blue, 0, 255); |
|||
} |
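// The coefficients above appear to be the classic full-range YUV -> RGB conversion |
// (R = Y + 1.370705 * (V - 128), B = Y + 1.732446 * (U - 128)); as a sanity check, |
// Y = 128, U = V = 128 maps to mid-gray (128, 128, 128). |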
|||
|
|||
void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel, |
|||
int rgbBytesPerRow) |
|||
{ |
|||
uchar_t *uvData = nv21Data + height * width; |
|||
for (int h = 0; h < height; h++) { |
|||
uchar_t *yRowBegin = nv21Data + h * width; |
|||
uchar_t *uvRowBegin = uvData + h / 2 * width; |
|||
uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h; |
|||
for (int w = 0; w < width; w++) { |
|||
uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w; |
|||
yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]), |
|||
&(rgbPixelBegin[1]), &(rgbPixelBegin[2])); |
|||
} |
|||
} |
|||
} |

@implementation SCStillImageCaptureVideoInputMethod

- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state
                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,
                                                     NSError *error))successBlock
                              failureBlock:(void (^)(NSError *error))failureBlock
{
    id<SCManagedVideoDataSource> videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource];
    if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) {
        SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource;
        [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {
            BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront;
#if TARGET_IPHONE_SIMULATOR
            // On the simulator the pixel buffer is converted manually; see
            // -imageWithCVPixelBuffer: below.
            UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer];
            CGImageRef videoImage = uiImage.CGImage;
            UIImage *capturedImage = [UIImage
                imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage
                           scale:1.0
                     orientation:UIImageOrientationRight];
#else
            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            CIContext *temporaryContext = [CIContext contextWithOptions:nil];

            CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
            CGImageRef videoImage =
                [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)];

            UIImage *capturedImage =
                [UIImage imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:size].CGImage : videoImage
                                    scale:1.0
                              orientation:UIImageOrientationRight];

            CGImageRelease(videoImage);
#endif
            if (successBlock) {
                successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil);
            }
        }];
    } else {
        if (failureBlock) {
            failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]);
        }
    }
}

- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size
{
    // Drawing the CGImage into a UIKit bitmap context flips it vertically,
    // because CGContextDrawImage uses a bottom-left origin while UIKit
    // contexts are top-left.
    UIGraphicsBeginImageContext(size);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t rgbBytesPerPixel = 4;
    size_t rgbBytesPerRow = width * rgbBytesPerPixel;

    uchar_t *nv21Data = (uchar_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    uchar_t *rgbData = (uchar_t *)malloc(rgbBytesPerRow * height);

    convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context =
        CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    UIImage *result = [UIImage imageWithCGImage:cgImage];

    CGImageRelease(cgImage);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    free(rgbData);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return result;
}

- (NSString *)methodName
{
    return @"VideoInput";
}

@end
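
A hedged usage sketch of the capture method above (the plain -init and the `state` value are assumptions; the real call sites live elsewhere in this commit):

SCStillImageCaptureVideoInputMethod *method = [[SCStillImageCaptureVideoInputMethod alloc] init];
[method captureStillImageWithCapturerState:state // assumed: the capturer's current state object
    successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
        // imageData is a full-quality JPEG; cameraInfo and error are nil on this path.
    }
    failureBlock:^(NSError *error) {
        // Reached when the current video data source is not an SCManagedVideoFileStreamer.
    }];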
@ -0,0 +1,28 @@ |
//
//  SCTimedTask.h
//  Snapchat
//
//  Created by Michel Loenngren on 4/2/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

/*
 Block-based timed task.
 */
@interface SCTimedTask : NSObject

@property (nonatomic, assign) CMTime targetTime;
@property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond);

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithTargetTime:(CMTime)targetTime
                              task:(void (^)(CMTime relativePresentationTime,
                                             CGFloat sessionStartTimeDelayInSecond))task;

- (NSString *)description;

@end
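
A minimal sketch of intended usage, inferred from the interface (the 600 timescale and whatever queue eventually fires the task are assumptions):

SCTimedTask *task = [[SCTimedTask alloc]
    initWithTargetTime:CMTimeMakeWithSeconds(2.0, 600)
                  task:^(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond) {
                      // Assumed: invoked by a task runner once recording
                      // reaches the 2-second mark.
                  }];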
@ -0,0 +1,32 @@ |
//
//  SCTimedTask.m
//  Snapchat
//
//  Created by Michel Loenngren on 4/2/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCTimedTask.h"

#import <SCFoundation/NSString+SCFormat.h>

@implementation SCTimedTask

- (instancetype)initWithTargetTime:(CMTime)targetTime
                              task:(void (^)(CMTime relativePresentationTime,
                                             CGFloat sessionStartTimeDelayInSecond))task
{
    if (self = [super init]) {
        _targetTime = targetTime;
        _task = task;
    }
    return self;
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"<%@: %p, targetTime: %lld>", NSStringFromClass([self class]), self, _targetTime.value];
}

@end
@ -0,0 +1,83 @@ |
//
//  SCVideoCaptureSessionInfo.h
//  Snapchat
//
//  Created by Michel Loenngren on 3/27/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <SCFoundation/NSString+SCFormat.h>

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h> // for CACurrentMediaTime()

typedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) {
    SCManagedVideoCapturerInfoAudioQueueError,
    SCManagedVideoCapturerInfoAssetWriterError,
    SCManagedVideoCapturerInfoAudioSessionError,
    SCManagedVideoCapturerInfoAudioQueueRetrySuccess,
    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue,
    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware
};

typedef u_int32_t sc_managed_capturer_recording_session_t;

/*
 Container object holding information about the
 current recording session.
 */
typedef struct {
    CMTime startTime;
    CMTime endTime;
    CMTime duration;
    sc_managed_capturer_recording_session_t sessionId;
} SCVideoCaptureSessionInfo;

static inline SCVideoCaptureSessionInfo
SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime, sc_managed_capturer_recording_session_t sessionId)
{
    SCVideoCaptureSessionInfo session;
    session.startTime = startTime;
    session.endTime = endTime;
    if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) {
        session.duration = CMTimeSubtract(endTime, startTime);
    } else {
        session.duration = kCMTimeInvalid;
    }
    session.sessionId = sessionId;
    return session;
}

static inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo)
{
    if (CMTIME_IS_VALID(sessionInfo.startTime)) {
        if (CMTIME_IS_VALID(sessionInfo.endTime)) {
            return CMTimeGetSeconds(sessionInfo.duration);
        }
        // Recording still in progress: measure against the wall clock.
        return CACurrentMediaTime() - CMTimeGetSeconds(sessionInfo.startTime);
    }
    return 0;
}

static inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label)
{
    if (CMTIME_IS_VALID(time)) {
        return [NSString sc_stringWithFormat:@"%@: %f", label, CMTimeGetSeconds(time)];
    } else {
        return [NSString sc_stringWithFormat:@"%@: Invalid", label];
    }
}

static inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo)
{
    NSMutableString *description = [NSMutableString new];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @"StartTime")];
    [description appendString:@", "];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @"EndTime")];
    [description appendString:@", "];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @"Duration")];
    [description appendString:@", "];
    [description appendString:[NSString sc_stringWithFormat:@"Id: %u", sessionInfo.sessionId]];

    return [description copy];
}
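
A short usage sketch of the inline helpers above (the times and session id are illustrative):

SCVideoCaptureSessionInfo info =
    SCVideoCaptureSessionInfoMake(CMTimeMakeWithSeconds(0, 600), CMTimeMakeWithSeconds(5.5, 600), 1);
NSTimeInterval duration = SCVideoCaptureSessionInfoGetCurrentDuration(info); // 5.5
NSString *debugDescription = SCVideoCaptureSessionInfoGetDebugDescription(info);
// e.g. "StartTime: 0.000000, EndTime: 5.500000, Duration: 5.500000, Id: 1"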
@ -0,0 +1,13 @@ |
//
//  UIScreen+Debug.h
//  Snapchat
//
//  Created by Derek Peirce on 6/1/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <UIKit/UIKit.h>

// Debug category that logs brightness changes when performance logging is
// enabled; see UIScreen+Debug.m.
@interface UIScreen (Debug)

@end
@ -0,0 +1,28 @@ |

#import "UIScreen+Debug.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCLog.h>

#import <objc/runtime.h>

@implementation UIScreen (Debug)

+ (void)load
{
    if (SCIsPerformanceLoggingEnabled()) {
        static dispatch_once_t once_token;
        dispatch_once(&once_token, ^{
            // Swizzle -setBrightness: so that every brightness change is logged.
            SEL setBrightnessSelector = @selector(setBrightness:);
            SEL setBrightnessLoggerSelector = @selector(logged_setBrightness:);
            Method originalMethod = class_getInstanceMethod(self, setBrightnessSelector);
            Method extendedMethod = class_getInstanceMethod(self, setBrightnessLoggerSelector);
            method_exchangeImplementations(originalMethod, extendedMethod);
        });
    }
}

- (void)logged_setBrightness:(CGFloat)brightness
{
    SCLogGeneralInfo(@"Setting brightness from %f to %f", self.brightness, brightness);
    // After the exchange above, this call dispatches to the original
    // -setBrightness: implementation, preserving the stock behavior.
    [self logged_setBrightness:brightness];
}

@end
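
Once the category is loaded with performance logging enabled, any brightness write is logged before taking effect, e.g.:

[UIScreen mainScreen].brightness = 0.5; // logs "Setting brightness from ... to 0.500000"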