  1. //
  2. // SCCaptureWorker.m
  3. // Snapchat
  4. //
  5. // Created by Lin Jia on 10/19/17.
  6. //
  7. //
  8. #import "SCCaptureWorker.h"
  9. #import "ARConfiguration+SCConfiguration.h"
  10. #import "SCBlackCameraDetector.h"
  11. #import "SCBlackCameraNoOutputDetector.h"
  12. #import "SCCameraTweaks.h"
  13. #import "SCCaptureCoreImageFaceDetector.h"
  14. #import "SCCaptureFaceDetector.h"
  15. #import "SCCaptureMetadataOutputDetector.h"
  16. #import "SCCaptureSessionFixer.h"
  17. #import "SCManagedCaptureDevice+SCManagedCapturer.h"
  18. #import "SCManagedCaptureDeviceDefaultZoomHandler.h"
  19. #import "SCManagedCaptureDeviceHandler.h"
  20. #import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h"
  21. #import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h"
  22. #import "SCManagedCaptureDeviceSubjectAreaHandler.h"
  23. #import "SCManagedCapturePreviewLayerController.h"
  24. #import "SCManagedCaptureSession.h"
  25. #import "SCManagedCapturer.h"
  26. #import "SCManagedCapturerARImageCaptureProvider.h"
  27. #import "SCManagedCapturerARSessionHandler.h"
  28. #import "SCManagedCapturerGLViewManagerAPI.h"
  29. #import "SCManagedCapturerLensAPIProvider.h"
  30. #import "SCManagedCapturerLogging.h"
  31. #import "SCManagedCapturerState.h"
  32. #import "SCManagedCapturerStateBuilder.h"
  33. #import "SCManagedCapturerV1.h"
  34. #import "SCManagedDeviceCapacityAnalyzer.h"
  35. #import "SCManagedDeviceCapacityAnalyzerHandler.h"
  36. #import "SCManagedDroppedFramesReporter.h"
  37. #import "SCManagedFrontFlashController.h"
  38. #import "SCManagedStillImageCapturerHandler.h"
  39. #import "SCManagedVideoARDataSource.h"
  40. #import "SCManagedVideoCapturer.h"
  41. #import "SCManagedVideoCapturerHandler.h"
  42. #import "SCManagedVideoFileStreamer.h"
  43. #import "SCManagedVideoScanner.h"
  44. #import "SCManagedVideoStreamReporter.h"
  45. #import "SCManagedVideoStreamer.h"
  46. #import "SCMetalUtils.h"
  47. #import "SCProcessingPipelineBuilder.h"
  48. #import "SCVideoCaptureSessionInfo.h"
  49. #import <SCBatteryLogger/SCBatteryLogger.h>
  50. #import <SCFoundation/SCDeviceName.h>
  51. #import <SCFoundation/SCQueuePerformer.h>
  52. #import <SCFoundation/SCThreadHelpers.h>
  53. #import <SCFoundation/SCTraceODPCompatible.h>
  54. #import <SCFoundation/SCZeroDependencyExperiments.h>
  55. #import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
  56. #import <SCImageProcess/SCImageProcessVideoPlaybackSession.h>
  57. #import <SCLogger/SCCameraMetrics.h>
  58. #import <SCLogger/SCLogger+Performance.h>
  59. @import ARKit;
  60. static const char *kSCManagedCapturerQueueLabel = "com.snapchat.managed_capturer";
  61. static NSTimeInterval const kMaxDefaultScanFrameDuration = 1. / 15; // Restrict scanning to max 15 frames per second
  62. static NSTimeInterval const kMaxPassiveScanFrameDuration = 1.; // Restrict scanning to max 1 frame per second
  63. static float const kScanTargetCPUUtilization = 0.5; // 50% utilization
  64. static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain";
  65. static NSInteger const kSCManagedCapturerRecordVideoBusy = 3001;
  66. static NSInteger const kSCManagedCapturerCaptureStillImageBusy = 3002;
  67. static UIImageOrientation SCMirroredImageOrientation(UIImageOrientation orientation)
  68. {
  69. switch (orientation) {
  70. case UIImageOrientationRight:
  71. return UIImageOrientationLeftMirrored;
  72. case UIImageOrientationLeftMirrored:
  73. return UIImageOrientationRight;
  74. case UIImageOrientationUp:
  75. return UIImageOrientationUpMirrored;
  76. case UIImageOrientationUpMirrored:
  77. return UIImageOrientationUp;
  78. case UIImageOrientationDown:
  79. return UIImageOrientationDownMirrored;
  80. case UIImageOrientationDownMirrored:
  81. return UIImageOrientationDown;
  82. case UIImageOrientationLeft:
  83. return UIImageOrientationRightMirrored;
  84. case UIImageOrientationRightMirrored:
  85. return UIImageOrientationLeft;
  86. }
  87. }
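// SCMirroredImageOrientation maps each UIImageOrientation to its horizontally
// mirrored counterpart. It is used further down in this file to mirror stills
// captured from the front camera, presumably so the saved image matches the
// mirrored preview the user saw.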
  88. @implementation SCCaptureWorker
  89. + (SCCaptureResource *)generateCaptureResource
  90. {
  91. SCCaptureResource *captureResource = [[SCCaptureResource alloc] init];
  92. captureResource.queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedCapturerQueueLabel
  93. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  94. queueType:DISPATCH_QUEUE_SERIAL
  95. context:SCQueuePerformerContextCamera];
  96. captureResource.announcer = [[SCManagedCapturerListenerAnnouncer alloc] init];
  97. captureResource.videoCapturerHandler =
  98. [[SCManagedVideoCapturerHandler alloc] initWithCaptureResource:captureResource];
  99. captureResource.stillImageCapturerHandler =
  100. [[SCManagedStillImageCapturerHandler alloc] initWithCaptureResource:captureResource];
  101. captureResource.deviceCapacityAnalyzerHandler =
  102. [[SCManagedDeviceCapacityAnalyzerHandler alloc] initWithCaptureResource:captureResource];
  103. captureResource.deviceZoomHandler = ({
  104. SCManagedCaptureDeviceDefaultZoomHandler *handler = nil;
  105. switch (SCCameraTweaksDeviceZoomHandlerStrategy()) {
  106. case SCManagedCaptureDeviceDefaultZoom:
  107. handler = [[SCManagedCaptureDeviceDefaultZoomHandler alloc] initWithCaptureResource:captureResource];
  108. break;
  109. case SCManagedCaptureDeviceSavitzkyGolayFilter:
  110. handler = [[SCManagedCaptureDeviceSavitzkyGolayZoomHandler alloc] initWithCaptureResource:captureResource];
  111. break;
  112. case SCManagedCaptureDeviceLinearInterpolation:
  113. handler =
  114. [[SCManagedCaptureDeviceLinearInterpolationZoomHandler alloc] initWithCaptureResource:captureResource];
  115. break;
  116. }
  117. handler;
  118. });
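// The ({ ... }) construct above is a Clang/GCC statement expression: its value is the
// last expression inside it (`handler`), so deviceZoomHandler ends up holding whichever
// zoom handler the SCCameraTweaksDeviceZoomHandlerStrategy() tweak selects.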
  119. captureResource.captureDeviceHandler =
  120. [[SCManagedCaptureDeviceHandler alloc] initWithCaptureResource:captureResource];
  121. captureResource.arSessionHandler =
  122. [[SCManagedCapturerARSessionHandler alloc] initWithCaptureResource:captureResource];
  123. captureResource.tokenSet = [NSMutableSet new];
  124. captureResource.allowsZoom = YES;
  125. captureResource.debugInfoDict = [[NSMutableDictionary alloc] init];
  126. captureResource.notificationRegistered = NO;
  127. return captureResource;
  128. }
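// A rough usage sketch of this worker (illustrative only; the real call sites are
// expected to live in the capturer classes such as SCManagedCapturerV1, and the exact
// sequencing there may differ):
//
//   SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource];
//   [resource.queuePerformer perform:^{
//       [SCCaptureWorker setupWithCaptureResource:resource
//                                  devicePosition:SCManagedCaptureDevicePositionBack];
//       // ... later, once a token has been issued:
//       // [SCCaptureWorker startRunningWithCaptureResource:resource
//       //                                            token:token
//       //                                completionHandler:nil];
//   }];
//
// Note that setupWithCaptureResource: asserts the status is still
// SCManagedCapturerStatusUnknown, so it should run exactly once per capture resource.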
  129. + (void)setupWithCaptureResource:(SCCaptureResource *)captureResource
  130. devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  131. {
  132. SCTraceODPCompatibleStart(2);
  133. SCAssert(captureResource.status == SCManagedCapturerStatusUnknown, @"The status should be unknown");
  134. captureResource.device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];
  135. if (!captureResource.device) {
  136. // Always prefer front camera over back camera
  137. if ([SCManagedCaptureDevice front]) {
  138. captureResource.device = [SCManagedCaptureDevice front];
  139. devicePosition = SCManagedCaptureDevicePositionFront;
  140. } else {
  141. captureResource.device = [SCManagedCaptureDevice back];
  142. devicePosition = SCManagedCaptureDevicePositionBack;
  143. }
  144. }
  145. // Initial state
  146. SCLogCapturerInfo(@"Init state with devicePosition:%lu, zoomFactor:%f, flashSupported:%d, "
  147. @"torchSupported:%d, flashActive:%d, torchActive:%d",
  148. (unsigned long)devicePosition, captureResource.device.zoomFactor,
  149. captureResource.device.isFlashSupported, captureResource.device.isTorchSupported,
  150. captureResource.device.flashActive, captureResource.device.torchActive);
  151. captureResource.state = [[SCManagedCapturerState alloc] initWithIsRunning:NO
  152. isNightModeActive:NO
  153. isPortraitModeActive:NO
  154. lowLightCondition:NO
  155. adjustingExposure:NO
  156. devicePosition:devicePosition
  157. zoomFactor:captureResource.device.zoomFactor
  158. flashSupported:captureResource.device.isFlashSupported
  159. torchSupported:captureResource.device.isTorchSupported
  160. flashActive:captureResource.device.flashActive
  161. torchActive:captureResource.device.torchActive
  162. lensesActive:NO
  163. arSessionActive:NO
  164. liveVideoStreaming:NO
  165. lensProcessorReady:NO];
  166. [self configLensesProcessorWithCaptureResource:captureResource];
  167. [self configARSessionWithCaptureResource:captureResource];
  168. [self configCaptureDeviceHandlerWithCaptureResource:captureResource];
  169. [self configAVCaptureSessionWithCaptureResource:captureResource];
  170. [self configImageCapturerWithCaptureResource:captureResource];
  171. [self configDeviceCapacityAnalyzerWithCaptureResource:captureResource];
  172. [self configVideoDataSourceWithCaptureResource:captureResource devicePosition:devicePosition];
  173. [self configVideoScannerWithCaptureResource:captureResource];
  174. [self configVideoCapturerWithCaptureResource:captureResource];
  175. if (!SCIsSimulator()) {
  176. // We don't want it enabled for simulator
  177. [self configBlackCameraDetectorWithCaptureResource:captureResource];
  178. }
  179. if (SCCameraTweaksEnableFaceDetectionFocus(captureResource.state.devicePosition)) {
  180. [self configureCaptureFaceDetectorWithCaptureResource:captureResource];
  181. }
  182. }
  183. + (void)setupCapturePreviewLayerController
  184. {
  185. SCAssert([[SCQueuePerformer mainQueuePerformer] isCurrentPerformer], @"");
  186. [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];
  187. }
  188. + (void)configLensesProcessorWithCaptureResource:(SCCaptureResource *)captureResource
  189. {
  190. SCManagedCapturerStateBuilder *stateBuilder =
  191. [SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state];
  192. [stateBuilder setLensProcessorReady:YES];
  193. captureResource.state = [stateBuilder build];
  194. captureResource.lensProcessingCore = [captureResource.lensAPIProvider lensAPIForCaptureResource:captureResource];
  195. }
  196. + (void)configARSessionWithCaptureResource:(SCCaptureResource *)captureResource
  197. {
  198. if (@available(iOS 11.0, *)) {
  199. captureResource.arSession = [[ARSession alloc] init];
  200. captureResource.arImageCapturer =
  201. [captureResource.arImageCaptureProvider arImageCapturerWith:captureResource.queuePerformer
  202. lensProcessingCore:captureResource.lensProcessingCore];
  203. }
  204. }
  205. + (void)configAVCaptureSessionWithCaptureResource:(SCCaptureResource *)captureResource
  206. {
  207. #if !TARGET_IPHONE_SIMULATOR
  208. captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
  209. // lazily initialize _captureResource.kvoController on background thread
  210. if (!captureResource.kvoController) {
  211. captureResource.kvoController = [[FBKVOController alloc] initWithObserver:[SCManagedCapturerV1 sharedInstance]];
  212. }
  213. [captureResource.kvoController unobserve:captureResource.managedSession.avSession];
  214. captureResource.managedSession =
  215. [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:captureResource.blackCameraDetector];
  216. [captureResource.kvoController observe:captureResource.managedSession.avSession
  217. keyPath:@keypath(captureResource.managedSession.avSession, running)
  218. options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
  219. action:captureResource.handleAVSessionStatusChange];
  220. #endif
  221. [captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];
  222. [captureResource.device setDeviceAsInput:captureResource.managedSession.avSession];
  223. }
  224. + (void)configDeviceCapacityAnalyzerWithCaptureResource:(SCCaptureResource *)captureResource
  225. {
  226. captureResource.deviceCapacityAnalyzer =
  227. [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:captureResource.videoDataSource.performer];
  228. [captureResource.deviceCapacityAnalyzer addListener:captureResource.deviceCapacityAnalyzerHandler];
  229. [captureResource.deviceCapacityAnalyzer setLowLightConditionEnabled:[SCManagedCaptureDevice isNightModeSupported]];
  230. [captureResource.deviceCapacityAnalyzer addListener:captureResource.stillImageCapturer];
  231. [captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:captureResource.device];
  232. }
  233. + (void)configVideoDataSourceWithCaptureResource:(SCCaptureResource *)captureResource
  234. devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  235. {
  236. if (captureResource.fileInputDecider.shouldProcessFileInput) {
  237. captureResource.videoDataSource =
  238. [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:captureResource.fileInputDecider.fileURL];
  239. [captureResource.lensProcessingCore setLensesActive:YES
  240. videoOrientation:captureResource.videoDataSource.videoOrientation
  241. filterFactory:nil];
  242. runOnMainThreadAsynchronously(^{
  243. [captureResource.videoPreviewGLViewManager prepareViewIfNecessary];
  244. });
  245. } else {
  246. if (@available(iOS 11.0, *)) {
  247. captureResource.videoDataSource =
  248. [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
  249. arSession:captureResource.arSession
  250. devicePosition:devicePosition];
  251. [captureResource.videoDataSource addListener:captureResource.arImageCapturer];
  252. if (captureResource.state.isPortraitModeActive) {
  253. [captureResource.videoDataSource setDepthCaptureEnabled:YES];
  254. SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
  255. processingPipelineBuilder.portraitModeEnabled = YES;
  256. SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
  257. [captureResource.videoDataSource addProcessingPipeline:pipeline];
  258. }
  259. } else {
  260. captureResource.videoDataSource =
  261. [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
  262. devicePosition:devicePosition];
  263. }
  264. }
  265. [captureResource.videoDataSource addListener:captureResource.lensProcessingCore.capturerListener];
  266. [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
  267. [captureResource.videoDataSource addListener:captureResource.stillImageCapturer];
  268. if (SCIsMasterBuild()) {
  269. captureResource.videoStreamReporter = [[SCManagedVideoStreamReporter alloc] init];
  270. [captureResource.videoDataSource addListener:captureResource.videoStreamReporter];
  271. }
  272. }
  273. + (void)configVideoScannerWithCaptureResource:(SCCaptureResource *)captureResource
  274. {
  275. // When initializing video scanner:
  276. // Restrict default scanning to max 15 frames per second.
  277. // Restrict passive scanning to max 1 frame per second.
  278. // Give CPU time to rest.
  279. captureResource.videoScanner =
  280. [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:kMaxDefaultScanFrameDuration
  281. maxFramePassiveDuration:kMaxPassiveScanFrameDuration
  282. restCycle:1 - kScanTargetCPUUtilization];
  283. [captureResource.videoDataSource addListener:captureResource.videoScanner];
  284. [captureResource.deviceCapacityAnalyzer addListener:captureResource.videoScanner];
  285. }
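// With the constants defined at the top of this file, this configures the scanner as:
//   kMaxDefaultScanFrameDuration = 1/15 s  -> at most ~15 scanned frames per second
//   kMaxPassiveScanFrameDuration = 1 s     -> at most 1 passively scanned frame per second
//   restCycle = 1 - kScanTargetCPUUtilization = 0.5 -> the scanner is asked to idle for
//   roughly half of each cycle, targeting about 50% CPU utilization.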
  286. + (void)configVideoCapturerWithCaptureResource:(SCCaptureResource *)captureResource
  287. {
  288. if (SCCameraTweaksEnableCaptureSharePerformer()) {
  289. captureResource.videoCapturer =
  290. [[SCManagedVideoCapturer alloc] initWithQueuePerformer:captureResource.queuePerformer];
  291. } else {
  292. captureResource.videoCapturer = [[SCManagedVideoCapturer alloc] init];
  293. }
  294. [captureResource.videoCapturer addListener:captureResource.lensProcessingCore.capturerListener];
  295. captureResource.videoCapturer.delegate = captureResource.videoCapturerHandler;
  296. }
  297. + (void)configImageCapturerWithCaptureResource:(SCCaptureResource *)captureResource
  298. {
  299. captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:captureResource];
  300. }
  301. + (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource
  302. token:(SCCapturerToken *)token
  303. completionHandler:(dispatch_block_t)completionHandler
  304. {
  305. [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
  306. uniqueId:@""
  307. stepName:@"startOpenCameraOnManagedCaptureQueue"];
  308. SCTraceSignal(@"Add token %@ to set %@", token, captureResource.tokenSet);
  309. [captureResource.tokenSet addObject:token];
  310. if (captureResource.appInBackground) {
  311. SCTraceSignal(@"Will skip startRunning on AVCaptureSession because we are in background");
  312. }
  313. SCTraceStartSection("start session")
  314. {
  315. if (!SCDeviceSupportsMetal()) {
  316. SCCAssert(captureResource.videoPreviewLayer, @"videoPreviewLayer should be created already");
  317. if (captureResource.status == SCManagedCapturerStatusReady) {
318. // Need to wrap this in a CATransaction because startRunning will change the
319. // AVCaptureVideoPreviewLayer; without an atomic update it would leave the
320. // layer in an inconsistent state.
  322. [CATransaction begin];
  323. [CATransaction setDisableActions:YES];
  324. captureResource.videoPreviewLayer.session = captureResource.managedSession.avSession;
  325. if (!captureResource.appInBackground) {
  326. SCGhostToSnappableSignalCameraStart();
  327. [captureResource.managedSession startRunning];
  328. }
  329. [self setupVideoPreviewLayer:captureResource];
  330. [CATransaction commit];
  331. SCLogCapturerInfo(@"[_captureResource.avSession startRunning] finished. token: %@", token);
  332. }
333. // In case we don't use the sample buffer path, we need to fake knowing when the first frame is received.
  334. SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
  335. } else {
  336. if (captureResource.status == SCManagedCapturerStatusReady) {
  337. if (!captureResource.appInBackground) {
  338. SCGhostToSnappableSignalCameraStart();
  339. [captureResource.managedSession startRunning];
  340. SCLogCapturerInfo(
  341. @"[_captureResource.avSession startRunning] finished using sample buffer. token: %@", token);
  342. }
  343. }
  344. }
  345. }
  346. SCTraceEndSection();
  347. SCTraceStartSection("start streaming")
  348. {
349. // Start streaming after start running, but make sure we start it
350. // regardless of whether the status is ready or not.
  352. [self startStreaming:captureResource];
  353. }
  354. SCTraceEndSection();
  355. if (!captureResource.notificationRegistered) {
  356. captureResource.notificationRegistered = YES;
  357. [captureResource.deviceSubjectAreaHandler startObserving];
  358. [[NSNotificationCenter defaultCenter] addObserver:[SCManagedCapturerV1 sharedInstance]
  359. selector:captureResource.sessionRuntimeError
  360. name:AVCaptureSessionRuntimeErrorNotification
  361. object:nil];
  362. }
  363. if (captureResource.status == SCManagedCapturerStatusReady) {
  364. // Schedule a timer to check the running state and fix any inconsistency.
  365. runOnMainThreadAsynchronously(^{
  366. [self setupLivenessConsistencyTimerIfForeground:captureResource];
  367. });
  368. SCLogCapturerInfo(@"Setting isRunning to YES. token: %@", token);
  369. captureResource.state =
  370. [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:YES] build];
  371. captureResource.status = SCManagedCapturerStatusRunning;
  372. }
  373. [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
  374. uniqueId:@""
  375. stepName:@"endOpenCameraOnManagedCaptureQueue"];
  376. [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsOpen uniqueId:@"" parameters:nil];
  377. SCManagedCapturerState *state = [captureResource.state copy];
  378. SCTraceResumeToken resumeToken = SCTraceCapture();
  379. runOnMainThreadAsynchronously(^{
  380. SCTraceResume(resumeToken);
  381. [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
  382. [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStartRunning:state];
  383. [[SCBatteryLogger shared] logManagedCapturerDidStartRunning];
  384. if (completionHandler) {
  385. completionHandler();
  386. }
  387. if (!SCDeviceSupportsMetal()) {
388. // This approximates the did-render time; it is not accurate.
  389. SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
  390. }
  391. });
  392. }
  393. + (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource
  394. token:(SCCapturerToken *)token
  395. completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
  396. {
  397. SCTraceODPCompatibleStart(2);
  398. SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
  399. BOOL videoPreviewLayerChanged = NO;
  400. SCAssert([captureResource.tokenSet containsObject:token],
  401. @"It should be a valid token that is issued by startRunning method.");
  402. SCTraceSignal(@"Remove token %@, from set %@", token, captureResource.tokenSet);
  403. SCLogCapturerInfo(@"Stop running. token:%@ tokenSet:%@", token, captureResource.tokenSet);
  404. [captureResource.tokenSet removeObject:token];
  405. BOOL succeed = (captureResource.tokenSet.count == 0);
  406. if (succeed && captureResource.status == SCManagedCapturerStatusRunning) {
  407. captureResource.status = SCManagedCapturerStatusReady;
  408. if (@available(iOS 11.0, *)) {
  409. [captureResource.arSession pause];
  410. }
  411. [captureResource.managedSession stopRunning];
  412. if (!SCDeviceSupportsMetal()) {
  413. [captureResource.videoDataSource stopStreaming];
  414. [self redoVideoPreviewLayer:captureResource];
  415. videoPreviewLayerChanged = YES;
  416. } else {
  417. [captureResource.videoDataSource pauseStreaming];
  418. }
  419. if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera) {
  420. [[SCManagedCapturerV1 sharedInstance] setDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
  421. completionHandler:nil
  422. context:SCCapturerContext];
  423. }
424. // We always disable lenses and hide _captureResource.videoPreviewGLView when the app goes
425. // into the background, thus there is no need to clean up anything here.
426. // _captureResource.videoPreviewGLView will be shown to the user again only once a frame
427. // has been processed by the lenses processor.
  430. // Remove the liveness timer which checks the health of the running state
  431. runOnMainThreadAsynchronously(^{
  432. [self destroyLivenessConsistencyTimer:captureResource];
  433. });
  434. SCLogCapturerInfo(@"Setting isRunning to NO. removed token: %@", token);
  435. captureResource.state =
  436. [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:NO] build];
  437. captureResource.notificationRegistered = NO;
  438. [captureResource.deviceSubjectAreaHandler stopObserving];
  439. [[NSNotificationCenter defaultCenter] removeObserver:[SCManagedCapturerV1 sharedInstance]
  440. name:AVCaptureSessionRuntimeErrorNotification
  441. object:nil];
  442. [captureResource.arSessionHandler stopObserving];
  443. }
  444. SCManagedCapturerState *state = [captureResource.state copy];
  445. AVCaptureVideoPreviewLayer *videoPreviewLayer = videoPreviewLayerChanged ? captureResource.videoPreviewLayer : nil;
  446. runOnMainThreadAsynchronously(^{
  447. if (succeed) {
  448. [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
  449. [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStopRunning:state];
  450. [[SCBatteryLogger shared] logManagedCapturerDidStopRunning];
  451. if (videoPreviewLayerChanged) {
  452. [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  453. didChangeVideoPreviewLayer:videoPreviewLayer];
  454. }
  455. }
  456. if (completionHandler) {
  457. completionHandler(succeed);
  458. }
  459. });
  460. return succeed;
  461. }
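// Start/stop running are effectively reference-counted through SCCapturerToken:
// startRunningWithCaptureResource:token:completionHandler: adds the token to
// captureResource.tokenSet, and this method only tears the session down (and returns
// YES) once the last outstanding token has been removed, so each start is expected to
// be paired with a stop using the same token.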
  462. + (void)setupVideoPreviewLayer:(SCCaptureResource *)resource
  463. {
  464. SCTraceODPCompatibleStart(2);
  465. SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
  466. @"");
  467. if ([resource.videoPreviewLayer.connection isVideoOrientationSupported]) {
  468. resource.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
  469. }
  470. resource.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  471. resource.videoPreviewLayer.hidden = !resource.managedSession.isRunning;
472. SCLogCapturerInfo(@"Setup video preview layer with connection.enabled:%d, hidden:%d",
  473. resource.videoPreviewLayer.connection.enabled, resource.videoPreviewLayer.hidden);
  474. }
  475. + (void)makeVideoPreviewLayer:(SCCaptureResource *)resource
  476. {
  477. SCTraceODPCompatibleStart(2);
  478. // This can be called either from current queue or from main queue.
  479. SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
  480. @"");
  481. #if !TARGET_IPHONE_SIMULATOR
  482. SCAssert(resource.managedSession.avSession, @"session shouldn't be nil");
  483. #endif
484. // Need to wrap this in a transaction; otherwise this happens off the main
485. // thread, and the layer won't be laid out correctly.
  487. [CATransaction begin];
  488. [CATransaction setDisableActions:YES];
489. // Since _captureResource.avSession is always created / recreated on this private queue, and
490. // videoPreviewLayer.session, if not touched by anyone else, is also set on this
491. // private queue, it should be safe to do this if-clause check.
  494. resource.videoPreviewLayer = [AVCaptureVideoPreviewLayer new];
  495. SCAssert(resource.videoPreviewLayer, @"_captureResource.videoPreviewLayer shouldn't be nil");
  496. [self setupVideoPreviewLayer:resource];
  497. if (resource.device.softwareZoom && resource.device.zoomFactor != 1) {
  498. [self softwareZoomWithDevice:resource.device resource:resource];
  499. }
  500. [CATransaction commit];
  501. SCLogCapturerInfo(@"Created AVCaptureVideoPreviewLayer:%@", resource.videoPreviewLayer);
  502. }
  503. + (void)redoVideoPreviewLayer:(SCCaptureResource *)resource
  504. {
  505. SCTraceODPCompatibleStart(2);
  506. SCLogCapturerInfo(@"redo video preview layer");
  507. AVCaptureVideoPreviewLayer *videoPreviewLayer = resource.videoPreviewLayer;
  508. resource.videoPreviewLayer = nil;
509. // This will do dispatch_sync on the main thread; since mainQueuePerformer
510. // is reentrant, it should be fine on iOS 7.
  512. [[SCQueuePerformer mainQueuePerformer] performAndWait:^{
513. // Hide and remove the session when stopping the video preview layer on the
514. // main thread. It seems that when we nil out the session, it can cause some
515. // relayout on iOS 9 and trigger an assertion.
  518. videoPreviewLayer.hidden = YES;
  519. videoPreviewLayer.session = nil;
520. // We set up the video preview layer immediately after destroying it so
521. // that when we start running again, we don't need to pay the setup cost.
  523. [self makeVideoPreviewLayer:resource];
  524. }];
  525. }
  526. + (void)startStreaming:(SCCaptureResource *)resource
  527. {
  528. SCTraceODPCompatibleStart(2);
  529. ++resource.streamingSequence;
  530. SCLogCapturerInfo(@"Start streaming. streamingSequence:%lu", (unsigned long)resource.streamingSequence);
  531. [resource.videoDataSource startStreaming];
  532. }
  533. + (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource
  534. {
  535. SCTraceODPCompatibleStart(2);
  536. SCAssertMainThread();
  537. if (resource.livenessTimer) {
  538. // If we have the liveness timer already, don't need to set it up.
  539. return;
  540. }
541. // Check if the application is in the background now; if so, we don't need
542. // to set up the liveness timer.
  543. if ([UIApplication sharedApplication].applicationState != UIApplicationStateBackground) {
  544. resource.livenessTimer = [NSTimer scheduledTimerWithTimeInterval:1
  545. target:[SCManagedCapturerV1 sharedInstance]
  546. selector:resource.livenessConsistency
  547. userInfo:nil
  548. repeats:YES];
  549. }
  550. }
  551. + (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource
  552. {
  553. SCTraceODPCompatibleStart(2);
  554. SCAssertMainThread();
  555. [resource.livenessTimer invalidate];
  556. resource.livenessTimer = nil;
  557. }
  558. + (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource
  559. {
  560. [resource.deviceZoomHandler softwareZoomWithDevice:device];
  561. }
  562. + (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource
  563. aspectRatio:(CGFloat)aspectRatio
  564. captureSessionID:(NSString *)captureSessionID
  565. shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
  566. completionHandler:
  567. (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
  568. context:(NSString *)context
  569. {
  570. SCTraceODPCompatibleStart(2);
  571. if (captureResource.stillImageCapturing) {
  572. SCLogCapturerWarning(@"Another still image is capturing. aspectRatio:%f", aspectRatio);
  573. if (completionHandler) {
  574. SCManagedCapturerState *state = [captureResource.state copy];
  575. runOnMainThreadAsynchronously(^{
  576. completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedCapturerErrorDomain
  577. code:kSCManagedCapturerCaptureStillImageBusy
  578. userInfo:nil],
  579. state);
  580. });
  581. }
  582. } else {
  583. captureResource.stillImageCapturing = YES;
  584. [SCCaptureWorker _captureStillImageAsynchronouslyWithCaptureResource:captureResource
  585. aspectRatio:aspectRatio
  586. captureSessionID:captureSessionID
  587. shouldCaptureFromVideo:shouldCaptureFromVideo
  588. completionHandler:completionHandler];
  589. }
  590. }
  591. + (void)_captureStillImageAsynchronouslyWithCaptureResource:(SCCaptureResource *)captureResource
  592. aspectRatio:(CGFloat)aspectRatio
  593. captureSessionID:(NSString *)captureSessionID
  594. shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
  595. completionHandler:
  596. (sc_managed_capturer_capture_still_image_completion_handler_t)
  597. completionHandler
  598. {
  599. SCTraceODPCompatibleStart(2);
  600. SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
  601. SCAssert(completionHandler, @"completionHandler cannot be nil");
  602. SCManagedCapturerState *state = [captureResource.state copy];
  603. SCLogCapturerInfo(@"Capturing still image. aspectRatio:%f state:%@", aspectRatio, state);
604. // If the video streamer is not running yet when we start capturing,
605. // start running it.
  606. [SCCaptureWorker startStreaming:captureResource];
  607. SCManagedStillImageCapturer *stillImageCapturer = captureResource.stillImageCapturer;
  608. if (@available(iOS 11.0, *)) {
  609. if (state.arSessionActive) {
  610. stillImageCapturer = captureResource.arImageCapturer;
  611. }
  612. }
  613. dispatch_block_t stillImageCaptureHandler = ^{
  614. SCCAssert(captureResource.stillImageCapturer, @"stillImageCapturer should be available");
  615. float zoomFactor = captureResource.device.softwareZoom ? captureResource.device.zoomFactor : 1;
  616. [stillImageCapturer
  617. captureStillImageWithAspectRatio:aspectRatio
  618. atZoomFactor:zoomFactor
  619. fieldOfView:captureResource.device.fieldOfView
  620. state:state
  621. captureSessionID:captureSessionID
  622. shouldCaptureFromVideo:shouldCaptureFromVideo
  623. completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
  624. SCTraceStart();
625. // We are done here; turn off the front flash if needed.
626. // This is dispatched on SCManagedCapturer's private queue.
  628. if (captureResource.state.flashActive && !captureResource.state.flashSupported &&
  629. captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
  630. captureResource.frontFlashController.flashActive = NO;
  631. }
  632. if (state.devicePosition == SCManagedCaptureDevicePositionFront) {
  633. fullScreenImage = [UIImage
  634. imageWithCGImage:fullScreenImage.CGImage
  635. scale:1.0
  636. orientation:SCMirroredImageOrientation(fullScreenImage.imageOrientation)];
  637. }
  638. captureResource.stillImageCapturing = NO;
  639. runOnMainThreadAsynchronously(^{
  640. completionHandler(fullScreenImage, metadata, error, state);
  641. });
  642. }];
  643. };
  644. if (state.flashActive && !captureResource.state.flashSupported &&
  645. state.devicePosition == SCManagedCaptureDevicePositionFront) {
  646. captureResource.frontFlashController.flashActive = YES;
  647. // Do the first capture only after 0.175 seconds so that the front flash is
  648. // already available
  649. [captureResource.queuePerformer perform:stillImageCaptureHandler after:0.175];
  650. } else {
  651. stillImageCaptureHandler();
  652. }
  653. }
  654. + (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource
  655. outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
  656. audioConfiguration:(SCAudioConfiguration *)configuration
  657. maxDuration:(NSTimeInterval)maxDuration
  658. fileURL:(NSURL *)fileURL
  659. captureSessionID:(NSString *)captureSessionID
  660. completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
  661. {
  662. SCTraceODPCompatibleStart(2);
  663. if (captureResource.videoRecording) {
  664. if (completionHandler) {
  665. runOnMainThreadAsynchronously(^{
  666. completionHandler(SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0),
  667. [NSError errorWithDomain:kSCManagedCapturerErrorDomain
  668. code:kSCManagedCapturerRecordVideoBusy
  669. userInfo:nil]);
  670. });
  671. }
  672. // Don't start recording session
  673. SCLogCapturerInfo(@"*** Tries to start multiple video recording session ***");
  674. return;
  675. }
  676. // Fix: https://jira.sc-corp.net/browse/CCAM-12322
  677. // Fire this notification in recording state to let PlaybackSession stop
  678. runOnMainThreadAsynchronously(^{
  679. [[NSNotificationCenter defaultCenter] postNotificationName:kSCImageProcessVideoPlaybackStopNotification
  680. object:[SCManagedCapturer sharedInstance]
  681. userInfo:nil];
  682. });
683. SCLogCapturerInfo(@"Start recording. OutputSettings:%@, maxDuration:%f, fileURL:%@", outputSettings, maxDuration,
  684. fileURL);
685. // Turn on the torch temporarily if we have flash active
  686. if (!captureResource.state.torchActive) {
  687. if (captureResource.state.flashActive) {
  688. [captureResource.device setTorchActive:YES];
  689. if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
  690. captureResource.frontFlashController.torchActive = YES;
  691. }
  692. }
  693. }
  694. if (captureResource.device.softwareZoom) {
  695. captureResource.device.zoomFactor = 1;
  696. [SCCaptureWorker softwareZoomWithDevice:captureResource.device resource:captureResource];
  697. }
  698. // Lock focus on both front and back camera if not using ARKit
  699. if (!captureResource.state.arSessionActive) {
  700. SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
  701. SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
  702. [front setRecording:YES];
  703. [back setRecording:YES];
  704. }
  705. // Start streaming if we haven't already
  706. [self startStreaming:captureResource];
  707. // Remove other listeners from video streamer
  708. [captureResource.videoDataSource removeListener:captureResource.deviceCapacityAnalyzer];
709. // If no lens is actually applied, we should enable the sticky video tweak
  710. BOOL isLensApplied = [SCCaptureWorker isLensApplied:captureResource];
  711. [captureResource.videoDataSource setKeepLateFrames:!isLensApplied];
  712. SCLogCapturerInfo(@"Start recording. isLensApplied:%d", isLensApplied);
  713. [captureResource.videoDataSource addListener:captureResource.videoCapturer];
  714. captureResource.videoRecording = YES;
  715. if (captureResource.state.lensesActive) {
  716. BOOL modifySource = captureResource.videoRecording || captureResource.state.liveVideoStreaming;
  717. [captureResource.lensProcessingCore setModifySource:modifySource];
  718. }
  719. if (captureResource.fileInputDecider.shouldProcessFileInput) {
  720. [captureResource.videoDataSource stopStreaming];
  721. }
722. // The max video duration: we stop processing sample buffers once the current
723. // time exceeds the max video duration.
724. // The extra 0.5 gives a bit of leeway for video recording initialization, and
725. // when the NSTimer gets stuck in normal recording sessions, we don't deviate
726. // too much from the expectation of how long the video is recorded.
  728. SCVideoCaptureSessionInfo sessionInfo = [captureResource.videoCapturer
  729. startRecordingAsynchronouslyWithOutputSettings:outputSettings
  730. audioConfiguration:configuration
  731. maxDuration:maxDuration + 0.5
  732. toURL:fileURL
  733. deviceFormat:captureResource.device.activeFormat
  734. orientation:AVCaptureVideoOrientationLandscapeLeft
  735. captureSessionID:captureSessionID];
  736. if (completionHandler) {
  737. runOnMainThreadAsynchronously(^{
  738. completionHandler(sessionInfo, nil);
  739. });
  740. }
  741. captureResource.droppedFramesReporter = [SCManagedDroppedFramesReporter new];
  742. [captureResource.videoDataSource addListener:captureResource.droppedFramesReporter];
  743. [[SCManagedCapturerV1 sharedInstance] addListener:captureResource.droppedFramesReporter];
  744. }
  745. + (void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource
  746. {
  747. SCTraceStart();
  748. SCLogCapturerInfo(@"Stop recording asynchronously");
  749. [captureResource.videoCapturer stopRecordingAsynchronously];
  750. [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
  751. SCManagedDroppedFramesReporter *droppedFramesReporter = captureResource.droppedFramesReporter;
  752. [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
  753. captureResource.droppedFramesReporter = nil;
  754. [captureResource.videoDataSource.performer perform:^{
755. // Call on the same performer as managedVideoDataSource:didOutputSampleBuffer:devicePosition:
  756. BOOL keepLateFrames = [captureResource.videoDataSource getKeepLateFrames];
  757. [droppedFramesReporter reportWithKeepLateFrames:keepLateFrames
  758. lensesApplied:[SCCaptureWorker isLensApplied:captureResource]];
759. // Disable keepLateFrames once recording stops, to keep the preview as recent as possible
  760. [captureResource.videoDataSource setKeepLateFrames:NO];
  761. }];
  762. }
  763. + (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource
  764. {
  765. SCTraceStart();
  766. SCLogCapturerInfo(@"Cancel recording asynchronously");
  767. [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
  768. [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
  769. captureResource.droppedFramesReporter = nil;
  770. [captureResource.videoDataSource removeListener:captureResource.videoCapturer];
  771. // Add back other listeners to video streamer
  772. [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
  773. [captureResource.videoCapturer cancelRecordingAsynchronously];
  774. captureResource.droppedFramesReporter = nil;
  775. }
  776. + (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource
  777. {
  778. if (resource.videoCapturer == nil) {
  779. SCLogCapturerWarning(
  780. @"Trying to retrieve SCVideoCaptureSessionInfo while _captureResource.videoCapturer is nil.");
  781. return SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0);
  782. } else {
  783. return resource.videoCapturer.activeSession;
  784. }
  785. }
  786. + (BOOL)canRunARSession:(SCCaptureResource *)resource
  787. {
  788. SCTraceODPCompatibleStart(2);
  789. if (@available(iOS 11.0, *)) {
  790. return resource.state.lensesActive &&
  791. [ARConfiguration sc_supportedForDevicePosition:resource.state.devicePosition];
  792. }
  793. return NO;
  794. }
  795. + (void)turnARSessionOff:(SCCaptureResource *)resource
  796. {
  797. SCTraceODPCompatibleStart(2);
  798. SCAssert([resource.queuePerformer isCurrentPerformer], @"");
  799. if (@available(iOS 11.0, *)) {
  800. SC_GUARD_ELSE_RETURN(resource.state.arSessionActive);
  801. SCLogCapturerInfo(@"Stopping ARSession");
  802. [resource.arSessionHandler stopARSessionRunning];
  803. [resource.managedSession performConfiguration:^{
  804. [resource.device updateActiveFormatWithSession:resource.managedSession.avSession];
  805. }];
  806. [resource.managedSession startRunning];
  807. resource.state =
  808. [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:NO] build];
  809. [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
  810. [self clearARKitData:resource];
  811. [self updateLensesFieldOfViewTracking:resource];
  812. runOnMainThreadAsynchronously(^{
  813. [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
  814. [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  815. didChangeARSessionActive:resource.state];
  816. [[SCManagedCapturerV1 sharedInstance] unlockZoomWithContext:SCCapturerContext];
  817. });
  818. };
  819. }
  820. + (void)clearARKitData:(SCCaptureResource *)resource
  821. {
  822. SCTraceODPCompatibleStart(2);
  823. if (@available(iOS 11.0, *)) {
  824. if ([resource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {
  825. id<SCManagedVideoARDataSource> dataSource = (id<SCManagedVideoARDataSource>)resource.videoDataSource;
  826. dataSource.currentFrame = nil;
  827. #ifdef SC_USE_ARKIT_FACE
  828. dataSource.lastDepthData = nil;
  829. #endif
  830. }
  831. }
  832. }
  833. + (void)turnARSessionOn:(SCCaptureResource *)resource
  834. {
  835. SCTraceODPCompatibleStart(2);
  836. SCAssert([resource.queuePerformer isCurrentPerformer], @"");
  837. if (@available(iOS 11.0, *)) {
  838. SC_GUARD_ELSE_RETURN(!resource.state.arSessionActive);
  839. SC_GUARD_ELSE_RETURN([self canRunARSession:resource]);
  840. SCLogCapturerInfo(@"Starting ARSession");
  841. resource.state =
  842. [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:YES] build];
  843. // Make sure we commit any configurations that may be in flight.
  844. [resource.videoDataSource commitConfiguration];
  845. runOnMainThreadAsynchronously(^{
  846. [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
  847. [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  848. didChangeARSessionActive:resource.state];
  849. // Zooming on an ARSession breaks stuff in super weird ways.
  850. [[SCManagedCapturerV1 sharedInstance] lockZoomWithContext:SCCapturerContext];
  851. });
  852. [self clearARKitData:resource];
  853. [resource.managedSession stopRunning];
  854. [resource.arSession
  855. runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:resource.state.devicePosition]
  856. options:(ARSessionRunOptionResetTracking | ARSessionRunOptionRemoveExistingAnchors)];
  857. [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
  858. [self updateLensesFieldOfViewTracking:resource];
  859. }
  860. }
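// Note the ordering above: the AVCaptureSession is stopped before the ARSession is run
// with ARSessionRunOptionResetTracking | ARSessionRunOptionRemoveExistingAnchors,
// presumably because only one of them can drive the camera at a time, and zooming is
// locked for the duration of the AR session (see the "breaks stuff" comment above).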
  861. + (void)configBlackCameraDetectorWithCaptureResource:(SCCaptureResource *)captureResource
  862. {
  863. captureResource.captureSessionFixer = [[SCCaptureSessionFixer alloc] init];
  864. captureResource.blackCameraDetector.blackCameraNoOutputDetector.delegate = captureResource.captureSessionFixer;
  865. [captureResource.videoDataSource addListener:captureResource.blackCameraDetector.blackCameraNoOutputDetector];
  866. }
  867. + (void)configureCaptureFaceDetectorWithCaptureResource:(SCCaptureResource *)captureResource
  868. {
  869. if (SCCameraFaceFocusDetectionMethod() == SCCameraFaceFocusDetectionMethodTypeCIDetector) {
  870. SCCaptureCoreImageFaceDetector *detector =
  871. [[SCCaptureCoreImageFaceDetector alloc] initWithCaptureResource:captureResource];
  872. captureResource.captureFaceDetector = detector;
  873. [captureResource.videoDataSource addListener:detector];
  874. } else {
  875. captureResource.captureFaceDetector =
  876. [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];
  877. }
  878. }
  879. + (void)configCaptureDeviceHandlerWithCaptureResource:(SCCaptureResource *)captureResource
  880. {
  881. captureResource.device.delegate = captureResource.captureDeviceHandler;
  882. }
  883. + (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource
  884. {
  885. // 1. reset observers
  886. [captureResource.lensProcessingCore removeFieldOfViewListener];
  887. if (@available(iOS 11.0, *)) {
  888. if (captureResource.state.arSessionActive &&
  889. [captureResource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {
  890. // 2. handle ARKit case
  891. id<SCManagedVideoARDataSource> arDataSource =
  892. (id<SCManagedVideoARDataSource>)captureResource.videoDataSource;
  893. float fieldOfView = arDataSource.fieldOfView;
  894. if (fieldOfView > 0) {
895. // 2.5 otherwise there is no valid field of view to set yet
  896. [captureResource.lensProcessingCore setFieldOfView:fieldOfView];
  897. }
  898. [captureResource.lensProcessingCore setAsFieldOfViewListenerForARDataSource:arDataSource];
  899. return;
  900. }
  901. }
902. // 3. fall back to the regular device field of view
  903. float fieldOfView = captureResource.device.fieldOfView;
  904. [captureResource.lensProcessingCore setFieldOfView:fieldOfView];
  905. [captureResource.lensProcessingCore setAsFieldOfViewListenerForDevice:captureResource.device];
  906. }
  907. + (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource
  908. {
  909. return resource.videoCapturer.firstWrittenAudioBufferDelay;
  910. }
  911. + (BOOL)audioQueueStarted:(SCCaptureResource *)resource
  912. {
  913. return resource.videoCapturer.audioQueueStarted;
  914. }
  915. + (BOOL)isLensApplied:(SCCaptureResource *)resource
  916. {
  917. return resource.state.lensesActive && resource.lensProcessingCore.isLensApplied;
  918. }
  919. + (BOOL)isVideoMirrored:(SCCaptureResource *)resource
  920. {
  921. if ([resource.videoDataSource respondsToSelector:@selector(isVideoMirrored)]) {
  922. return [resource.videoDataSource isVideoMirrored];
  923. } else {
  924. // Default is NO.
  925. return NO;
  926. }
  927. }
  928. + (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource
  929. {
  930. SCTraceODPCompatibleStart(2);
  931. BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];
  932. return isIphone5Series && !resource.state.flashActive && ![SCCaptureWorker isLensApplied:resource];
  933. }
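// The check above routes still capture through the video pipeline only on iPhone
// 5-class hardware (similar to iPhone 5 or newer but not iPhone 6 or newer), and only
// when neither flash nor a lens is active; everywhere else the regular still image
// capture path is presumably used instead.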
  934. + (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
  935. completionHandler:(dispatch_block_t)completionHandler
  936. resource:(SCCaptureResource *)resource
  937. {
  938. SCTraceODPCompatibleStart(2);
  939. if (resource.state.isPortraitModeActive) {
  940. SCTraceODPCompatibleStart(2);
  941. [resource.queuePerformer perform:^{
  942. SCTraceStart();
  943. if (resource.device.isConnected) {
  944. if (resource.device.softwareZoom) {
  945. CGPoint adjustedPoint = CGPointMake((pointOfInterest.x - 0.5) / resource.device.softwareZoom + 0.5,
  946. (pointOfInterest.y - 0.5) / resource.device.softwareZoom + 0.5);
  947. // Fix for the zooming factor
  948. [resource.videoDataSource setPortraitModePointOfInterest:adjustedPoint];
  949. if (resource.state.arSessionActive) {
950. if (@available(iOS 11.0, *)) {
  951. [resource.arImageCapturer setPortraitModePointOfInterest:adjustedPoint];
  952. }
  953. } else {
  954. [resource.stillImageCapturer setPortraitModePointOfInterest:adjustedPoint];
  955. }
  956. } else {
  957. [resource.videoDataSource setPortraitModePointOfInterest:pointOfInterest];
  958. if (resource.state.arSessionActive) {
959. if (@available(iOS 11.0, *)) {
  960. [resource.arImageCapturer setPortraitModePointOfInterest:pointOfInterest];
  961. }
  962. } else {
  963. [resource.stillImageCapturer setPortraitModePointOfInterest:pointOfInterest];
  964. }
  965. }
  966. }
  967. if (completionHandler) {
  968. runOnMainThreadAsynchronously(completionHandler);
  969. }
  970. }];
  971. }
  972. }
  973. + (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration
  974. resource:(SCCaptureResource *)resource
  975. {
  976. SCAssertPerformer(resource.queuePerformer);
  977. [resource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration];
  978. }
  979. + (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource
  980. {
  981. SCTraceODPCompatibleStart(2);
  982. SCLogCapturerInfo(@"Stop scan");
  983. [resource.videoScanner stopScanAsynchronously];
  984. if (completionHandler) {
  985. runOnMainThreadAsynchronously(completionHandler);
  986. }
  987. }
  988. + (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource
  989. {
  990. SCTraceODPCompatibleStart(2);
  991. SCLogCapturerInfo(@"Start scan. ScanConfiguration:%@", configuration);
  992. [SCCaptureWorker startStreaming:resource];
  993. [resource.videoScanner startScanAsynchronouslyWithScanConfiguration:configuration];
  994. }
  995. @end