2014 snapchat source code
//
//  SCManagedCapturer.m
//  Snapchat
//
//  Created by Liu Liu on 4/20/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedCapturerV1.h"
#import "SCManagedCapturerV1_Private.h"

#import "ARConfiguration+SCConfiguration.h"
#import "NSURL+Asset.h"
#import "SCBlackCameraDetector.h"
#import "SCBlackCameraNoOutputDetector.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCCaptureSessionFixer.h"
#import "SCCaptureUninitializedState.h"
#import "SCCaptureWorker.h"
#import "SCCapturerToken.h"
#import "SCManagedAudioStreamer.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
#import "SCManagedCaptureDeviceHandler.h"
#import "SCManagedCaptureDeviceSubjectAreaHandler.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturerARImageCaptureProvider.h"
#import "SCManagedCapturerGLViewManagerAPI.h"
#import "SCManagedCapturerLSAComponentTrackerAPI.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedCapturerListenerAnnouncer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"
#import "SCManagedDeviceCapacityAnalyzer.h"
#import "SCManagedDroppedFramesReporter.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedFrontFlashController.h"
#import "SCManagedStillImageCapturer.h"
#import "SCManagedStillImageCapturerHandler.h"
#import "SCManagedVideoARDataSource.h"
#import "SCManagedVideoCapturer.h"
#import "SCManagedVideoFileStreamer.h"
#import "SCManagedVideoFrameSampler.h"
#import "SCManagedVideoScanner.h"
#import "SCManagedVideoStreamReporter.h"
#import "SCManagedVideoStreamer.h"
#import "SCMetalUtils.h"
#import "SCProcessingPipeline.h"
#import "SCProcessingPipelineBuilder.h"
#import "SCScanConfiguration.h"
#import "SCSingleFrameStreamCapturer.h"
#import "SCSnapCreationTriggers.h"
#import "SCTimedTask.h"

#import <SCBase/SCAssignment.h>
#import <SCBase/SCLazyLoadingProxy.h>
#import <SCBatteryLogger/SCBatteryLogger.h>
#import <SCFoundation/NSData+Random.h>
#import <SCFoundation/NSError+Helpers.h>
#import <SCFoundation/NSString+SCFormat.h>
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
#import <SCImageProcess/SCImageProcessVideoPlaybackSession.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Performance.h>
#import <SCUserTraceLogger/SCUserTraceLogger.h>

#import <Looksery/Looksery.h>

@import ARKit;
static NSUInteger const kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession = 22;
static CGFloat const kSCManagedCapturerFixInconsistencyARSessionDelayThreshold = 2;
static CGFloat const kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold = 5;
static NSTimeInterval const kMinFixAVSessionRunningInterval = 1;  // Interval to run _fixAVSessionIfNecessary
static NSTimeInterval const kMinFixSessionRuntimeErrorInterval = 1; // Min interval that RuntimeError calls _startNewSession

static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain";

NSString *const kSCLensesTweaksDidChangeFileInput = @"kSCLensesTweaksDidChangeFileInput";

@implementation SCManagedCapturerV1 {
    // No ivars for CapturerV1 please, they should be in resource.
    SCCaptureResource *_captureResource;
}
+ (SCManagedCapturerV1 *)sharedInstance
{
    static dispatch_once_t onceToken;
    static SCManagedCapturerV1 *managedCapturerV1;
    dispatch_once(&onceToken, ^{
        managedCapturerV1 = [[SCManagedCapturerV1 alloc] init];
    });
    return managedCapturerV1;
}
- (instancetype)init
{
    SCTraceStart();
    SCAssertMainThread();
    SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource];
    return [self initWithResource:resource];
}

- (instancetype)initWithResource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    self = [super init];
    if (self) {
        // Assume we are not in the background. We could be more defensive here and fetch the
        // app state, but to avoid potential problems, we won't do that until later.
        SCLogCapturerInfo(@"======================= cool startup =======================");
        // Initialization of the capture resource is done in the worker so it can be shared
        // between V1 and V2.
        _captureResource = resource;
        _captureResource.handleAVSessionStatusChange = @selector(_handleAVSessionStatusChange:);
        _captureResource.sessionRuntimeError = @selector(_sessionRuntimeError:);
        _captureResource.livenessConsistency = @selector(_livenessConsistency:);
        _captureResource.deviceSubjectAreaHandler =
            [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:_captureResource];
        _captureResource.snapCreationTriggers = [SCSnapCreationTriggers new];
        if (SCIsMasterBuild()) {
            // We call _sessionRuntimeError to reset _captureResource.videoDataSource if the input changes
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(_sessionRuntimeError:)
                                                         name:kSCLensesTweaksDidChangeFileInput
                                                       object:nil];
        }
    }
    return self;
}
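// Note: the selector-valued properties assigned above (handleAVSessionStatusChange,
// sessionRuntimeError, livenessConsistency) are stored on the capture resource and appear
// to be performed back on this capturer when the corresponding KVO change or notification
// fires (see -_handleAVSessionStatusChange: below); that wiring lives in the capture
// resource/worker, not in this file.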
- (SCBlackCameraDetector *)blackCameraDetector
{
    return _captureResource.blackCameraDetector;
}

- (void)recreateAVCaptureSession
{
    SCTraceODPCompatibleStart(2);
    [self _startRunningWithNewCaptureSessionIfNecessary];
}

- (void)_handleAVSessionStatusChange:(NSDictionary *)change
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);
    SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground);
    BOOL wasRunning = [change[NSKeyValueChangeOldKey] boolValue];
    BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
    SCLogCapturerInfo(@"avSession running status changed: %@ -> %@", wasRunning ? @"running" : @"stopped",
                      isRunning ? @"running" : @"stopped");
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:isRunning];
    if (_captureResource.isRecreateSessionFixScheduled) {
        SCLogCapturerInfo(@"Scheduled AVCaptureSession recreation, return");
        return;
    }
    if (wasRunning != isRunning) {
        runOnMainThreadAsynchronously(^{
            if (isRunning) {
                [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];
            } else {
                [_captureResource.announcer managedCapturer:self didStopRunning:_captureResource.state];
            }
        });
    }
    if (!isRunning) {
        [_captureResource.queuePerformer perform:^{
            [self _fixAVSessionIfNecessary];
        }];
    } else {
        if (!SCDeviceSupportsMetal()) {
            [self _fixNonMetalSessionPreviewInconsistency];
        }
    }
}
- (void)_fixAVSessionIfNecessary
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground);
    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                     uniqueId:@""
                                     stepName:@"startConsistencyCheckAndFix"];
    NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate];
    if (timeNow - _captureResource.lastFixSessionTimestamp < kMinFixAVSessionRunningInterval) {
        SCLogCoreCameraInfo(@"Fixing session in less than %f, skip", kMinFixAVSessionRunningInterval);
        return;
    }
    _captureResource.lastFixSessionTimestamp = timeNow;
    if (!_captureResource.managedSession.isRunning) {
        SCTraceStartSection("Fix AVSession")
        {
            _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession++;
            SCGhostToSnappableSignalCameraFixInconsistency();
            if (_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession <=
                kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) {
                SCLogCapturerInfo(@"Fixing AVSession");
                [_captureResource.managedSession startRunning];
                SCLogCapturerInfo(@"Fixed AVSession, success : %@", @(_captureResource.managedSession.isRunning));
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                                 uniqueId:@""
                                                 stepName:@"finishCaptureSessionFix"];
            } else {
                // Start running with a new capture session if fixing the inconsistency did not succeed
                SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***");
                [self _startRunningWithNewCaptureSessionIfNecessary];
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                                 uniqueId:@""
                                                 stepName:@"finishNewCaptureSessionCreation"];
            }
        }
        SCTraceEndSection();
        [[SCLogger sharedInstance]
            logTimedEventEnd:kSCCameraFixAVCaptureSession
                    uniqueId:@""
                  parameters:@{
                      @"success" : @(_captureResource.managedSession.isRunning),
                      @"count" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession)
                  }];
    } else {
        _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
        [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraFixAVCaptureSession uniqueId:@""];
    }
    if (_captureResource.managedSession.isRunning) {
        // If it is fixed, signal that we received the first frame.
        SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
        // For the non-Metal preview render, we need to make sure the preview is not hidden
        if (!SCDeviceSupportsMetal()) {
            [self _fixNonMetalSessionPreviewInconsistency];
        }
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];
            // This approximates the did-render time; it is not accurate.
            SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
        });
    } else {
        [_captureResource.queuePerformer perform:^{
            [self _fixAVSessionIfNecessary];
        }
                                           after:1];
    }
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning];
}
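// The check below enforces an invariant that appears to hold for the non-Metal path:
// the video preview layer should be visible (hidden == NO) exactly when the capture
// session is running. When the two disagree, the preview layer is rebuilt inside a
// CATransaction with implicit animations disabled.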
- (void)_fixNonMetalSessionPreviewInconsistency
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);
    if ((!_captureResource.videoPreviewLayer.hidden) != _captureResource.managedSession.isRunning) {
        SCTraceStartSection("Fix non-Metal VideoPreviewLayer");
        {
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
            [CATransaction commit];
        }
        SCTraceEndSection();
    }
}

- (SCCaptureResource *)captureResource
{
    SCTraceODPCompatibleStart(2);
    return _captureResource;
}
- (id<SCManagedCapturerLensAPI>)lensProcessingCore
{
    SCTraceODPCompatibleStart(2);
    @weakify(self);
    return (id<SCManagedCapturerLensAPI>)[[SCLazyLoadingProxy alloc] initWithInitializationBlock:^id {
        @strongify(self);
        SCReportErrorIf(!self.captureResource.state.lensProcessorReady, @"[Lenses] Lens processing core is not ready");
        return self.captureResource.lensProcessingCore;
    }];
}
- (SCVideoCaptureSessionInfo)activeSession
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker activeSession:_captureResource];
}

- (BOOL)isLensApplied
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker isLensApplied:_captureResource];
}

- (BOOL)isVideoMirrored
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker isVideoMirrored:_captureResource];
}

#pragma mark - Setup, Start & Stop

- (void)_updateHRSIEnabled
{
    SCTraceODPCompatibleStart(2);
    // Since night mode is low-res, we set high-resolution still image output when night mode is enabled.
    // Software zoom also requires a higher-resolution image to get a better zooming result.
    // We also want a higher resolution on newer devices.
    BOOL is1080pSupported = [SCManagedCaptureDevice is1080pSupported];
    BOOL shouldHRSIEnabled =
        (_captureResource.device.isNightModeActive || _captureResource.device.softwareZoom || is1080pSupported);
    SCLogCapturerInfo(@"Setting HRSIEnabled to: %d. isNightModeActive:%d softwareZoom:%d is1080pSupported:%d",
                      shouldHRSIEnabled, _captureResource.device.isNightModeActive,
                      _captureResource.device.softwareZoom, is1080pSupported);
    [_captureResource.stillImageCapturer setHighResolutionStillImageOutputEnabled:shouldHRSIEnabled];
}

- (void)_updateStillImageStabilizationEnabled
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Enabling still image stabilization");
    [_captureResource.stillImageCapturer enableStillImageStabilization];
}
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                            completionHandler:(dispatch_block_t)completionHandler
                                      context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition);
    SCTraceResumeToken token = SCTraceCapture();
    [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];
    [_captureResource.queuePerformer perform:^{
        SCTraceResume(token);
        [self setupWithDevicePosition:devicePosition completionHandler:completionHandler];
    }];
}

- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
              completionHandler:(dispatch_block_t)completionHandler
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_captureResource.queuePerformer);
    [SCCaptureWorker setupWithCaptureResource:_captureResource devicePosition:devicePosition];
    [self addListener:_captureResource.stillImageCapturer];
    [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    [self addListener:_captureResource.lensProcessingCore];
    [self _updateHRSIEnabled];
    [self _updateStillImageStabilizationEnabled];
    [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource];
    if (!SCDeviceSupportsMetal()) {
        [SCCaptureWorker makeVideoPreviewLayer:_captureResource];
    }
    // I need to do this setup now. Thus, it is off the main thread. This also means my preview
    // layer controller is entangled with the capturer.
    [[SCManagedCapturePreviewLayerController sharedInstance] setupRenderPipeline];
    [[SCManagedCapturePreviewLayerController sharedInstance] setManagedCapturer:self];
    _captureResource.status = SCManagedCapturerStatusReady;
    SCManagedCapturerState *state = [_captureResource.state copy];
    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
    runOnMainThreadAsynchronously(^{
        SCLogCapturerInfo(@"Did setup with devicePosition:%lu", (unsigned long)devicePosition);
        [_captureResource.announcer managedCapturer:self didChangeState:state];
        [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state];
        if (!SCDeviceSupportsMetal()) {
            [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];
        }
        if (completionHandler) {
            completionHandler();
        }
    });
}
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
                                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        _captureResource.sampleBufferDisplayController = sampleBufferDisplayController;
        [_captureResource.videoDataSource addSampleBufferDisplayController:sampleBufferDisplayController];
    }];
}

- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""];
    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];
    SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token);
    [_captureResource.queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [SCCaptureWorker startRunningWithCaptureResource:_captureResource
                                                   token:token
                                       completionHandler:completionHandler];
        // After startRunning, we need to make sure _fixAVSessionIfNecessary starts running.
        // The problem: with the new KVO fix strategy, the AVCaptureSession may be in the stopped
        // state, so no KVO callback is triggered, and calling
        // startRunningAsynchronouslyWithCompletionHandler has no effect because
        // SCManagedCapturerStatus is already SCManagedCapturerStatusRunning.
        [self _fixAVSessionIfNecessary];
    }];
    return token;
}
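// A minimal usage sketch of the token-based lifecycle (illustrative, not part of the
// original file; the @"SomeFeature" context string is hypothetical):
//
//   SCCapturerToken *token =
//       [[SCManagedCapturerV1 sharedInstance] startRunningAsynchronouslyWithCompletionHandler:nil
//                                                                                     context:@"SomeFeature"];
//   // ... camera is in use ...
//   [[SCManagedCapturerV1 sharedInstance] stopRunningAsynchronously:token
//                                                 completionHandler:nil
//                                                           context:@"SomeFeature"];
//
// Each caller keeps the token it was given and hands the same token back; the worker
// presumably stops the session only once no outstanding tokens remain.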
- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token
                  completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                            context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_captureResource.queuePerformer);
    SCLogCapturerInfo(@"Stop running. token:%@ context:%@", token, context);
    return [SCCaptureWorker stopRunningWithCaptureResource:_captureResource
                                                     token:token
                                         completionHandler:completionHandler];
}

- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                          context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Stop running asynchronously. token:%@ context:%@", token, context);
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_captureResource.queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [SCCaptureWorker stopRunningWithCaptureResource:_captureResource
                                                  token:token
                                      completionHandler:completionHandler];
    }];
}

- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                            after:(NSTimeInterval)delay
                          context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Stop running asynchronously. token:%@ delay:%f", token, delay);
    NSTimeInterval startTime = CACurrentMediaTime();
    [_captureResource.queuePerformer perform:^{
        NSTimeInterval elapsedTime = CACurrentMediaTime() - startTime;
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            // If we haven't started a new running sequence yet, stop running now
            [SCCaptureWorker stopRunningWithCaptureResource:_captureResource
                                                      token:token
                                          completionHandler:completionHandler];
        }
                                           after:MAX(delay - elapsedTime, 0)];
    }];
}

- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Start streaming asynchronously");
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        [SCCaptureWorker startStreaming:_captureResource];
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}
#pragma mark - Recording / Capture

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        [SCCaptureWorker captureStillImageWithCaptureResource:_captureResource
                                                  aspectRatio:aspectRatio
                                             captureSessionID:captureSessionID
                                       shouldCaptureFromVideo:[self _shouldCaptureImageFromVideo]
                                            completionHandler:completionHandler
                                                      context:context];
    }];
}

- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
                                                            context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        SCLogCapturerInfo(@"Start capturing single video frame");
        _captureResource.frameCap = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^void(UIImage *image) {
            [_captureResource.queuePerformer perform:^{
                [_captureResource.videoDataSource removeListener:_captureResource.frameCap];
                _captureResource.frameCap = nil;
            }];
            runOnMainThreadAsynchronously(^{
                [_captureResource.device setTorchActive:NO];
                SCLogCapturerInfo(@"End capturing single video frame");
                completionHandler(image);
            });
        }];
        BOOL waitForTorch = NO;
        if (!_captureResource.state.torchActive) {
            if (_captureResource.state.flashActive) {
                waitForTorch = YES;
                [_captureResource.device setTorchActive:YES];
            }
        }
        // If we just turned the torch on, wait briefly (0.5s) before sampling a frame,
        // presumably to give the torch time to ramp up.
        [_captureResource.queuePerformer perform:^{
            [_captureResource.videoDataSource addListener:_captureResource.frameCap];
            [SCCaptureWorker startStreaming:_captureResource];
        }
                                           after:(waitForTorch ? 0.5 : 0)];
    }];
}
- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
                                  audioConfiguration:(SCAudioConfiguration *)configuration
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCLogCapturerInfo(@"prepare for recording");
        [_captureResource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration];
    }];
}

- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                                    audioConfiguration:(SCAudioConfiguration *)configuration
                                           maxDuration:(NSTimeInterval)maxDuration
                                               fileURL:(NSURL *)fileURL
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        [SCCaptureWorker startRecordingWithCaptureResource:_captureResource
                                            outputSettings:outputSettings
                                        audioConfiguration:configuration
                                               maxDuration:maxDuration
                                                   fileURL:fileURL
                                          captureSessionID:captureSessionID
                                         completionHandler:completionHandler];
    }];
}

- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        [SCCaptureWorker stopRecordingWithCaptureResource:_captureResource];
    }];
}

- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        [SCCaptureWorker cancelRecordingWithCaptureResource:_captureResource];
    }];
}

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        [SCCaptureWorker startScanWithScanConfiguration:configuration resource:_captureResource];
    }];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:_captureResource];
    }];
}
- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
                                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    // Previously _captureResource.videoFrameSampler was created conditionally during setup, but if
    // this method is called it is a safe assumption that the client wants it to run instead of
    // failing silently, so always create _captureResource.videoFrameSampler.
    if (!_captureResource.videoFrameSampler) {
        _captureResource.videoFrameSampler = [SCManagedVideoFrameSampler new];
        [_captureResource.announcer addListener:_captureResource.videoFrameSampler];
    }
    SCLogCapturerInfo(@"Sampling next frame");
    [_captureResource.videoFrameSampler sampleNextFrame:completionHandler];
}

- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Adding timed task:%@", task);
    [_captureResource.queuePerformer perform:^{
        [_captureResource.videoCapturer addTimedTask:task];
    }];
}

- (void)clearTimedTasksWithContext:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        [_captureResource.videoCapturer clearTimedTasks];
    }];
}
#pragma mark - Utilities

- (void)convertViewCoordinates:(CGPoint)viewCoordinates
             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
                       context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssert(completionHandler, @"completionHandler shouldn't be nil");
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (SCDeviceSupportsMetal()) {
            CGSize viewSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
            CGPoint pointOfInterest =
                [_captureResource.device convertViewCoordinates:viewCoordinates
                                                       viewSize:viewSize
                                                   videoGravity:AVLayerVideoGravityResizeAspectFill];
            runOnMainThreadAsynchronously(^{
                completionHandler(pointOfInterest);
            });
        } else {
            CGSize viewSize = _captureResource.videoPreviewLayer.bounds.size;
            CGPoint pointOfInterest =
                [_captureResource.device convertViewCoordinates:viewCoordinates
                                                       viewSize:viewSize
                                                   videoGravity:_captureResource.videoPreviewLayer.videoGravity];
            runOnMainThreadAsynchronously(^{
                completionHandler(pointOfInterest);
            });
        }
    }];
}

- (void)detectLensCategoryOnNextFrame:(CGPoint)point
                               lenses:(NSArray<SCLens *> *)lenses
                           completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
                              context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssert(completion, @"completionHandler shouldn't be nil");
    SCAssertMainThread();
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        SCLogCapturerInfo(@"Detecting lens category on next frame. point:%@, lenses:%@", NSStringFromCGPoint(point),
                          [lenses valueForKey:NSStringFromSelector(@selector(lensId))]);
        [_captureResource.lensProcessingCore
            detectLensCategoryOnNextFrame:point
                         videoOrientation:_captureResource.videoDataSource.videoOrientation
                                   lenses:lenses
                               completion:^(SCLensCategory *_Nullable category, NSInteger categoriesCount) {
                                   runOnMainThreadAsynchronously(^{
                                       if (completion) {
                                           completion(category, categoriesCount);
                                       }
                                   });
                               }];
    }];
}
#pragma mark - Configurations

- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                      completionHandler:(dispatch_block_t)completionHandler
                                context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting device position asynchronously to: %lu", (unsigned long)devicePosition);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL devicePositionChanged = NO;
        BOOL nightModeChanged = NO;
        BOOL portraitModeChanged = NO;
        BOOL zoomFactorChanged = NO;
        BOOL flashSupportedOrTorchSupportedChanged = NO;
        SCManagedCapturerState *state = [_captureResource.state copy];
        if (_captureResource.state.devicePosition != devicePosition) {
            SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];
            if (device) {
                if (!device.delegate) {
                    device.delegate = _captureResource.captureDeviceHandler;
                }
                SCManagedCaptureDevice *prevDevice = _captureResource.device;
                [SCCaptureWorker turnARSessionOff:_captureResource];
                BOOL isStreaming = _captureResource.videoDataSource.isStreaming;
                if (!SCDeviceSupportsMetal()) {
                    if (isStreaming) {
                        [_captureResource.videoDataSource stopStreaming];
                    }
                }
                SCLogCapturerInfo(@"Set device position beginConfiguration");
                [_captureResource.videoDataSource beginConfiguration];
                [_captureResource.managedSession beginConfiguration];
                // Turn off flash for the current device in case it is active
                [_captureResource.device setTorchActive:NO];
                if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                    _captureResource.frontFlashController.torchActive = NO;
                }
                [_captureResource.deviceCapacityAnalyzer removeFocusListener];
                [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession];
                _captureResource.device = device;
                BOOL deviceSet = [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession];
                // If we are toggling while recording, set the night mode back to not active
                if (_captureResource.videoRecording) {
                    [self _setNightModeActive:NO];
                }
                // Sync night mode, torch and flash state with the current device
                devicePositionChanged = (_captureResource.state.devicePosition != devicePosition);
                nightModeChanged =
                    (_captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive);
                portraitModeChanged =
                    devicePositionChanged &&
                    (devicePosition == SCManagedCaptureDevicePositionBackDualCamera ||
                     _captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera);
                zoomFactorChanged = (_captureResource.state.zoomFactor != _captureResource.device.zoomFactor);
                if (zoomFactorChanged && _captureResource.device.softwareZoom) {
                    [SCCaptureWorker softwareZoomWithDevice:_captureResource.device resource:_captureResource];
                }
                if (_captureResource.state.flashActive != _captureResource.device.flashActive) {
                    // Preserve flashActive across devices
                    _captureResource.device.flashActive = _captureResource.state.flashActive;
                }
                if (_captureResource.state.liveVideoStreaming != device.liveVideoStreamingActive) {
                    // Preserve liveVideoStreaming state across devices
                    [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming
                                                           session:_captureResource.managedSession.avSession];
                }
                if (devicePosition == SCManagedCaptureDevicePositionBackDualCamera &&
                    _captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive) {
                    // Preserve night mode when switching from back camera to back dual camera
                    [self _setNightModeActive:_captureResource.state.isNightModeActive];
                }
                flashSupportedOrTorchSupportedChanged =
                    (_captureResource.state.flashSupported != _captureResource.device.isFlashSupported ||
                     _captureResource.state.torchSupported != _captureResource.device.isTorchSupported);
                SCLogCapturerInfo(@"Set device position: %lu -> %lu, night mode: %d -> %d, zoom "
                                  @"factor: %f -> %f, flash supported: %d -> %d, torch supported: %d -> %d",
                                  (unsigned long)_captureResource.state.devicePosition, (unsigned long)devicePosition,
                                  _captureResource.state.isNightModeActive, _captureResource.device.isNightModeActive,
                                  _captureResource.state.zoomFactor, _captureResource.device.zoomFactor,
                                  _captureResource.state.flashSupported, _captureResource.device.isFlashSupported,
                                  _captureResource.state.torchSupported, _captureResource.device.isTorchSupported);
                _captureResource.state = [[[[[[[[SCManagedCapturerStateBuilder
                    withManagedCapturerState:_captureResource.state] setDevicePosition:devicePosition]
                    setIsNightModeActive:_captureResource.device.isNightModeActive]
                    setZoomFactor:_captureResource.device.zoomFactor]
                    setFlashSupported:_captureResource.device.isFlashSupported]
                    setTorchSupported:_captureResource.device.isTorchSupported]
                    setIsPortraitModeActive:devicePosition == SCManagedCaptureDevicePositionBackDualCamera] build];
                [self _updateHRSIEnabled];
                [self _updateStillImageStabilizationEnabled];
                // This needs to be done after we have finished configuring everything for the
                // session; otherwise we may set it up without the video input hooked up yet,
                // and will set the wrong parameters for the output.
                [_captureResource.videoDataSource setDevicePosition:devicePosition];
                if (@available(iOS 11.0, *)) {
                    if (portraitModeChanged) {
                        [_captureResource.videoDataSource
                            setDepthCaptureEnabled:_captureResource.state.isPortraitModeActive];
                        [_captureResource.device setCaptureDepthData:_captureResource.state.isPortraitModeActive
                                                             session:_captureResource.managedSession.avSession];
                        [_captureResource.stillImageCapturer
                            setPortraitModeCaptureEnabled:_captureResource.state.isPortraitModeActive];
                        if (_captureResource.state.isPortraitModeActive) {
                            SCProcessingPipelineBuilder *processingPipelineBuilder =
                                [[SCProcessingPipelineBuilder alloc] init];
                            processingPipelineBuilder.portraitModeEnabled = YES;
                            SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
                            SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline);
                            [_captureResource.videoDataSource addProcessingPipeline:pipeline];
                        } else {
                            [_captureResource.videoDataSource removeProcessingPipeline];
                        }
                    }
                }
                [_captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:_captureResource.device];
                [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource];
                [_captureResource.managedSession commitConfiguration];
                [_captureResource.videoDataSource commitConfiguration];
                // Checks if the flash is activated and if so switches the flash along with the
                // camera view. Setting the device's torch mode has to be called after
                // -[AVCaptureSession commitConfiguration], otherwise the flash may not work,
                // especially on iPhone 8/8 Plus.
                if (_captureResource.state.torchActive ||
                    (_captureResource.state.flashActive && _captureResource.videoRecording)) {
                    [_captureResource.device setTorchActive:YES];
                    if (devicePosition == SCManagedCaptureDevicePositionFront) {
                        _captureResource.frontFlashController.torchActive = YES;
                    }
                }
                SCLogCapturerInfo(@"Set device position commitConfiguration");
                [_captureResource.droppedFramesReporter didChangeCaptureDevicePosition];
                if (!SCDeviceSupportsMetal()) {
                    if (isStreaming) {
                        [SCCaptureWorker startStreaming:_captureResource];
                    }
                }
                NSArray *inputs = _captureResource.managedSession.avSession.inputs;
                if (!deviceSet) {
                    [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                        to:devicePosition
                                                    reason:@"setDeviceForInput failed"];
                } else if (inputs.count == 0) {
                    [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                        to:devicePosition
                                                    reason:@"no input"];
                } else if (inputs.count > 1) {
                    [self
                        _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                      to:devicePosition
                                                  reason:[NSString sc_stringWithFormat:@"multiple inputs: %@", inputs]];
                } else {
                    AVCaptureDeviceInput *input = [inputs firstObject];
                    AVCaptureDevice *resultDevice = input.device;
                    if (resultDevice == prevDevice.device) {
                        [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                            to:devicePosition
                                                        reason:@"stayed on previous device"];
                    } else if (resultDevice != _captureResource.device.device) {
                        [self
                            _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                          to:devicePosition
                                                      reason:[NSString sc_stringWithFormat:@"unknown input device: %@",
                                                                                           resultDevice]];
                    }
                }
            } else {
                [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition
                                                    to:devicePosition
                                                reason:@"no device"];
            }
        } else {
            SCLogCapturerInfo(@"Device position did not change");
            if (_captureResource.device.position != _captureResource.state.devicePosition) {
                [self _logFailureSetDevicePositionFrom:state.devicePosition
                                                    to:devicePosition
                                                reason:@"state position set incorrectly"];
            }
        }
        BOOL stateChanged = ![_captureResource.state isEqual:state];
        state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (stateChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
            }
            if (devicePositionChanged) {
                [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state];
            }
            if (nightModeChanged) {
                [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state];
            }
            if (portraitModeChanged) {
                [_captureResource.announcer managedCapturer:self didChangePortraitModeActive:state];
            }
            if (zoomFactorChanged) {
                [_captureResource.announcer managedCapturer:self didChangeZoomFactor:state];
            }
            if (flashSupportedOrTorchSupportedChanged) {
                [_captureResource.announcer managedCapturer:self didChangeFlashSupportedAndTorchSupported:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}
- (void)_logFailureSetDevicePositionFrom:(SCManagedCaptureDevicePosition)start
                                      to:(SCManagedCaptureDevicePosition)end
                                  reason:(NSString *)reason
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Device position change failed: %@", reason);
    [[SCLogger sharedInstance] logEvent:kSCCameraMetricsCameraFlipFailure
                             parameters:@{
                                 @"start" : @(start),
                                 @"end" : @(end),
                                 @"reason" : reason,
                             }];
}

- (void)setFlashActive:(BOOL)flashActive
     completionHandler:(dispatch_block_t)completionHandler
               context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL flashActiveOrFrontFlashEnabledChanged = NO;
        if (_captureResource.state.flashActive != flashActive) {
            [_captureResource.device setFlashActive:flashActive];
            SCLogCapturerInfo(@"Set flash active: %d -> %d", _captureResource.state.flashActive, flashActive);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setFlashActive:flashActive] build];
            flashActiveOrFrontFlashEnabledChanged = YES;
        }
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (flashActiveOrFrontFlashEnabledChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
                [_captureResource.announcer managedCapturer:self didChangeFlashActive:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}
- (void)setLensesActive:(BOOL)lensesActive
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [self _setLensesActive:lensesActive
        liveVideoStreaming:NO
             filterFactory:nil
         completionHandler:completionHandler
                   context:context];
}

- (void)setLensesActive:(BOOL)lensesActive
          filterFactory:(SCLookseryFilterFactory *)filterFactory
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    [self _setLensesActive:lensesActive
        liveVideoStreaming:NO
             filterFactory:filterFactory
         completionHandler:completionHandler
                   context:context];
}
- (void)setLensesInTalkActive:(BOOL)lensesActive
            completionHandler:(dispatch_block_t)completionHandler
                      context:(NSString *)context
{
    // Talk requires liveVideoStreaming to be turned on
    BOOL liveVideoStreaming = lensesActive;
    dispatch_block_t activationBlock = ^{
        [self _setLensesActive:lensesActive
            liveVideoStreaming:liveVideoStreaming
                 filterFactory:nil
             completionHandler:completionHandler
                       context:context];
    };
    @weakify(self);
    [_captureResource.queuePerformer perform:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // If lenses are being enabled in TV3 but were enabled from somewhere other than TV3,
        // we have to turn lenses off first.
        BOOL shouldTurnOffBeforeActivation = liveVideoStreaming && !self->_captureResource.state.liveVideoStreaming &&
                                             self->_captureResource.state.lensesActive;
        if (shouldTurnOffBeforeActivation) {
            [self _setLensesActive:NO
                liveVideoStreaming:NO
                     filterFactory:nil
                 completionHandler:activationBlock
                           context:context];
        } else {
            activationBlock();
        }
    }];
}
- (void)_setLensesActive:(BOOL)lensesActive
      liveVideoStreaming:(BOOL)liveVideoStreaming
           filterFactory:(SCLookseryFilterFactory *)filterFactory
       completionHandler:(dispatch_block_t)completionHandler
                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting lenses active to: %d", lensesActive);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL lensesActiveChanged = NO;
        if (_captureResource.state.lensesActive != lensesActive) {
            SCLogCapturerInfo(@"Set lenses active: %d -> %d", _captureResource.state.lensesActive, lensesActive);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setLensesActive:lensesActive] build];
            // Update capturer settings (orientation and resolution) after changing state, because
            // the _setLiveVideoStreaming logic depends on it
            [self _setLiveVideoStreaming:liveVideoStreaming];
            [SCCaptureWorker turnARSessionOff:_captureResource];
            // Only enable sample buffer display when lenses are not active.
            [_captureResource.videoDataSource setSampleBufferDisplayEnabled:!lensesActive];
            [_captureResource.debugInfoDict setObject:!lensesActive ? @"True" : @"False"
                                               forKey:@"sampleBufferDisplayEnabled"];
            lensesActiveChanged = YES;
            [_captureResource.lensProcessingCore setAspectRatio:_captureResource.state.liveVideoStreaming];
            [_captureResource.lensProcessingCore setLensesActive:_captureResource.state.lensesActive
                                                videoOrientation:_captureResource.videoDataSource.videoOrientation
                                                   filterFactory:filterFactory];
            BOOL modifySource = _captureResource.state.liveVideoStreaming || _captureResource.videoRecording;
            [_captureResource.lensProcessingCore setModifySource:modifySource];
            [_captureResource.lensProcessingCore setShouldMuteAllSounds:_captureResource.state.liveVideoStreaming];
            if (_captureResource.fileInputDecider.shouldProcessFileInput) {
                [_captureResource.lensProcessingCore setLensesActive:YES
                                                    videoOrientation:_captureResource.videoDataSource.videoOrientation
                                                       filterFactory:filterFactory];
            }
            [_captureResource.videoDataSource
                setVideoStabilizationEnabledIfSupported:!_captureResource.state.lensesActive];
            if (SCIsMasterBuild()) {
                // Check that the connection configuration is correct
                if (_captureResource.state.lensesActive &&
                    _captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                    for (AVCaptureOutput *output in _captureResource.managedSession.avSession.outputs) {
                        if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) {
                            AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
                            SCAssert(connection.videoMirrored &&
                                         connection.videoOrientation ==
                                             (!_captureResource.state.liveVideoStreaming
                                                  ? AVCaptureVideoOrientationLandscapeRight
                                                  : AVCaptureVideoOrientationPortrait),
                                     @"Connection configuration is not correct");
                        }
                    }
                }
            }
        }
        dispatch_block_t viewChangeHandler = ^{
            SCManagedCapturerState *state = [_captureResource.state copy]; // update to latest state always
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:self didChangeState:state];
                [_captureResource.announcer managedCapturer:self didChangeLensesActive:state];
                [_captureResource.videoPreviewGLViewManager setLensesActive:state.lensesActive];
                if (completionHandler) {
                    completionHandler();
                }
            });
        };
        if (lensesActiveChanged && !lensesActive && SCDeviceSupportsMetal()) {
            // If we are turning off lenses and have sample buffer display on,
            // we need to wait until a new frame is presented in the sample buffer before
            // dismissing the Lenses' OpenGL view.
            [_captureResource.videoDataSource waitUntilSampleBufferDisplayed:_captureResource.queuePerformer.queue
                                                           completionHandler:viewChangeHandler];
        } else {
            viewChangeHandler();
        }
    }];
}
- (void)_setLiveVideoStreaming:(BOOL)liveVideoStreaming
{
    SCAssertPerformer(_captureResource.queuePerformer);
    BOOL enableLiveVideoStreaming = liveVideoStreaming;
    if (!_captureResource.state.lensesActive && liveVideoStreaming) {
        SCLogLensesError(@"LiveVideoStreaming is not allowed when lenses are turned off");
        enableLiveVideoStreaming = NO;
    }
    SC_GUARD_ELSE_RETURN(enableLiveVideoStreaming != _captureResource.state.liveVideoStreaming);
    // We disable the blackCameraNoOutputDetector while live video streaming.
    // In case there is black camera during video calls, we may consider re-enabling it.
    [self _setBlackCameraNoOutputDetectorEnabled:!liveVideoStreaming];
    if (!_captureResource.device.isConnected) {
        SCLogCapturerError(@"Can't perform configuration for live video streaming");
    }
    SCLogCapturerInfo(@"Set live video streaming: %d -> %d", _captureResource.state.liveVideoStreaming,
                      enableLiveVideoStreaming);
    _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
        setLiveVideoStreaming:enableLiveVideoStreaming] build];
    BOOL isStreaming = _captureResource.videoDataSource.isStreaming;
    if (isStreaming) {
        [_captureResource.videoDataSource stopStreaming];
    }
    SCLogCapturerInfo(@"Set live video streaming beginConfiguration");
    [_captureResource.managedSession performConfiguration:^{
        [_captureResource.videoDataSource beginConfiguration];
        // If video chat is active we should use portrait orientation, otherwise landscape right
        [_captureResource.videoDataSource setVideoOrientation:_captureResource.state.liveVideoStreaming
                                                                  ? AVCaptureVideoOrientationPortrait
                                                                  : AVCaptureVideoOrientationLandscapeRight];
        [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming
                                               session:_captureResource.managedSession.avSession];
        [_captureResource.videoDataSource commitConfiguration];
    }];
    SCLogCapturerInfo(@"Set live video streaming commitConfiguration");
    if (isStreaming) {
        [_captureResource.videoDataSource startStreaming];
    }
}

- (void)_setBlackCameraNoOutputDetectorEnabled:(BOOL)enabled
{
    if (enabled) {
        [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
        [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    } else {
        [self removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
        [_captureResource.videoDataSource
            removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    }
}
- (void)setTorchActiveAsynchronously:(BOOL)torchActive
                   completionHandler:(dispatch_block_t)completionHandler
                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting torch active asynchronously to: %d", torchActive);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL torchActiveChanged = NO;
        if (_captureResource.state.torchActive != torchActive) {
            [_captureResource.device setTorchActive:torchActive];
            if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                _captureResource.frontFlashController.torchActive = torchActive;
            }
            SCLogCapturerInfo(@"Set torch active: %d -> %d", _captureResource.state.torchActive, torchActive);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setTorchActive:torchActive] build];
            torchActiveChanged = YES;
        }
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (torchActiveChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}

- (void)setNightModeActiveAsynchronously:(BOOL)active
                       completionHandler:(dispatch_block_t)completionHandler
                                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        // Only do the configuration if current device is connected
        if (_captureResource.device.isConnected) {
            SCLogCapturerInfo(@"Set night mode beginConfiguration");
            [_captureResource.managedSession performConfiguration:^{
                [self _setNightModeActive:active];
                [self _updateHRSIEnabled];
                [self _updateStillImageStabilizationEnabled];
            }];
            SCLogCapturerInfo(@"Set night mode commitConfiguration");
        }
        BOOL nightModeChanged = (_captureResource.state.isNightModeActive != active);
        if (nightModeChanged) {
            SCLogCapturerInfo(@"Set night mode active: %d -> %d", _captureResource.state.isNightModeActive, active);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setIsNightModeActive:active] build];
        }
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (nightModeChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
                [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}
- (void)_setNightModeActive:(BOOL)active
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.device setNightModeActive:active session:_captureResource.managedSession.avSession];
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {
        [self _toggleSoftwareNightmode:active];
    }
}

- (void)_toggleSoftwareNightmode:(BOOL)active
{
    SCTraceODPCompatibleStart(2);
    if (active) {
        SCLogCapturerInfo(@"Set enhanced night mode active");
        SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
        processingPipelineBuilder.enhancedNightMode = YES;
        SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
        SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline);
        [_captureResource.videoDataSource addProcessingPipeline:pipeline];
    } else {
        SCLogCapturerInfo(@"Removing processing pipeline");
        [_captureResource.videoDataSource removeProcessingPipeline];
    }
}

- (BOOL)_shouldCaptureImageFromVideo
{
    SCTraceODPCompatibleStart(2);
    BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];
    return isIphone5Series && !_captureResource.state.flashActive && ![self isLensApplied];
}
  1160. - (void)lockZoomWithContext:(NSString *)context
  1161. {
  1162. SCTraceODPCompatibleStart(2);
  1163. SCAssertMainThread();
  1164. SCLogCapturerInfo(@"Lock zoom");
  1165. _captureResource.allowsZoom = NO;
  1166. }
  1167. - (void)unlockZoomWithContext:(NSString *)context
  1168. {
  1169. SCTraceODPCompatibleStart(2);
  1170. SCAssertMainThread();
  1171. SCLogCapturerInfo(@"Unlock zoom");
  1172. // Don't let anyone unlock the zoom while ARKit is active. When ARKit shuts down, it'll unlock it.
  1173. SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);
  1174. _captureResource.allowsZoom = YES;
  1175. }
  1176. - (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context
  1177. {
  1178. SCTraceODPCompatibleStart(2);
  1179. SCAssertMainThread();
  1180. SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom);
  1181. SCLogCapturerInfo(@"Setting zoom factor to: %f", zoomFactor);
  1182. [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:_captureResource.device immediately:NO];
  1183. }
- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                              context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom);
    SCLogCapturerInfo(@"Setting zoom factor to: %f devicePosition:%lu", zoomFactor, (unsigned long)devicePosition);
    SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];
    [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:device immediately:YES];
}

- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                        fromUser:(BOOL)fromUser
                               completionHandler:(dispatch_block_t)completionHandler
                                         context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            CGPoint exposurePoint;
            if ([self isVideoMirrored]) {
                exposurePoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y);
            } else {
                exposurePoint = pointOfInterest;
            }
            if (_captureResource.device.softwareZoom) {
                // Compensate for software zoom: map the point from zoomed preview
                // coordinates back to sensor coordinates.
                [_captureResource.device
                    setExposurePointOfInterest:CGPointMake(
                                                   (exposurePoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5,
                                                   (exposurePoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)
                                      fromUser:fromUser];
            } else {
                [_captureResource.device setExposurePointOfInterest:exposurePoint fromUser:fromUser];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}
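// Worked example of the remapping above (values assumed, for illustration):
// with softwareZoom = 2 and a tap at (0.75, 0.25) in the zoomed preview, the
// point scales about the center (0.5, 0.5):
//
//   x' = (0.75 - 0.5) / 2 + 0.5 = 0.625
//   y' = (0.25 - 0.5) / 2 + 0.5 = 0.375
//
// so the device receives (0.625, 0.375), the same scene point as before zooming.
// The identical remapping is applied for autofocus below.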
- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                completionHandler:(dispatch_block_t)completionHandler
                                          context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            CGPoint focusPoint;
            if ([self isVideoMirrored]) {
                focusPoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y);
            } else {
                focusPoint = pointOfInterest;
            }
            if (_captureResource.device.softwareZoom) {
                // Compensate for software zoom (same remapping as for exposure above)
                [_captureResource.device
                    setAutofocusPointOfInterest:CGPointMake(
                                                    (focusPoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5,
                                                    (focusPoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)];
            } else {
                [_captureResource.device setAutofocusPointOfInterest:focusPoint];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}

- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                   completionHandler:(dispatch_block_t)completionHandler
                                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:pointOfInterest
                                                completionHandler:completionHandler
                                                         resource:_captureResource];
}

- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                                  context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            [_captureResource.device continuousAutofocus];
            [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
            if (SCCameraTweaksEnablePortraitModeAutofocus()) {
                [self setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5)
                                                 completionHandler:nil
                                                           context:context];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}
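// Tap-to-focus sketch (illustrative; the actual call sites live in the camera UI
// layer, which is not shown in this file):
//
//   // On a tap at a normalized preview point:
//   [capturer setAutofocusPointOfInterestAsynchronously:point completionHandler:nil context:context];
//   [capturer setExposurePointOfInterestAsynchronously:point fromUser:YES completionHandler:nil context:context];
//
//   // Later, to fall back to center-weighted continuous AF/AE (plus the portrait
//   // mode point of interest, when that tweak is enabled):
//   [capturer continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:nil context:context];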
#pragma mark - Add / Remove Listener

- (void)addListener:(id<SCManagedCapturerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    // Only replay the initial values below if the listener was freshly added to the announcer.
    SC_GUARD_ELSE_RETURN([_captureResource.announcer addListener:listener]);
    // After adding the listener, make sure we call all of these methods with their
    // initial values
    [_captureResource.queuePerformer perform:^{
        SCManagedCapturerState *state = [_captureResource.state copy];
        AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
        LSAGLView *videoPreviewGLView = _captureResource.videoPreviewGLViewManager.view;
        runOnMainThreadAsynchronously(^{
            SCTraceStart();
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) {
                [listener managedCapturer:self didChangeState:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) {
                [listener managedCapturer:self didChangeCaptureDevicePosition:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) {
                [listener managedCapturer:self didChangeNightModeActive:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) {
                [listener managedCapturer:self didChangeFlashActive:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) {
                [listener managedCapturer:self didChangeFlashSupportedAndTorchSupported:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) {
                [listener managedCapturer:self didChangeZoomFactor:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) {
                [listener managedCapturer:self didChangeLowLightCondition:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) {
                [listener managedCapturer:self didChangeAdjustingExposure:state];
            }
            if (!SCDeviceSupportsMetal()) {
                if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) {
                    [listener managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];
                }
            }
            if (videoPreviewGLView &&
                [listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) {
                [listener managedCapturer:self didChangeVideoPreviewGLView:videoPreviewGLView];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) {
                [listener managedCapturer:self didChangeLensesActive:state];
            }
        });
    }];
}
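// Listener sketch (hypothetical observer class, for illustration). The
// SCManagedCapturerListener callbacks used above are all optional, hence the
// respondsToSelector: checks; a freshly added listener is replayed the current
// values on the main thread:
//
//   @interface SCMyCameraObserver : NSObject <SCManagedCapturerListener>
//   @end
//
//   @implementation SCMyCameraObserver
//   - (void)managedCapturer:(id)capturer didChangeState:(SCManagedCapturerState *)state
//   {
//       // Invoked once right after addListener:, then again on every state change.
//   }
//   @end
//
// (The exact parameter types are those declared by SCManagedCapturerListener.)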
- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.announcer removeListener:listener];
}

- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.videoDataSource addListener:listener];
}

- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.videoDataSource removeListener:listener];
}

- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.deviceCapacityAnalyzer addListener:listener];
}

- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.deviceCapacityAnalyzer removeListener:listener];
}

#pragma mark - Debug

- (NSString *)debugInfo
{
    SCTraceODPCompatibleStart(2);
    NSMutableString *info = [NSMutableString new];
    [info appendString:@"==== SCManagedCapturer tokens ====\n"];
    [_captureResource.tokenSet enumerateObjectsUsingBlock:^(SCCapturerToken *_Nonnull token, BOOL *_Nonnull stop) {
        [info appendFormat:@"%@\n", token.debugDescription];
    }];
    return info.copy;
}

- (NSString *)description
{
    return [self debugDescription];
}

- (NSString *)debugDescription
{
    return [NSString sc_stringWithFormat:@"SCManagedCapturer state:\n%@\nVideo streamer info:\n%@",
                                         _captureResource.state.debugDescription,
                                         _captureResource.videoDataSource.description];
}

- (CMTime)firstWrittenAudioBufferDelay
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker firstWrittenAudioBufferDelay:_captureResource];
}

- (BOOL)audioQueueStarted
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker audioQueueStarted:_captureResource];
}

#pragma mark - SCTimeProfilable

+ (SCTimeProfilerContext)context
{
    return SCTimeProfilerContextCamera;
}

// We disable and re-enable the liveness timer when entering the background and foreground
- (void)applicationDidEnterBackground
{
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker destroyLivenessConsistencyTimer:_captureResource];
    // Hide the view when in background.
    if (!SCDeviceSupportsMetal()) {
        [_captureResource.queuePerformer perform:^{
            _captureResource.appInBackground = YES;
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            _captureResource.videoPreviewLayer.hidden = YES;
            [CATransaction commit];
        }];
    } else {
        [_captureResource.queuePerformer perform:^{
            _captureResource.appInBackground = YES;
            // If it is running, stop the streaming.
            if (_captureResource.status == SCManagedCapturerStatusRunning) {
                [_captureResource.videoDataSource stopStreaming];
            }
        }];
    }
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidEnterBackground];
}

- (void)applicationWillEnterForeground
{
    SCTraceODPCompatibleStart(2);
    if (!SCDeviceSupportsMetal()) {
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            _captureResource.appInBackground = NO;
            if (!SCDeviceSupportsMetal()) {
                [self _fixNonMetalSessionPreviewInconsistency];
            }
            // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but we need to verify.
            if (SC_AT_LEAST_IOS_10) {
                [self _runningConsistencyCheckAndFix];
                // For OS version >= iOS 10, try to fix the AVCaptureSession when the app is entering the foreground.
                _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
                [self _fixAVSessionIfNecessary];
            }
        }];
    } else {
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            _captureResource.appInBackground = NO;
            if (_captureResource.status == SCManagedCapturerStatusRunning) {
                [_captureResource.videoDataSource startStreaming];
            }
            // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but we need to verify.
            if (SC_AT_LEAST_IOS_10) {
                [self _runningConsistencyCheckAndFix];
                // For OS version >= iOS 10, try to fix the AVCaptureSession when the app is entering the foreground.
                _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
                [self _fixAVSessionIfNecessary];
            }
        }];
    }
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillEnterForeground];
}

- (void)applicationWillResignActive
{
    SCTraceODPCompatibleStart(2);
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillResignActive];
    [_captureResource.queuePerformer perform:^{
        [self _pauseCaptureSessionKVOCheck];
    }];
}

- (void)applicationDidBecomeActive
{
    SCTraceODPCompatibleStart(2);
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidBecomeActive];
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        // Since we are foregrounded, run the consistency check immediately.
        // Reset the number of retries for fixing status inconsistency.
        _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0;
        [self _runningConsistencyCheckAndFix];
        if (!SC_AT_LEAST_IOS_10) {
            // For OS version < iOS 10, try to fix the AVCaptureSession after the app becomes active.
            _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
            [self _fixAVSessionIfNecessary];
        }
        [self _resumeCaptureSessionKVOCheck];
        if (_captureResource.status == SCManagedCapturerStatusRunning) {
            // Reschedule the timer if we don't have it already
            runOnMainThreadAsynchronously(^{
                SCTraceStart();
                [SCCaptureWorker setupLivenessConsistencyTimerIfForeground:_captureResource];
            });
        }
    }];
}

- (void)_runningConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    // Don't enforce consistency on the simulator, as it'll constantly false-positive and restart the session.
    SC_GUARD_ELSE_RETURN(![SCDeviceName isSimulator]);
    if (_captureResource.state.arSessionActive) {
        [self _runningARSessionConsistencyCheckAndFix];
    } else {
        [self _runningAVCaptureSessionConsistencyCheckAndFix];
    }
}

- (void)_runningARSessionConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(_captureResource.state.arSessionActive, @"");
    if (@available(iOS 11.0, *)) {
        // Occasionally the capture session will get into a weird "stuck" state.
        // If this happens, we'll see that the timestamp for the most recent frame is behind the current time.
        // Pausing the session for a moment and restarting it attempts to jog it loose.
        NSTimeInterval timeSinceLastFrame = CACurrentMediaTime() - _captureResource.arSession.currentFrame.timestamp;
        BOOL reset = NO;
        if (_captureResource.arSession.currentFrame.camera.trackingStateReason == ARTrackingStateReasonInitializing) {
            if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold) {
                SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (possible hung init), fix now ***");
                reset = YES;
            }
        } else if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) {
            SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (init complete), fix now ***");
            reset = YES;
        }
        if (reset) {
            [SCCaptureWorker turnARSessionOff:_captureResource];
            [SCCaptureWorker turnARSessionOn:_captureResource];
        }
    }
}
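// Staleness math, for concreteness (the threshold constants are defined earlier
// in this file; their values are not shown in this section): with the latest
// frame timestamped T,
//
//   timeSinceLastFrame = CACurrentMediaTime() - T
//
// and the ARSession is bounced off and back on when this exceeds the hung-init
// threshold while tracking is still initializing, or the delay threshold once
// initialization has completed.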
- (void)_runningAVCaptureSessionConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(!_captureResource.state.arSessionActive, @"");
    [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                                     uniqueId:@""
                                     stepName:@"startConsistencyCheckAndFix"];
    // If the video preview layer's hidden status is out of sync with the session's
    // running status, fix that now. Also, we don't care that much if the status is not running.
    if (!SCDeviceSupportsMetal()) {
        [self _fixNonMetalSessionPreviewInconsistency];
    }
    // Skip the liveness consistency check if we are in the background
    if (_captureResource.appInBackground) {
        SCLogCapturerInfo(@"*** Skipped liveness consistency check, as we are in the background ***");
        return;
    }
    if (_captureResource.status == SCManagedCapturerStatusRunning && !_captureResource.managedSession.isRunning) {
        SCGhostToSnappableSignalCameraFixInconsistency();
        SCLogCapturerInfo(@"*** Found status inconsistency for running, fix now ***");
        _captureResource.numRetriesFixInconsistencyWithCurrentSession++;
        if (_captureResource.numRetriesFixInconsistencyWithCurrentSession <=
            kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) {
            SCTraceStartSection("Fix non-running session")
            {
                if (!SCDeviceSupportsMetal()) {
                    [CATransaction begin];
                    [CATransaction setDisableActions:YES];
                    [_captureResource.managedSession startRunning];
                    [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
                    [CATransaction commit];
                } else {
                    [_captureResource.managedSession startRunning];
                }
            }
            SCTraceEndSection();
        } else {
            SCTraceStartSection("Create new capturer session")
            {
                // Start running with a new capture session if fixing the inconsistency did not succeed
                // after kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession retries.
                SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***");
                [self _startRunningWithNewCaptureSession];
            }
            SCTraceEndSection();
        }
        BOOL sessionIsRunning = _captureResource.managedSession.isRunning;
        if (sessionIsRunning && !SCDeviceSupportsMetal()) {
            // If it is fixed, signal that we received the first frame.
            SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
            runOnMainThreadAsynchronously(^{
                // This approximates the did-render time; it is not accurate.
                SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
            });
        }
        SCLogCapturerInfo(@"*** Applied inconsistency fix, running state : %@ ***", sessionIsRunning ? @"YES" : @"NO");
        if (_captureResource.managedSession.isRunning) {
            [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                                             uniqueId:@""
                                             stepName:@"finishConsistencyCheckAndFix"];
            [[SCLogger sharedInstance]
                logTimedEventEnd:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                        uniqueId:@""
                      parameters:@{
                          @"count" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession)
                      }];
        }
    } else {
        [[SCLogger sharedInstance] cancelLogTimedEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@""];
        // Reset the number of retries for fixing status inconsistency
        _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0;
    }
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning];
}
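// Recovery escalation, summarized (derived from the branches above): the first
// kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession attempts simply
// call startRunning on the existing session; once that budget is exhausted, the
// session is torn down and rebuilt via _startRunningWithNewCaptureSession, which
// is far more disruptive (it can interrupt an in-flight recording; see that
// method below). The retry counter is reset when no inconsistency is found, and
// again when the app becomes active.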
- (void)mediaServicesWereReset
{
    SCTraceODPCompatibleStart(2);
    [self mediaServicesWereLost];
    [_captureResource.queuePerformer perform:^{
        /* If the current state requires the ARSession, restart it.
           Explicitly flip the arSessionActive flag so that `turnSessionOn` thinks it can reset itself.
         */
        if (_captureResource.state.arSessionActive) {
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setArSessionActive:NO] build];
            [SCCaptureWorker turnARSessionOn:_captureResource];
        }
    }];
}

- (void)mediaServicesWereLost
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        if (!_captureResource.state.arSessionActive && !_captureResource.managedSession.isRunning) {
            /*
             If the session is running we will trigger
             _sessionRuntimeError: so nothing else is
             needed here.
             */
            [_captureResource.videoCapturer.outputURL reloadAssetKeys];
        }
    }];
}

- (void)_livenessConsistency:(NSTimer *)timer
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    // We can directly check the application state because this timer is scheduled
    // on the main thread.
    if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) {
        [_captureResource.queuePerformer perform:^{
            [self _runningConsistencyCheckAndFix];
        }];
    }
}

- (void)_sessionRuntimeError:(NSNotification *)notification
{
    SCTraceODPCompatibleStart(2);
    NSError *sessionError = notification.userInfo[AVCaptureSessionErrorKey];
    SCLogCapturerError(@"Encountered runtime error for capture session %@", sessionError);
    NSString *errorString =
        [sessionError.description stringByReplacingOccurrencesOfString:@" " withString:@"_"].uppercaseString
            ?: @"UNKNOWN_ERROR";
    [[SCUserTraceLogger shared]
        logUserTraceEvent:[NSString sc_stringWithFormat:@"AVCAPTURESESSION_RUNTIME_ERROR_%@", errorString]];
    if (sessionError.code == AVErrorMediaServicesWereReset) {
        // If it is an AVErrorMediaServicesWereReset error, we can just call startRunning; it is much more lightweight.
        [_captureResource.queuePerformer perform:^{
            if (!SCDeviceSupportsMetal()) {
                [CATransaction begin];
                [CATransaction setDisableActions:YES];
                [_captureResource.managedSession startRunning];
                [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
                [CATransaction commit];
            } else {
                [_captureResource.managedSession startRunning];
            }
        }];
    } else {
        if (_captureResource.isRecreateSessionFixScheduled) {
            SCLogCoreCameraInfo(@"Fixing session runtime error is scheduled, skip");
            return;
        }
        _captureResource.isRecreateSessionFixScheduled = YES;
        NSTimeInterval delay = 0;
        NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate];
        if (timeNow - _captureResource.lastSessionRuntimeErrorTime < kMinFixSessionRuntimeErrorInterval) {
            SCLogCoreCameraInfo(@"Fixing runtime error session in less than %f, delay",
                                kMinFixSessionRuntimeErrorInterval);
            delay = kMinFixSessionRuntimeErrorInterval;
        }
        _captureResource.lastSessionRuntimeErrorTime = timeNow;
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            // Occasionally _captureResource.avSession will throw out an error when shutting down. If this happens
            // while ARKit is starting up, _startRunningWithNewCaptureSession will throw a wrench in ARSession
            // startup and freeze the image.
            SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);
            // Need to reset the flag before _startRunningWithNewCaptureSession
            _captureResource.isRecreateSessionFixScheduled = NO;
            [self _startRunningWithNewCaptureSession];
            [self _fixAVSessionIfNecessary];
        }
                                          after:delay];
    }
    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsRuntimeError
                                      parameters:@{
                                          @"error" : sessionError == nil ? @"Unknown error" : sessionError.description,
                                      }
                                secretParameters:nil
                                         metrics:nil];
}
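// Debounce sketch: runtime errors are coalesced. While a recreation fix is
// already scheduled (isRecreateSessionFixScheduled), further errors are dropped;
// and if two errors arrive within kMinFixSessionRuntimeErrorInterval seconds of
// each other, the second fix is deferred by that same interval. The constant's
// value is defined elsewhere in this file and is not shown in this section.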
- (void)_startRunningWithNewCaptureSessionIfNecessary
{
    SCTraceODPCompatibleStart(2);
    if (_captureResource.isRecreateSessionFixScheduled) {
        SCLogCapturerInfo(@"Session recreation is scheduled, return");
        return;
    }
    _captureResource.isRecreateSessionFixScheduled = YES;
    [_captureResource.queuePerformer perform:^{
        // Need to reset the flag before _startRunningWithNewCaptureSession
        _captureResource.isRecreateSessionFixScheduled = NO;
        [self _startRunningWithNewCaptureSession];
    }];
}

- (void)_startRunningWithNewCaptureSession
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCLogCapturerInfo(@"Start running with new capture session. isRecording:%d isStreaming:%d status:%lu",
                      _captureResource.videoRecording, _captureResource.videoDataSource.isStreaming,
                      (unsigned long)_captureResource.status);
    // Mark the start of recreating the session
    [_captureResource.blackCameraDetector sessionWillRecreate];
    // Lightweight fix gating
    BOOL lightWeightFix = SCCameraTweaksSessionLightWeightFixEnabled() || SCCameraTweaksBlackCameraRecoveryEnabled();
    if (!lightWeightFix) {
        [_captureResource.deviceCapacityAnalyzer removeListener:_captureResource.stillImageCapturer];
        [self removeListener:_captureResource.stillImageCapturer];
        [_captureResource.videoDataSource removeListener:_captureResource.lensProcessingCore.capturerListener];
        [_captureResource.videoDataSource removeListener:_captureResource.deviceCapacityAnalyzer];
        [_captureResource.videoDataSource removeListener:_captureResource.stillImageCapturer];
        if (SCIsMasterBuild()) {
            [_captureResource.videoDataSource removeListener:_captureResource.videoStreamReporter];
        }
        [_captureResource.videoDataSource removeListener:_captureResource.videoScanner];
        [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];
        [_captureResource.videoDataSource
            removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    }
    [_captureResource.videoCapturer.outputURL reloadAssetKeys];
    BOOL isStreaming = _captureResource.videoDataSource.isStreaming;
    if (_captureResource.videoRecording) {
        // Stop video recording prematurely
        [self stopRecordingAsynchronouslyWithContext:SCCapturerContext];
        NSError *error = [NSError
            errorWithDomain:kSCManagedCapturerErrorDomain
                description:
                    [NSString
                        sc_stringWithFormat:@"Interrupt video recording to start new session. %@",
                                            @{
                                                @"isAVSessionRunning" : @(_captureResource.managedSession.isRunning),
                                                @"numRetriesFixInconsistency" :
                                                    @(_captureResource.numRetriesFixInconsistencyWithCurrentSession),
                                                @"numRetriesFixAVCaptureSession" :
                                                    @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession),
                                                @"lastSessionRuntimeErrorTime" :
                                                    @(_captureResource.lastSessionRuntimeErrorTime),
                                            }]
                       code:-1];
        [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoRecordingInterrupted
                                          parameters:@{
                                              @"error" : error.description
                                          }
                                    secretParameters:nil
                                             metrics:nil];
    }
    @try {
        if (@available(iOS 11.0, *)) {
            [_captureResource.arSession pause];
            if (!lightWeightFix) {
                [_captureResource.videoDataSource removeListener:_captureResource.arImageCapturer];
            }
        }
        [_captureResource.managedSession stopRunning];
        [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession];
    } @catch (NSException *exception) {
        SCLogCapturerError(@"Encountered Exception %@", exception);
    } @finally {
        // Nil out device inputs from both devices
        [[SCManagedCaptureDevice front] resetDeviceAsInput];
        [[SCManagedCaptureDevice back] resetDeviceAsInput];
    }
    if (!SCDeviceSupportsMetal()) {
        // Redo the video preview to mitigate https://ph.sc-corp.net/T42584
        [SCCaptureWorker redoVideoPreviewLayer:_captureResource];
    }
#if !TARGET_IPHONE_SIMULATOR
    if (@available(iOS 11.0, *)) {
        _captureResource.arSession = [[ARSession alloc] init];
        _captureResource.arImageCapturer =
            [_captureResource.arImageCaptureProvider arImageCapturerWith:_captureResource.queuePerformer
                                                      lensProcessingCore:_captureResource.lensProcessingCore];
    }
    [self _resetAVCaptureSession];
#endif
    [_captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];
    [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession];
    if (_captureResource.fileInputDecider.shouldProcessFileInput) {
        // Keep the same logic, always create a new VideoDataSource
        [self _setupNewVideoFileDataSource];
    } else {
        if (!lightWeightFix) {
            [self _setupNewVideoDataSource];
        } else {
            [self _setupVideoDataSourceWithNewSession];
        }
    }
    if (_captureResource.status == SCManagedCapturerStatusRunning) {
        if (!SCDeviceSupportsMetal()) {
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            // Set the session to be the new session before start running.
            _captureResource.videoPreviewLayer.session = _captureResource.managedSession.avSession;
            if (!_captureResource.appInBackground) {
                [_captureResource.managedSession startRunning];
            }
            [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
            [CATransaction commit];
        } else {
            if (!_captureResource.appInBackground) {
                [_captureResource.managedSession startRunning];
            }
        }
    }
    // Since this start and stop happens in one block, we don't have to worry
    // about streamingSequence issues
    if (isStreaming) {
        [_captureResource.videoDataSource startStreaming];
    }
    SCManagedCapturerState *state = [_captureResource.state copy];
    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
    runOnMainThreadAsynchronously(^{
        [_captureResource.announcer managedCapturer:self didResetFromRuntimeError:state];
        if (!SCDeviceSupportsMetal()) {
            [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];
        }
    });
    // Mark the end of recreating the session
    [_captureResource.blackCameraDetector sessionDidRecreate];
}

/**
 * Heavy-weight session fixing approach: recreate everything
 */
- (void)_setupNewVideoDataSource
{
    if (@available(iOS 11.0, *)) {
        _captureResource.videoDataSource =
            [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession
                                                  arSession:_captureResource.arSession
                                             devicePosition:_captureResource.state.devicePosition];
        [_captureResource.videoDataSource addListener:_captureResource.arImageCapturer];
        if (_captureResource.state.isPortraitModeActive) {
            [_captureResource.videoDataSource setDepthCaptureEnabled:YES];
            SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
            processingPipelineBuilder.portraitModeEnabled = YES;
            SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
            [_captureResource.videoDataSource addProcessingPipeline:pipeline];
        }
    } else {
        _captureResource.videoDataSource =
            [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession
                                             devicePosition:_captureResource.state.devicePosition];
    }
    [self _setupVideoDataSourceListeners];
}

- (void)_setupNewVideoFileDataSource
{
    _captureResource.videoDataSource =
        [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:_captureResource.fileInputDecider.fileURL];
    [_captureResource.lensProcessingCore setLensesActive:YES
                                        videoOrientation:_captureResource.videoDataSource.videoOrientation
                                           filterFactory:nil];
    runOnMainThreadAsynchronously(^{
        [_captureResource.videoPreviewGLViewManager prepareViewIfNecessary];
    });
    [self _setupVideoDataSourceListeners];
}

/**
 * Light-weight session fixing approach: recreate the AVCaptureSession / AVCaptureOutput, and bind the existing video
 * data source to the new session
 */
- (void)_setupVideoDataSourceWithNewSession
{
    if (@available(iOS 11.0, *)) {
        SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource;
        [streamer setupWithSession:_captureResource.managedSession.avSession
                    devicePosition:_captureResource.state.devicePosition];
        [streamer setupWithARSession:_captureResource.arSession];
    } else {
        SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource;
        [streamer setupWithSession:_captureResource.managedSession.avSession
                    devicePosition:_captureResource.state.devicePosition];
    }
    [_captureResource.stillImageCapturer setupWithSession:_captureResource.managedSession.avSession];
}
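// The two recovery flavors above trade thoroughness for disruption. A rough
// comparison (inferred from the code paths, not from original documentation):
//
//   heavy (_setupNewVideoDataSource):            allocates a new SCManagedVideoStreamer,
//                                                re-adds every listener via
//                                                _setupVideoDataSourceListeners, and
//                                                recreates the still image capturer.
//   light (_setupVideoDataSourceWithNewSession): keeps the existing streamer and its
//                                                listeners, only rebinding them to the
//                                                freshly created AVCaptureSession/ARSession.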
- (void)_setupVideoDataSourceListeners
{
    if (_captureResource.videoFrameSampler) {
        [_captureResource.announcer addListener:_captureResource.videoFrameSampler];
    }
    [_captureResource.videoDataSource addSampleBufferDisplayController:_captureResource.sampleBufferDisplayController];
    [_captureResource.videoDataSource addListener:_captureResource.lensProcessingCore.capturerListener];
    [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];
    if (SCIsMasterBuild()) {
        [_captureResource.videoDataSource addListener:_captureResource.videoStreamReporter];
    }
    [_captureResource.videoDataSource addListener:_captureResource.videoScanner];
    [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    _captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:_captureResource];
    [_captureResource.deviceCapacityAnalyzer addListener:_captureResource.stillImageCapturer];
    [_captureResource.videoDataSource addListener:_captureResource.stillImageCapturer];
    [self addListener:_captureResource.stillImageCapturer];
}

- (void)_resetAVCaptureSession
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
    // lazily initialize _captureResource.kvoController on background thread
    if (!_captureResource.kvoController) {
        _captureResource.kvoController = [[FBKVOController alloc] initWithObserver:self];
    }
    [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession];
    _captureResource.managedSession =
        [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:_captureResource.blackCameraDetector];
    [_captureResource.kvoController observe:_captureResource.managedSession.avSession
                                    keyPath:@keypath(_captureResource.managedSession.avSession, running)
                                    options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                                     action:_captureResource.handleAVSessionStatusChange];
}

- (void)_pauseCaptureSessionKVOCheck
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession];
}

- (void)_resumeCaptureSessionKVOCheck
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    [_captureResource.kvoController observe:_captureResource.managedSession.avSession
                                    keyPath:@keypath(_captureResource.managedSession.avSession, running)
                                    options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                                     action:_captureResource.handleAVSessionStatusChange];
}
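// KVO sketch: FBKVOController watches the session's `running` key so that
// unexpected stops can be routed to _captureResource.handleAVSessionStatusChange.
// Observation is paused in applicationWillResignActive and resumed in
// applicationDidBecomeActive (see above), presumably so that system-initiated
// session stops around app transitions are not misread as failures.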
- (id<SCManagedVideoDataSource>)currentVideoDataSource
{
    SCTraceODPCompatibleStart(2);
    return _captureResource.videoDataSource;
}

- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        // Front and back should both be available if the user has no restrictions on the camera.
        BOOL front = [[SCManagedCaptureDevice front] isAvailable];
        BOOL back = [[SCManagedCaptureDevice back] isAvailable];
        AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        runOnMainThreadAsynchronously(^{
            callback(front, back, status);
        });
    }];
}

- (SCSnapCreationTriggers *)snapCreationTriggers
{
    return _captureResource.snapCreationTriggers;
}

- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
          deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
              fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
        arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
                 glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
               lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
           lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
    managedCapturerPreviewLayerControllerDelegate:
        (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate
{
    _captureResource.blackCameraDetector = blackCameraDetector;
    _captureResource.deviceMotionProvider = deviceMotionProvider;
    _captureResource.fileInputDecider = fileInputDecider;
    _captureResource.arImageCaptureProvider = arImageCaptureProvider;
    _captureResource.videoPreviewGLViewManager = glViewManager;
    [_captureResource.videoPreviewGLViewManager configureWithCaptureResource:_captureResource];
    _captureResource.lensAPIProvider = lensAPIProvider;
    _captureResource.lsaTrackingComponentHandler = lsaComponentTracker;
    [_captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_captureResource];
    _captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;
    [SCManagedCapturePreviewLayerController sharedInstance].delegate = previewLayerControllerDelegate;
}

@end