2014 snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

667 lines
29 KiB

  1. //
  2. // SCManagedPhotoCapturer.m
  3. // Snapchat
  4. //
  5. // Created by Chao Pang on 10/5/16.
  6. // Copyright © 2016 Snapchat, Inc. All rights reserved.
  7. //
  8. #import "SCManagedPhotoCapturer.h"
  9. #import "AVCaptureConnection+InputDevice.h"
  10. #import "SCCameraTweaks.h"
  11. #import "SCLogger+Camera.h"
  12. #import "SCManagedCapturer.h"
  13. #import "SCManagedFrameHealthChecker.h"
  14. #import "SCManagedStillImageCapturer_Protected.h"
  15. #import "SCStillImageCaptureVideoInputMethod.h"
  16. #import "SCStillImageDepthBlurFilter.h"
  17. #import <SCCrashLogger/SCCrashLogger.h>
  18. #import <SCFoundation/SCAssertWrapper.h>
  19. #import <SCFoundation/SCLog.h>
  20. #import <SCFoundation/SCPerforming.h>
  21. #import <SCFoundation/SCQueuePerformer.h>
  22. #import <SCFoundation/SCTrace.h>
  23. #import <SCLenses/SCLens.h>
  24. #import <SCLogger/SCCameraMetrics.h>
  25. #import <SCLogger/SClogger+Performance.h>
  26. #import <SCWebP/UIImage+WebP.h>
  27. @import ImageIO;
// Error domain for failures surfaced by SCManagedPhotoCapturer.
static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain";
// An exception was thrown inside -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:].
static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000;
// A delegate callback arrived while the capturer was in an unexpected status.
static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001;
// Capture state machine: PrepareToCapture -> WillCapture -> DidFinishProcess.
typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) {
SCManagedPhotoCapturerStatusPrepareToCapture,
SCManagedPhotoCapturerStatusWillCapture,
SCManagedPhotoCapturerStatusDidFinishProcess,
};
// Private conformance to the AVCapturePhotoOutput callback protocol.
@interface SCManagedPhotoCapturer () <AVCapturePhotoCaptureDelegate>
@end
@implementation SCManagedPhotoCapturer {
AVCapturePhotoOutput *_photoOutput;
// Set when a capture was requested while exposure is still adjusting; the
// capture fires once adjustment settles or the deadline timer elapses.
BOOL _shouldCapture;
// HRSI is requested per-AVCapturePhotoSettings rather than on the output;
// see -setHighResolutionStillImageOutputEnabled: for why.
BOOL _shouldEnableHRSI;
BOOL _portraitModeCaptureEnabled;
NSUInteger _retries;
// Focus point for the portrait-mode depth blur, normalized (0..1) coordinates.
CGPoint _portraitModePointOfInterest;
// Lazily created when portrait mode is first enabled.
SCStillImageDepthBlurFilter *_depthBlurFilter;
// Caller's completion handler; held until the capture finishes or fails.
sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock;
SCStillImageCaptureVideoInputMethod *_videoFileMethod;
SCManagedPhotoCapturerStatus _status;
}
  50. - (instancetype)initWithSession:(AVCaptureSession *)session
  51. performer:(id<SCPerforming>)performer
  52. lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
  53. delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
  54. {
  55. SCTraceStart();
  56. self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
  57. if (self) {
  58. [self setupWithSession:session];
  59. _portraitModePointOfInterest = CGPointMake(0.5, 0.5);
  60. }
  61. return self;
  62. }
  63. - (void)setupWithSession:(AVCaptureSession *)session
  64. {
  65. SCTraceStart();
  66. _photoOutput = [[AVCapturePhotoOutput alloc] init];
  67. _photoOutput.highResolutionCaptureEnabled = YES;
  68. [self setAsOutput:session];
  69. }
  70. - (void)setAsOutput:(AVCaptureSession *)session
  71. {
  72. SCTraceStart();
  73. if ([session canAddOutput:_photoOutput]) {
  74. [session addOutput:_photoOutput];
  75. }
  76. }
// Records whether HRSI should be requested for subsequent captures; applied
// later when building each AVCapturePhotoSettings.
- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
// Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause
// black frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings.
// https://ph.sc-corp.net/T96228
_shouldEnableHRSI = highResolutionStillImageOutputEnabled;
}
// Intentionally a no-op for this subclass; stabilization is requested
// per-capture when the photo settings are built.
- (void)enableStillImageStabilization
{
// The lens stabilization is enabled when configure AVCapturePhotoSettings
// instead of AVCapturePhotoOutput
SCTraceStart();
}
  92. - (void)setPortraitModeCaptureEnabled:(BOOL)enabled
  93. {
  94. _portraitModeCaptureEnabled = enabled;
  95. if (@available(ios 11.0, *)) {
  96. _photoOutput.depthDataDeliveryEnabled = enabled;
  97. }
  98. if (enabled && _depthBlurFilter == nil) {
  99. _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];
  100. }
  101. }
// Stores the normalized point of interest later used by the portrait-mode
// depth blur when rendering the captured photo.
- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
_portraitModePointOfInterest = pointOfInterest;
}
// Detaches the photo output from the given capture session.
- (void)removeAsOutput:(AVCaptureSession *)session
{
SCTraceStart();
[session removeOutput:_photoOutput];
}
/// Entry point for taking a still photo. Stashes capture parameters and the
/// caller's completion handler, then either captures immediately or waits
/// (bounded by a deadline) for exposure adjustment to settle.
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
atZoomFactor:(float)zoomFactor
fieldOfView:(float)fieldOfView
state:(SCManagedCapturerState *)state
captureSessionID:(NSString *)captureSessionID
shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
completionHandler:
(sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
SCTraceStart();
SCAssert(completionHandler, @"completionHandler shouldn't be nil");
SCAssert([_performer isCurrentPerformer], @"");
_retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
_aspectRatio = aspectRatio;
_zoomFactor = zoomFactor;
_fieldOfView = fieldOfView;
_state = state;
_captureSessionID = captureSessionID;
_shouldCaptureFromVideo = shouldCaptureFromVideo;
SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying "
@"current completion handler.");
// The purpose of these lines is to attach a strong reference to self to the completion handler.
// This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a completion
// handler.
// If self is deallocated during the call to _photoOuptut capturePhotoWithSettings:delegate:, which may happen if
// any AVFoundationError occurs,
// then it's callback method, captureOutput:didFinish..., will not be called, and the completion handler will be
// forgotten.
// This comes with a risk of a memory leak. If for whatever reason the completion handler field is never used and
// then unset,
// then we have a permanent retain cycle.
_callbackBlock = completionHandler;
// NOTE(review): strongSelf is captured deliberately (see comment above); the
// cycle is broken when _completionHandler runs and nils _callbackBlock.
__typeof(self) strongSelf = self;
_completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
strongSelf->_callbackBlock(fullScreenImage, metadata, error);
strongSelf->_callbackBlock = nil;
};
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];
if (!_adjustingExposureManualDetect) {
SCLogCoreCameraInfo(@"Capturing still image now");
[self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
_shouldCapture = NO;
} else {
SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image");
_shouldCapture = YES;
[self _deadlineCapturePhoto];
}
}
  159. #pragma mark - SCManagedDeviceCapacityAnalyzerListener
// Manual-detection signal that exposure adjustment started/stopped. Hops onto
// the performer queue, records the flag, then fires any pending capture.
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:(BOOL)adjustingExposure
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
// Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
self->_adjustingExposureManualDetect = adjustingExposure;
[self _didChangeAdjustingExposure:adjustingExposure
withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
}];
}
// Caches the latest lighting-condition classification on the performer queue.
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
self->_lightingConditionType = lightingCondition;
}];
}
  185. #pragma mark - SCManagedCapturerListener
// KVO-based exposure-change signal from the managed capturer; fires any
// pending capture once state.adjustingExposure flips to NO.
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
// Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
[self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
}];
}
  197. #pragma mark - AVCapturePhotoCaptureDelegate
// AVCapturePhotoCaptureDelegate, pre-iOS-11 (sample buffer) path. The buffer
// is only guaranteed valid for the duration of this callback, so it is
// CFRetain-ed before the async hop and CFRelease-d once processing completes.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
error:(NSError *)error
{
SCTraceStart();
if (photoSampleBuffer) {
CFRetain(photoSampleBuffer);
}
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
SCTraceStart();
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
// A nil buffer means the capture failed; surface the AVFoundation error.
SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]);
if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
previewPhotoSampleBuffer:nil];
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
uniqueId:@"IMAGE"
splitPoint:@"DID_FINISH_PROCESSING"];
[self _capturePhotoFinishedWithImageData:imageData
sampleBuffer:photoSampleBuffer
cameraInfo:cameraInfoForBuffer(photoSampleBuffer)
error:error];
} else {
// Arriving here without a WillCapture transition indicates a broken state
// machine; fail the capture instead of returning a possibly-bad image.
SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
[self _photoCapturerStatusToString:self->_status]);
[self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
code:kSCManagedPhotoCapturerInconsistentStatus
userInfo:nil]];
}
CFRelease(photoSampleBuffer);
}];
}
// AVCapturePhotoCaptureDelegate, iOS 11+ path. Converts the AVCapturePhoto to
// file data, optionally applies the portrait-mode depth blur, and completes.
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo
error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0)
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
SCTraceStart();
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
// fileDataRepresentation returns nil on failure; report the error instead.
NSData *imageData = [photo fileDataRepresentation];
SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]);
if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
if (@available(ios 11.0, *)) {
if (_portraitModeCaptureEnabled) {
// Re-render with the depth blur centered on the configured point of
// interest using the photo's depth map.
RenderData renderData = {
.depthDataMap = photo.depthData.depthDataMap,
.depthBlurPointOfInterest = &_portraitModePointOfInterest,
};
imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData];
}
}
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
uniqueId:@"IMAGE"
splitPoint:@"DID_FINISH_PROCESSING"];
[self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error];
} else {
SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
[self _photoCapturerStatusToString:self->_status]);
[self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
code:kSCManagedPhotoCapturerInconsistentStatus
userInfo:nil]];
}
}];
}
  270. - (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
  271. willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
  272. {
  273. SCTraceStart();
  274. @weakify(self);
  275. [_performer performImmediatelyIfCurrentPerformer:^{
  276. SCTraceStart();
  277. @strongify(self);
  278. SC_GUARD_ELSE_RETURN(self);
  279. if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
  280. if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) {
  281. self->_status = SCManagedPhotoCapturerStatusWillCapture;
  282. [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
  283. uniqueId:@"IMAGE"
  284. splitPoint:@"WILL_BEGIN_CAPTURE"];
  285. [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
  286. } else {
  287. SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@",
  288. [self _photoCapturerStatusToString:self->_status]);
  289. }
  290. }
  291. }];
  292. }
  293. - (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
  294. didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
  295. {
  296. SCTraceStart();
  297. @weakify(self);
  298. [_performer performImmediatelyIfCurrentPerformer:^{
  299. SCTraceStart();
  300. @strongify(self);
  301. SC_GUARD_ELSE_RETURN(self);
  302. if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
  303. if (self->_status == SCManagedPhotoCapturerStatusWillCapture ||
  304. self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) {
  305. [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
  306. uniqueId:@"IMAGE"
  307. splitPoint:@"DID_CAPTURE_PHOTO"];
  308. [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
  309. } else {
  310. SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@",
  311. [self _photoCapturerStatusToString:self->_status]);
  312. }
  313. }
  314. }];
  315. }
  316. #pragma mark - Private methods
  317. - (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
  318. {
  319. if (!adjustingExposure && self->_shouldCapture) {
  320. SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
  321. [self _capturePhotoWithExposureAdjustmentStrategy:strategy];
  322. self->_shouldCapture = NO;
  323. }
  324. }
  325. - (void)_capturePhotoFinishedWithImageData:(NSData *)imageData
  326. sampleBuffer:(CMSampleBufferRef)sampleBuffer
  327. cameraInfo:(NSDictionary *)cameraInfo
  328. error:(NSError *)error
  329. {
  330. [self _photoCaptureDidSucceedWithImageData:imageData
  331. sampleBuffer:sampleBuffer
  332. cameraInfo:cameraInfoForBuffer(sampleBuffer)
  333. error:error];
  334. self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
  335. }
// Completes a metadata-based (iOS 11+) capture and advances the state machine
// to DidFinishProcess.
- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error
{
[self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error];
self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
}
// Schedules a fallback capture: if exposure adjustment has not settled within
// SCCameraTweaksExposureDeadline(), capture anyway so the user is not stuck.
- (void)_deadlineCapturePhoto
{
SCTraceStart();
// Use the SCManagedCapturer's private queue.
@weakify(self);
[_performer perform:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
// _shouldCapture is cleared by whichever path (settle or deadline) fires
// first, so the photo is only taken once.
if (self->_shouldCapture) {
[self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
self->_shouldCapture = NO;
}
}
after:SCCameraTweaksExposureDeadline()];
}
  356. - (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy
  357. {
  358. SCTraceStart();
  359. [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
  360. if (_shouldCaptureFromVideo) {
  361. [self captureStillImageFromVideoBuffer];
  362. return;
  363. }
  364. SCAssert([_performer isCurrentPerformer], @"");
  365. SCAssert(_photoOutput, @"_photoOutput shouldn't be nil");
  366. _status = SCManagedPhotoCapturerStatusPrepareToCapture;
  367. AVCapturePhotoOutput *photoOutput = _photoOutput;
  368. AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput];
  369. SCManagedCapturerState *state = [_state copy];
  370. #if !TARGET_IPHONE_SIMULATOR
  371. if (!captureConnection) {
  372. sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
  373. _completionHandler = nil;
  374. completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
  375. code:kSCManagedStillImageCapturerNoStillImageConnection
  376. userInfo:nil]);
  377. }
  378. #endif
  379. AVCapturePhotoSettings *photoSettings =
  380. [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state];
  381. // Select appropriate image capture method
  382. if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
  383. if (!_videoFileMethod) {
  384. _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
  385. }
  386. [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
  387. [[SCCoreCameraLogger sharedInstance]
  388. logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
  389. [_delegate managedStillImageCapturerWillCapturePhoto:self];
  390. [_videoFileMethod captureStillImageWithCapturerState:state
  391. successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
  392. [_performer performImmediatelyIfCurrentPerformer:^{
  393. [self _photoCaptureDidSucceedWithImageData:imageData
  394. sampleBuffer:nil
  395. cameraInfo:cameraInfo
  396. error:error];
  397. }];
  398. }
  399. failureBlock:^(NSError *error) {
  400. [_performer performImmediatelyIfCurrentPerformer:^{
  401. [self _photoCaptureDidFailWithError:error];
  402. }];
  403. }];
  404. } else {
  405. [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"];
  406. [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"];
  407. @try {
  408. [photoOutput capturePhotoWithSettings:photoSettings delegate:self];
  409. } @catch (NSException *e) {
  410. [SCCrashLogger logHandledException:e];
  411. [self
  412. _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
  413. code:kSCManagedPhotoCapturerErrorEncounteredException
  414. userInfo:@{
  415. @"exception" : e
  416. }]];
  417. }
  418. }
  419. }
// Success path for sample-buffer based captures: builds the full-screen
// UIImage (crop/zoom/FOV applied by -imageFromData:...) and invokes the stored
// completion handler exactly once, clearing it first so retries cannot
// double-fire it.
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
sampleBuffer:(CMSampleBufferRef)sampleBuffer
cameraInfo:(NSDictionary *)cameraInfo
error:(NSError *)error
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
[[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
UIImage *fullScreenImage = [self imageFromData:imageData
currentZoomFactor:_zoomFactor
targetAspectRatio:_aspectRatio
fieldOfView:_fieldOfView
state:_state
sampleBuffer:sampleBuffer];
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
uniqueId:@"IMAGE"
splitPoint:@"WILL_START_COMPLETION_HANDLER"];
// Clear the handler before invoking it; the handler itself releases the
// strong self reference captured at capture time.
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
_completionHandler = nil;
if (completionHandler) {
completionHandler(fullScreenImage, cameraInfo, error);
}
}
// Success path for metadata-based (iOS 11+) captures: mirrors the
// sample-buffer variant but feeds photo metadata into the image pipeline and
// passes it through to the completion handler.
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
metadata:(NSDictionary *)metadata
error:(NSError *)error
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
[[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
UIImage *fullScreenImage = [self imageFromData:imageData
currentZoomFactor:_zoomFactor
targetAspectRatio:_aspectRatio
fieldOfView:_fieldOfView
state:_state
metadata:metadata];
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
uniqueId:@"IMAGE"
splitPoint:@"WILL_START_COMPLETION_HANDLER"];
// Consume the handler before invoking so it can only ever fire once.
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
_completionHandler = nil;
if (completionHandler) {
completionHandler(fullScreenImage, metadata, error);
}
}
  469. - (void)_photoCaptureDidFailWithError:(NSError *)error
  470. {
  471. SCTraceStart();
  472. SCAssert([_performer isCurrentPerformer], @"");
  473. sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
  474. _completionHandler = nil;
  475. if (completionHandler) {
  476. completionHandler(nil, nil, error);
  477. }
  478. }
  479. - (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
  480. {
  481. SCTraceStart();
  482. SCAssert([_performer isCurrentPerformer], @"");
  483. NSArray *connections = [photoOutput.connections copy];
  484. for (AVCaptureConnection *connection in connections) {
  485. for (AVCaptureInputPort *port in [connection inputPorts]) {
  486. if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
  487. return connection;
  488. }
  489. }
  490. }
  491. return nil;
  492. }
  493. - (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
  494. captureConnection:(AVCaptureConnection *)captureConnection
  495. captureState:(SCManagedCapturerState *)state
  496. {
  497. SCTraceStart();
  498. if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
  499. return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
  500. captureConnection:captureConnection
  501. captureState:state];
  502. } else {
  503. return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
  504. }
  505. }
// Decides whether the capture needs AVCapturePhotoBracketSettings (manual
// exposure) rather than the default settings.
- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
{
// According to Apple documentation, AVCapturePhotoBracketSettings do not support flashMode,
// autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
// Besides, we only use AVCapturePhotoBracketSettings if capture settings needs to be set manually.
return !state.flashActive && !_portraitModeCaptureEnabled &&
(([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||
[_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);
}
  515. - (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
  516. captureState:(SCManagedCapturerState *)state
  517. {
  518. SCTraceStart();
  519. // Specify the output file format
  520. AVCapturePhotoSettings *photoSettings =
  521. [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];
  522. // Enable HRSI if necessary
  523. if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
  524. photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
  525. }
  526. // Turn on flash if active and supported by device
  527. if (state.flashActive && state.flashSupported) {
  528. photoSettings.flashMode = AVCaptureFlashModeOn;
  529. }
  530. // Turn on stabilization if available
  531. // Seems that setting autoStillImageStabilizationEnabled doesn't work during video capture session,
  532. // but we set enable it anyway as it is harmless.
  533. if (photoSettings.isAutoStillImageStabilizationEnabled) {
  534. photoSettings.autoStillImageStabilizationEnabled = YES;
  535. }
  536. if (_portraitModeCaptureEnabled) {
  537. if (@available(ios 11.0, *)) {
  538. photoSettings.depthDataDeliveryEnabled = YES;
  539. }
  540. }
  541. return photoSettings;
  542. }
// Builds AVCapturePhotoBracketSettings for manual-exposure captures (enhanced
// night mode, or capture under device motion).
- (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
captureConnection:(AVCaptureConnection *)captureConnection
captureState:(SCManagedCapturerState *)state
{
SCTraceStart();
// NOTE(review): availableRawPhotoPixelFormatTypes may be empty, in which case
// firstObject is nil and this yields 0 (processed-only capture) — confirm
// that is the intended fallback.
OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];
NSArray<AVCaptureBracketedStillImageSettings *> *bracketedSettings =
[self _bracketSettingsArray:captureConnection withCaptureState:state];
SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,
@"Bracket photo count cannot exceed maximum count");
// Specify the output file format and raw pixel format
AVCapturePhotoBracketSettings *photoSettings =
[AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType
processedFormat:@{
AVVideoCodecKey : AVVideoCodecJPEG
}
bracketedSettings:bracketedSettings];
// Enable HRSI if necessary
if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
}
// If lens stabilization is supported, enable the stabilization when device is moving
if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&
[_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {
photoSettings.lensStabilizationEnabled = YES;
}
return photoSettings;
}
  571. - (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
  572. withCaptureState:(SCManagedCapturerState *)state
  573. {
  574. NSInteger const stillCount = 1;
  575. NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
  576. AVCaptureDevice *device = [stillImageConnection inputDevice];
  577. CMTime exposureDuration = device.exposureDuration;
  578. if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {
  579. exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];
  580. }
  581. AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
  582. manualExposureSettingsWithExposureDuration:exposureDuration
  583. ISO:AVCaptureISOCurrent];
  584. for (NSInteger i = 0; i < stillCount; i++) {
  585. [bracketSettingsArray addObject:settings];
  586. }
  587. return [bracketSettingsArray copy];
  588. }
  589. - (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status
  590. {
  591. switch (status) {
  592. case SCManagedPhotoCapturerStatusPrepareToCapture:
  593. return @"PhotoCapturerStatusPrepareToCapture";
  594. case SCManagedPhotoCapturerStatusWillCapture:
  595. return @"PhotoCapturerStatusWillCapture";
  596. case SCManagedPhotoCapturerStatusDidFinishProcess:
  597. return @"PhotoCapturerStatusDidFinishProcess";
  598. }
  599. }
  600. @end