2014 Snapchat source code.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1107 lines
53 KiB

  1. //
  2. // SCManagedVideoCapturer.m
  3. // Snapchat
  4. //
  5. // Created by Liu Liu on 5/1/15.
  6. // Copyright (c) 2015 Liu Liu. All rights reserved.
  7. //
  8. #import "SCManagedVideoCapturer.h"
  9. #import "NSURL+Asset.h"
  10. #import "SCAudioCaptureSession.h"
  11. #import "SCCameraTweaks.h"
  12. #import "SCCapturerBufferedVideoWriter.h"
  13. #import "SCCoreCameraLogger.h"
  14. #import "SCLogger+Camera.h"
  15. #import "SCManagedCapturer.h"
  16. #import "SCManagedFrameHealthChecker.h"
  17. #import "SCManagedVideoCapturerLogger.h"
  18. #import "SCManagedVideoCapturerTimeObserver.h"
  19. #import <SCAudio/SCAudioSession.h>
  20. #import <SCAudio/SCMutableAudioSession.h>
  21. #import <SCBase/SCMacros.h>
  22. #import <SCCameraFoundation/SCManagedAudioDataSourceListenerAnnouncer.h>
  23. #import <SCFoundation/SCAssertWrapper.h>
  24. #import <SCFoundation/SCCoreGraphicsUtils.h>
  25. #import <SCFoundation/SCDeviceName.h>
  26. #import <SCFoundation/SCFuture.h>
  27. #import <SCFoundation/SCLog.h>
  28. #import <SCFoundation/SCQueuePerformer.h>
  29. #import <SCFoundation/SCTrace.h>
  30. #import <SCFoundation/UIImage+CVPixelBufferRef.h>
  31. #import <SCImageProcess/SCSnapVideoFrameRawData.h>
  32. #import <SCImageProcess/SCVideoFrameRawDataCollector.h>
  33. #import <SCImageProcess/SnapVideoMetadata.h>
  34. #import <SCLogger/SCCameraMetrics.h>
  35. #import <SCLogger/SCLogger+Performance.h>
  36. #import <SCLogger/SCLogger.h>
  37. #import <SCAudioScope/SCAudioSessionExperimentAdapter.h>
  38. @import CoreMedia;
  39. @import ImageIO;
// Label identifying the camera's audio capture session.
static NSString *const kSCAudioCaptureAudioSessionLabel = @"CAMERA";
// wild card audio queue error code
static NSInteger const kSCAudioQueueErrorWildCard = -50;
// kAudioHardwareIllegalOperationError, it means hardware failure
static NSInteger const kSCAudioQueueErrorHardware = 1852797029;

// Lifecycle states of the capturer. Transitions are driven on the capturer's
// serial performer queue; the property itself is atomic because it is read
// from other threads (see the class extension below in the original file).
typedef NS_ENUM(NSUInteger, SCManagedVideoCapturerStatus) {
    SCManagedVideoCapturerStatusUnknown,
    SCManagedVideoCapturerStatusIdle,
    // Audio session is being configured ahead of an upcoming recording.
    SCManagedVideoCapturerStatusPrepareToRecord,
    // Audio session prepared; waiting for the start-recording block to run.
    SCManagedVideoCapturerStatusReadyForRecording,
    SCManagedVideoCapturerStatusRecording,
    SCManagedVideoCapturerStatusError,
};

// Logging helpers that prefix every message with the capturer's tag.
#define SCLogVideoCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)
#define SCLogVideoCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)
#define SCLogVideoCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)
// Private class extension: audio-capture delegate conformance plus internally
// mutable state.
@interface SCManagedVideoCapturer () <SCAudioCaptureSessionDelegate>

// This value has to be atomic because it is read on a different thread (write
// on output queue, as always)
@property (atomic, assign, readwrite) SCManagedVideoCapturerStatus status;

// Delay before the first audio buffer is written.
// NOTE(review): semantics inferred from the name; it is reset to kCMTimeInvalid
// at recording start — confirm where it is measured.
@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;

@end
// Labels for the capturer's serial dispatch queues / performers.
static char *const kSCManagedVideoCapturerQueueLabel = "com.snapchat.managed-video-capturer-queue";
static char *const kSCManagedVideoCapturerPromiseQueueLabel = "com.snapchat.video-capture-promise";

// Error domain and codes surfaced via _lastError and the delegate callbacks.
static NSString *const kSCManagedVideoCapturerErrorDomain = @"kSCManagedVideoCapturerErrorDomain";
static NSInteger const kSCManagedVideoCapturerCannotAddAudioVideoInput = 1001;
static NSInteger const kSCManagedVideoCapturerEmptyFrame = 1002;
static NSInteger const kSCManagedVideoCapturerStopBeforeStart = 1003;
static NSInteger const kSCManagedVideoCapturerStopWithoutStart = 1004;
static NSInteger const kSCManagedVideoCapturerZeroVideoSize = -111;

// Sampling rate for video content complexity measurement.
// NOTE(review): no use is visible in this chunk — confirm against the rest of the file.
static NSUInteger const kSCVideoContentComplexitySamplingRate = 90;

// This is the maximum time we will wait for the Recording Capturer pipeline to drain
// When video stabilization is turned on the extra frame delay is around 20 frames.
// @30 fps this is 0.66 seconds
static NSTimeInterval const kSCManagedVideoCapturerStopRecordingDeadline = 1.0;

static const char *SCPlaceholderImageGenerationQueueLabel = "com.snapchat.video-capturer-placeholder-queue";
static const char *SCVideoRecordingPreparationQueueLabel = "com.snapchat.video-recording-preparation-queue";
  77. static dispatch_queue_t SCPlaceholderImageGenerationQueue(void)
  78. {
  79. static dispatch_queue_t queue;
  80. static dispatch_once_t onceToken;
  81. dispatch_once(&onceToken, ^{
  82. queue = dispatch_queue_create(SCPlaceholderImageGenerationQueueLabel, DISPATCH_QUEUE_SERIAL);
  83. });
  84. return queue;
  85. }
// Second class extension: adopts the buffered video writer's delegate protocol
// so the writer can report write failures back to the capturer.
@interface SCManagedVideoCapturer () <SCCapturerBufferedVideoWriterDelegate>
@end
@implementation SCManagedVideoCapturer {
    // Maximum allowed duration for the current recording session.
    NSTimeInterval _maxDuration;
    // CACurrentMediaTime() captured when recording actually began.
    NSTimeInterval _recordStartTime;
    SCCapturerBufferedVideoWriter *_videoWriter;
    BOOL _hasWritten;
    // Serial performer on which all capturer state is mutated.
    SCQueuePerformer *_performer;
    // Lazily created; used only while preparing the audio session.
    SCQueuePerformer *_videoPreparationPerformer;
    SCAudioCaptureSession *_audioCaptureSession;
    NSError *_lastError;
    UIImage *_placeholderImage;
    // For logging purpose
    BOOL _isVideoSnap;
    NSDictionary *_videoOutputSettings;
    // The following value is used to control the encoder shutdown following a stop recording message.
    // When a shutdown is requested this value will be the timestamp of the last captured frame.
    CFTimeInterval _stopTime;
    // Monotonic counter distinguishing successive stop requests (guards the
    // deferred stop in -stopRecordingAsynchronously).
    NSInteger _stopSession;
    SCAudioConfigurationToken *_preparedAudioConfiguration;
    SCAudioConfigurationToken *_audioConfiguration;
    // Signaled when audio-session preparation completes; waited on in
    // -prepareForRecordingWithAudioConfiguration:.
    dispatch_semaphore_t _startRecordingSemaphore;
    // Storage for the raw frame data.
    NSInteger _rawDataFrameNum;
    NSURL *_rawDataURL;
    SCVideoFrameRawDataCollector *_videoFrameRawDataCollector;
    CMTime _startSessionTime;
    // Actual processing time of the first frame. Also used for the camera timer animation start offset.
    NSTimeInterval _startSessionRealTime;
    CMTime _endSessionTime;
    // Random identifier distinguishing this session from stale async callbacks.
    sc_managed_capturer_recording_session_t _sessionId;
    SCManagedVideoCapturerTimeObserver *_timeObserver;
    SCManagedVideoCapturerLogger *_capturerLogger;
    CGSize _outputSize;
    BOOL _isFrontFacingCamera;
    // Completed with the recorded video (or error) after -_stopRecording.
    SCPromise<id<SCManagedRecordedVideo>> *_recordedVideoPromise;
    SCManagedAudioDataSourceListenerAnnouncer *_announcer;
    NSString *_captureSessionID;
    CIContext *_ciContext;
}

@synthesize performer = _performer;
  127. - (instancetype)init
  128. {
  129. SCTraceStart();
  130. return [self initWithQueuePerformer:[[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerQueueLabel
  131. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  132. queueType:DISPATCH_QUEUE_SERIAL
  133. context:SCQueuePerformerContextCamera]];
  134. }
  135. - (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer
  136. {
  137. SCTraceStart();
  138. self = [super init];
  139. if (self) {
  140. _performer = queuePerformer;
  141. _audioCaptureSession = [[SCAudioCaptureSession alloc] init];
  142. _audioCaptureSession.delegate = self;
  143. _announcer = [SCManagedAudioDataSourceListenerAnnouncer new];
  144. self.status = SCManagedVideoCapturerStatusIdle;
  145. _capturerLogger = [[SCManagedVideoCapturerLogger alloc] init];
  146. _startRecordingSemaphore = dispatch_semaphore_create(0);
  147. }
  148. return self;
  149. }
// Log the final session state at teardown to help debug ordering issues.
- (void)dealloc
{
    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before dealloc: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
}
// Snapshot of the current session: start/end times plus the session identifier.
- (SCVideoCaptureSessionInfo)activeSession
{
    return SCVideoCaptureSessionInfoMake(_startSessionTime, _endSessionTime, _sessionId);
}
  159. - (CGSize)defaultSizeForDeviceFormat:(AVCaptureDeviceFormat *)format
  160. {
  161. SCTraceStart();
  162. // if there is no device, and no format
  163. if (format == nil) {
  164. // hard code 720p
  165. return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
  166. kSCManagedCapturerDefaultVideoActiveFormatHeight);
  167. }
  168. CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
  169. CGSize size = CGSizeMake(videoDimensions.width, videoDimensions.height);
  170. if (videoDimensions.width > kSCManagedCapturerDefaultVideoActiveFormatWidth &&
  171. videoDimensions.height > kSCManagedCapturerDefaultVideoActiveFormatHeight) {
  172. CGFloat scaleFactor = MAX((kSCManagedCapturerDefaultVideoActiveFormatWidth / videoDimensions.width),
  173. (kSCManagedCapturerDefaultVideoActiveFormatHeight / videoDimensions.height));
  174. size = SCSizeMakeAlignTo(SCSizeApplyScale(size, scaleFactor), 2);
  175. }
  176. if ([SCDeviceName isIphoneX]) {
  177. size = SCSizeApplyScale(size, kSCIPhoneXCapturedImageVideoCropRatio);
  178. }
  179. return size;
  180. }
  181. - (CGSize)cropSize:(CGSize)size toAspectRatio:(CGFloat)aspectRatio
  182. {
  183. if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {
  184. return size;
  185. }
  186. // video input is always in landscape mode
  187. aspectRatio = 1.0 / aspectRatio;
  188. if (size.width > size.height * aspectRatio) {
  189. size.width = size.height * aspectRatio;
  190. } else {
  191. size.height = size.width / aspectRatio;
  192. }
  193. return CGSizeMake(roundf(size.width / 2) * 2, roundf(size.height / 2) * 2);
  194. }
// Builds the default recording output settings for a device format: the default
// size cropped to the app's aspect ratio, with a transcoding bit rate computed
// by SnapVideoMetadata. Output type is always video snap here.
- (SCManagedVideoCapturerOutputSettings *)defaultRecordingOutputSettingsWithDeviceFormat:
    (AVCaptureDeviceFormat *)deviceFormat
{
    SCTraceStart();
    CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio();
    CGSize outputSize = [self defaultSizeForDeviceFormat:deviceFormat];
    outputSize = [self cropSize:outputSize toAspectRatio:aspectRatio];
    // [TODO](Chao): remove the dependency of SCManagedVideoCapturer on SnapVideoMetaData
    NSInteger videoBitRate = [SnapVideoMetadata averageTranscodingBitRate:outputSize
                                                              isRecording:YES
                                                              highQuality:YES
                                                                 duration:0
                                                               iFrameOnly:NO
                                                     originalVideoBitRate:0
                                                 overlayImageFileSizeBits:0
                                                        videoPlaybackRate:1
                                                            isLagunaVideo:NO
                                                        hasOverlayToBlend:NO
                                                               sourceType:SCSnapVideoFilterSourceTypeUndefined];
    SCTraceSignal(@"Setup transcoding video bitrate");
    [_capturerLogger logStartingStep:kSCCapturerStartingStepTranscodeingVideoBitrate];
    SCManagedVideoCapturerOutputSettings *outputSettings =
        [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:outputSize.width
                                                             height:outputSize.height
                                                       videoBitRate:videoBitRate
                                                       audioBitRate:64000.0
                                                   keyFrameInterval:15
                                                         outputType:SCManagedVideoCapturerOutputTypeVideoSnap];
    return outputSettings;
}
  225. - (SCQueuePerformer *)_getVideoPreparationPerformer
  226. {
  227. SCAssert([_performer isCurrentPerformer], @"must run on _performer");
  228. if (!_videoPreparationPerformer) {
  229. _videoPreparationPerformer = [[SCQueuePerformer alloc] initWithLabel:SCVideoRecordingPreparationQueueLabel
  230. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  231. queueType:DISPATCH_QUEUE_SERIAL
  232. context:SCQueuePerformerContextCamera];
  233. }
  234. return _videoPreparationPerformer;
  235. }
// Pre-configures the audio session so a subsequent start-recording call can
// begin immediately. Blocks the capturer's performer until configuration
// completes (semaphore handshake), then reports the result to the delegate.
- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        self.status = SCManagedVideoCapturerStatusPrepareToRecord;
        // Release any previously held configuration before requesting a new one.
        if (_audioConfiguration) {
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
        }
        __block NSError *audioSessionError = nil;
        _preparedAudioConfiguration = _audioConfiguration =
            [SCAudioSessionExperimentAdapter configureWith:configuration
                                                 performer:[self _getVideoPreparationPerformer]
                                                completion:^(NSError *error) {
                                                    audioSessionError = error;
                                                    // Only signal while still preparing; a state change
                                                    // means another flow now owns the semaphore.
                                                    if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
                                                        dispatch_semaphore_signal(_startRecordingSemaphore);
                                                    }
                                                }];
        // Wait until preparation for recording is done
        dispatch_semaphore_wait(_startRecordingSemaphore, DISPATCH_TIME_FOREVER);
        // NOTE(review): delivered even when audioSessionError is nil — presumably
        // the delegate treats nil as success; confirm.
        [_delegate managedVideoCapturer:self
                            didGetError:audioSessionError
                                forType:SCManagedVideoCapturerInfoAudioSessionError
                                session:self.activeSession];
    }];
}
// Kicks off a recording session and returns its session info immediately; all
// real work runs asynchronously on _performer. If the capturer was prepared
// beforehand (status == PrepareToRecord) recording starts right away;
// otherwise the audio session is configured first and recording starts from
// that configuration's completion callback.
- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                maxDuration:(NSTimeInterval)maxDuration
                                                                      toURL:(NSURL *)URL
                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                orientation:(AVCaptureVideoOrientation)videoOrientation
                                                           captureSessionID:(NSString *)captureSessionID
{
    SCTraceStart();
    _captureSessionID = [captureSessionID copy];
    [_capturerLogger prepareForStartingLog];
    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                    isUniqueEvent:NO];
    NSTimeInterval startTime = CACurrentMediaTime();
    [[SCLogger sharedInstance] logPreCaptureOperationRequestedAt:startTime];
    [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationRequested];
    // Fresh random id distinguishes this session from stale async callbacks.
    _sessionId = arc4random();
    // Set a invalid time so that we don't process videos when no frame available
    _startSessionTime = kCMTimeInvalid;
    _endSessionTime = kCMTimeInvalid;
    _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    _audioQueueStarted = NO;
    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo at start of recording: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    [_performer performImmediatelyIfCurrentPerformer:^{
        _maxDuration = maxDuration;
        // Deferred body performing the actual start once the audio session is ready.
        dispatch_block_t startRecordingBlock = ^{
            _rawDataFrameNum = 0;
            // Begin audio recording asynchronously, first, need to have correct audio session.
            SCTraceStart();
            SCLogVideoCapturerInfo(@"Dequeue begin recording with audio session change delay: %lf seconds",
                                   CACurrentMediaTime() - startTime);
            // Bail out if recording was cancelled/stopped before this block ran.
            if (self.status != SCManagedVideoCapturerStatusReadyForRecording) {
                SCLogVideoCapturerInfo(@"SCManagedVideoCapturer status: %lu", (unsigned long)self.status);
                // We may already released, but this should be OK.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }
            if (_preparedAudioConfiguration != _audioConfiguration) {
                SCLogVideoCapturerInfo(
                    @"SCManagedVideoCapturer has mismatched audio session token, prepared: %@, have: %@",
                    _preparedAudioConfiguration.token, _audioConfiguration.token);
                // We are on a different audio session token already.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }
            // Divide start recording workflow into different steps to log delay time.
            // And checkpoint is the end of a step
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession];
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_session_start_end"];
            SCLogVideoCapturerInfo(@"Prepare to begin recording");
            _lastError = nil;
            // initialize stopTime to a number much larger than the CACurrentMediaTime() which is the time from Jan 1,
            // 2001
            _stopTime = kCFAbsoluteTimeIntervalSince1970;
            // Restart everything
            _hasWritten = NO;
            SCManagedVideoCapturerOutputSettings *finalOutputSettings =
                outputSettings ? outputSettings : [self defaultRecordingOutputSettingsWithDeviceFormat:deviceFormat];
            _isVideoSnap = finalOutputSettings.outputType == SCManagedVideoCapturerOutputTypeVideoSnap;
            // NOTE(review): width/height are swapped here — presumably the settings
            // are landscape while _outputSize is portrait; confirm.
            _outputSize = CGSizeMake(finalOutputSettings.height, finalOutputSettings.width);
            [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoRecordingStart
                                     parameters:@{
                                         @"video_width" : @(finalOutputSettings.width),
                                         @"video_height" : @(finalOutputSettings.height),
                                         @"bit_rate" : @(finalOutputSettings.videoBitRate),
                                         @"is_video_snap" : @(_isVideoSnap),
                                     }];
            _outputURL = [URL copy];
            _rawDataURL = [_outputURL URLByAppendingPathExtension:@"dat"];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings];
            // Make sure the raw frame data file is gone
            SCTraceSignal(@"Setup video frame raw data");
            [[NSFileManager defaultManager] removeItemAtURL:_rawDataURL error:NULL];
            if ([SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding]) {
                if (!_videoFrameRawDataCollector) {
                    _videoFrameRawDataCollector = [[SCVideoFrameRawDataCollector alloc] initWithPerformer:_performer];
                }
                [_videoFrameRawDataCollector prepareForCollectingVideoFrameRawDataWithRawDataURL:_rawDataURL];
            }
            [_capturerLogger logStartingStep:kSCCapturerStartingStepVideoFrameRawData];
            SCLogVideoCapturerInfo(@"Prepare to begin audio recording");
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_queue_start_begin"];
            [self _beginAudioQueueRecordingWithCompleteHandler:^(NSError *error) {
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                                 uniqueId:_captureSessionID
                                                 stepName:@"audio_queue_start_end"];
                if (error) {
                    [_delegate managedVideoCapturer:self
                                        didGetError:error
                                            forType:SCManagedVideoCapturerInfoAudioQueueError
                                            session:sessionInfo];
                } else {
                    _audioQueueStarted = YES;
                }
                if (self.status == SCManagedVideoCapturerStatusRecording) {
                    [_delegate managedVideoCapturer:self didBeginAudioRecording:sessionInfo];
                }
            }];
            // Call this delegate first so that we have proper state transition from begin recording to finish / error
            [_delegate managedVideoCapturer:self didBeginVideoRecording:sessionInfo];
            // We need to start with a fresh recording file, make sure it's gone
            [[NSFileManager defaultManager] removeItemAtURL:_outputURL error:NULL];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioRecording];
            SCTraceSignal(@"Setup asset writer");
            NSError *error = nil;
            _videoWriter = [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:_performer
                                                                          outputURL:self.outputURL
                                                                           delegate:self
                                                                              error:&error];
            if (error) {
                // Writer creation failed: surface both callbacks and abort the start.
                self.status = SCManagedVideoCapturerStatusError;
                _lastError = error;
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self
                                    didGetError:error
                                        forType:SCManagedVideoCapturerInfoAssetWriterError
                                        session:sessionInfo];
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration];
            if (![_videoWriter prepareWritingWithOutputSettings:finalOutputSettings]) {
                _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                 code:kSCManagedVideoCapturerCannotAddAudioVideoInput
                                             userInfo:nil];
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }
            SCTraceSignal(@"Observe asset writer status change");
            SCCAssert(_placeholderImage == nil, @"placeholderImage should be nil");
            self.status = SCManagedVideoCapturerStatusRecording;
            // Only log the recording delay event from camera view (excluding video note recording)
            if (_isVideoSnap) {
                [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsRecordingDelay
                                                   uniqueId:@"VIDEO"
                                                 parameters:@{
                                                     @"type" : @"video"
                                                 }];
            }
            _recordStartTime = CACurrentMediaTime();
        };
        [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                         stepName:@"audio_session_start_begin"];
        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
            // Audio session was configured ahead of time; start immediately.
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            startRecordingBlock();
        } else {
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            if (_audioConfiguration) {
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration
                                                               performer:nil
                                                              completion:nil];
            }
            // Configure the audio session now; recording starts from its callback.
            _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
                configureWith:audioConfiguration
                    performer:_performer
                   completion:^(NSError *error) {
                       if (error) {
                           [_delegate managedVideoCapturer:self
                                               didGetError:error
                                                   forType:SCManagedVideoCapturerInfoAudioSessionError
                                                   session:sessionInfo];
                       }
                       startRecordingBlock();
                   }];
        }
    }];
    return sessionInfo;
}
  446. - (NSError *)_handleRetryBeginAudioRecordingErrorCode:(NSInteger)errorCode
  447. error:(NSError *)error
  448. micResult:(NSDictionary *)resultInfo
  449. {
  450. SCTraceStart();
  451. NSString *resultStr = SC_CAST_TO_CLASS_OR_NIL(resultInfo[SCAudioSessionRetryDataSourceInfoKey], NSString);
  452. BOOL changeMicSuccess = [resultInfo[SCAudioSessionRetryDataSourceResultKey] boolValue];
  453. if (!error) {
  454. SCManagedVideoCapturerInfoType type = SCManagedVideoCapturerInfoAudioQueueRetrySuccess;
  455. if (changeMicSuccess) {
  456. if (errorCode == kSCAudioQueueErrorWildCard) {
  457. type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue;
  458. } else if (errorCode == kSCAudioQueueErrorHardware) {
  459. type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware;
  460. }
  461. }
  462. [_delegate managedVideoCapturer:self didGetError:nil forType:type session:self.activeSession];
  463. } else {
  464. error = [self _appendInfo:resultStr forInfoKey:@"retry_datasource_result" toError:error];
  465. SCLogVideoCapturerError(@"Retry setting audio session failed with error:%@", error);
  466. }
  467. return error;
  468. }
  469. - (BOOL)_isBottomMicBrokenCode:(NSInteger)errorCode
  470. {
  471. // we consider both -50 and 1852797029 as a broken microphone case
  472. return (errorCode == kSCAudioQueueErrorWildCard || errorCode == kSCAudioQueueErrorHardware);
  473. }
// Starts the audio queue. On known "broken bottom microphone" error codes
// (while a recording is pending or in progress) it switches to the front
// microphone, re-requests the audio session, and retries once; the (possibly
// annotated) final error is delivered to `block`.
- (void)_beginAudioQueueRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    SCAssert(block, @"block can not be nil");
    @weakify(self);
    void (^beginAudioBlock)(NSError *error) = ^(NSError *error) {
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [_performer performImmediatelyIfCurrentPerformer:^{
            SCTraceStart();
            NSInteger errorCode = error.code;
            // Retry only for mic-hardware codes and only while recording is live.
            if ([self _isBottomMicBrokenCode:errorCode] &&
                (self.status == SCManagedVideoCapturerStatusReadyForRecording ||
                 self.status == SCManagedVideoCapturerStatusRecording)) {
                SCLogVideoCapturerError(@"Start to retry begin audio queue (error code: %@)", @(errorCode));
                // use front microphone to retry
                NSDictionary *resultInfo = [[SCAudioSession sharedInstance] tryUseFrontMicWithErrorCode:errorCode];
                [self _retryRequestRecordingWithCompleteHandler:^(NSError *error) {
                    // then retry audio queue again
                    [_audioCaptureSession
                        beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                      completionHandler:^(NSError *innerError) {
                                                          NSError *modifyError = [self
                                                              _handleRetryBeginAudioRecordingErrorCode:errorCode
                                                                                                 error:innerError
                                                                                             micResult:resultInfo];
                                                          block(modifyError);
                                                      }];
                }];
            } else {
                block(error);
            }
        }];
    };
    [_audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                        completionHandler:^(NSError *error) {
                                                            beginAudioBlock(error);
                                                        }];
}
  513. // This method must not change nullability of error, it should only either append info into userInfo,
  514. // or return the NSError as it is.
  515. - (NSError *)_appendInfo:(NSString *)infoStr forInfoKey:(NSString *)infoKey toError:(NSError *)error
  516. {
  517. if (!error || infoStr.length == 0 || infoKey.length == 0 || error.domain.length == 0) {
  518. return error;
  519. }
  520. NSMutableDictionary *errorInfo = [[error userInfo] mutableCopy];
  521. errorInfo[infoKey] = infoStr.length > 0 ? infoStr : @"(null)";
  522. return [NSError errorWithDomain:error.domain code:error.code userInfo:errorInfo];
  523. }
// Re-requests the current audio configuration (used after switching
// microphones), reporting any configuration error to the delegate before
// invoking `block`.
- (void)_retryRequestRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    if (_audioConfiguration) {
        [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
    }
    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    // Note: _audioConfiguration.configuration is evaluated as an argument
    // before the assignment below overwrites the ivar.
    _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
        configureWith:_audioConfiguration.configuration
            performer:_performer
           completion:^(NSError *error) {
               if (error) {
                   [_delegate managedVideoCapturer:self
                                       didGetError:error
                                           forType:SCManagedVideoCapturerInfoAudioSessionError
                                           session:sessionInfo];
               }
               if (block) {
                   block(error);
               }
           }];
}
  546. #pragma SCCapturerBufferedVideoWriterDelegate
  547. - (void)videoWriterDidFailWritingWithError:(NSError *)error
  548. {
  549. // If it failed, we call the delegate method, release everything else we
  550. // have, well, on the output queue obviously
  551. SCTraceStart();
  552. [_performer performImmediatelyIfCurrentPerformer:^{
  553. SCTraceStart();
  554. SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
  555. [_outputURL reloadAssetKeys];
  556. [self _cleanup];
  557. [self _disposeAudioRecording];
  558. self.status = SCManagedVideoCapturerStatusError;
  559. _lastError = error;
  560. _placeholderImage = nil;
  561. [_delegate managedVideoCapturer:self
  562. didGetError:error
  563. forType:SCManagedVideoCapturerInfoAssetWriterError
  564. session:sessionInfo];
  565. [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
  566. }];
  567. }
  568. - (void)_willStopRecording
  569. {
  570. if (self.status == SCManagedVideoCapturerStatusRecording) {
  571. // To notify UI continue the preview processing
  572. SCQueuePerformer *promisePerformer =
  573. [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerPromiseQueueLabel
  574. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  575. queueType:DISPATCH_QUEUE_SERIAL
  576. context:SCQueuePerformerContextCamera];
  577. _recordedVideoPromise = [[SCPromise alloc] initWithPerformer:promisePerformer];
  578. [_delegate managedVideoCapturer:self
  579. willStopWithRecordedVideoFuture:_recordedVideoPromise.future
  580. videoSize:_outputSize
  581. placeholderImage:_placeholderImage
  582. session:self.activeSession];
  583. }
  584. }
// Finalizes (or aborts) the current recording on the performer queue.
// While recording with a valid end time: finishes the writer, drains raw frame
// data, validates the produced file, and completes the recorded-video promise.
// Otherwise completes the promise with the appropriate error code.
- (void)_stopRecording
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue");
    // Reset stop session as well as stop time.
    ++_stopSession;
    _stopTime = kCFAbsoluteTimeIntervalSince1970;
    // Take local ownership of the promise so later sessions can't clobber it.
    SCPromise<id<SCManagedRecordedVideo>> *recordedVideoPromise = _recordedVideoPromise;
    _recordedVideoPromise = nil;
    sc_managed_capturer_recording_session_t sessionId = _sessionId;
    if (self.status == SCManagedVideoCapturerStatusRecording) {
        self.status = SCManagedVideoCapturerStatusIdle;
        if (CMTIME_IS_VALID(_endSessionTime)) {
            [_videoWriter
                finishWritingAtSourceTime:_endSessionTime
                    withCompletionHanlder:^{
                        // actually, make sure everything happens on outputQueue
                        [_performer performImmediatelyIfCurrentPerformer:^{
                            // A new session started while the writer was finishing;
                            // this completion is stale, drop it.
                            if (sessionId != _sessionId) {
                                SCLogVideoCapturerError(@"SessionId mismatch: before: %@, after: %@", @(sessionId),
                                                        @(_sessionId));
                                return;
                            }
                            [self _disposeAudioRecording];
                            // Log the video snap recording success event w/ parameters, not including video
                            // note
                            if (_isVideoSnap) {
                                [SnapVideoMetadata logVideoEvent:kSCCameraMetricsVideoRecordingSuccess
                                                   videoSettings:_videoOutputSettings
                                                          isSave:NO];
                            }
                            // Runs after optional raw-frame-data drain; validates the
                            // written file and completes the promise either way.
                            void (^stopRecordingCompletionBlock)(NSURL *) = ^(NSURL *rawDataURL) {
                                SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue");
                                SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
                                [self _cleanup];
                                [[SCLogger sharedInstance] logTimedEventStart:@"SNAP_VIDEO_SIZE_LOADING"
                                                                     uniqueId:@""
                                                                isUniqueEvent:NO];
                                CGSize videoSize =
                                    [SnapVideoMetadata videoSizeForURL:_outputURL waitWhileLoadingTracksIfNeeded:YES];
                                [[SCLogger sharedInstance] logTimedEventEnd:@"SNAP_VIDEO_SIZE_LOADING"
                                                                   uniqueId:@""
                                                                 parameters:nil];
                                // Log error if video file is not really ready
                                if (videoSize.width == 0.0 || videoSize.height == 0.0) {
                                    _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                                     code:kSCManagedVideoCapturerZeroVideoSize
                                                                 userInfo:nil];
                                    [recordedVideoPromise completeWithError:_lastError];
                                    [_delegate managedVideoCapturer:self
                                                   didFailWithError:_lastError
                                                            session:sessionInfo];
                                    _placeholderImage = nil;
                                    return;
                                }
                                // If the video duration is too short, the future object will complete
                                // with error as well
                                SCManagedRecordedVideo *recordedVideo =
                                    [[SCManagedRecordedVideo alloc] initWithVideoURL:_outputURL
                                                                 rawVideoDataFileURL:_rawDataURL
                                                                    placeholderImage:_placeholderImage
                                                                 isFrontFacingCamera:_isFrontFacingCamera];
                                [recordedVideoPromise completeWithValue:recordedVideo];
                                [_delegate managedVideoCapturer:self
                                    didSucceedWithRecordedVideo:recordedVideo
                                                        session:sessionInfo];
                                _placeholderImage = nil;
                            };
                            if (_videoFrameRawDataCollector) {
                                [_videoFrameRawDataCollector
                                    drainFrameDataCollectionWithCompletionHandler:^(NSURL *rawDataURL) {
                                        stopRecordingCompletionBlock(rawDataURL);
                                    }];
                            } else {
                                stopRecordingCompletionBlock(nil);
                            }
                        }];
                    }];
        } else {
            // Recording never saw a valid frame: abort with the empty-frame error.
            [self _disposeAudioRecording];
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            self.status = SCManagedVideoCapturerStatusError;
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerEmptyFrame
                                         userInfo:nil];
            _placeholderImage = nil;
            [recordedVideoPromise completeWithError:_lastError];
            [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
        }
    } else {
        // Stop requested before recording ever started; distinguish the two cases.
        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord ||
            self.status == SCManagedVideoCapturerStatusReadyForRecording) {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopBeforeStart
                                         userInfo:nil];
        } else {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopWithoutStart
                                         userInfo:nil];
        }
        SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
        [self _cleanup];
        _placeholderImage = nil;
        if (_audioConfiguration) {
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
            _audioConfiguration = nil;
        }
        [recordedVideoPromise completeWithError:_lastError];
        [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
        self.status = SCManagedVideoCapturerStatusIdle;
        [_capturerLogger logEventIfStartingTooSlow];
    }
}
  699. - (void)stopRecordingAsynchronously
  700. {
  701. SCTraceStart();
  702. NSTimeInterval stopTime = CACurrentMediaTime();
  703. [_performer performImmediatelyIfCurrentPerformer:^{
  704. _stopTime = stopTime;
  705. NSInteger stopSession = _stopSession;
  706. [self _willStopRecording];
  707. [_performer perform:^{
  708. // If we haven't stopped yet, call the stop now nevertheless.
  709. if (stopSession == _stopSession) {
  710. [self _stopRecording];
  711. }
  712. }
  713. after:kSCManagedVideoCapturerStopRecordingDeadline];
  714. }];
  715. }
// Cancels an in-flight or pending recording on the capture performer queue.
// While recording: dispose audio, cancel the writer, clean up, then notify
// the delegate. Before recording has produced frames: clean up, hand back
// the audio session configuration, then notify. Note the status transition
// happens BEFORE -_cleanup in the recording branch and AFTER it in the
// pre-recording branch — do not reorder.
- (void)cancelRecordingAsynchronously
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        SCLogVideoCapturerInfo(@"Cancel recording. status: %lu", (unsigned long)self.status);
        if (self.status == SCManagedVideoCapturerStatusRecording) {
            // Flip status first so sample buffers arriving during teardown
            // are dropped by the data-source callbacks.
            self.status = SCManagedVideoCapturerStatusIdle;
            [self _disposeAudioRecording];
            [_videoWriter cancelWriting];
            // Capture the session info before -_cleanup resets _sessionId.
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            _placeholderImage = nil;
            [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo];
        } else if ((self.status == SCManagedVideoCapturerStatusPrepareToRecord) ||
                   (self.status == SCManagedVideoCapturerStatusReadyForRecording)) {
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            self.status = SCManagedVideoCapturerStatusIdle;
            _placeholderImage = nil;
            if (_audioConfiguration) {
                // Recording never started, so -_disposeAudioRecording was not
                // called; relinquish the audio configuration directly here.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration
                                                               performer:nil
                                                              completion:nil];
                _audioConfiguration = nil;
            }
            [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo];
        }
        [_capturerLogger logEventIfStartingTooSlow];
    }];
}
  747. - (void)addTimedTask:(SCTimedTask *)task
  748. {
  749. [_performer performImmediatelyIfCurrentPerformer:^{
  750. // Only allow to add observers when we are not recording.
  751. if (!self->_timeObserver) {
  752. self->_timeObserver = [SCManagedVideoCapturerTimeObserver new];
  753. }
  754. [self->_timeObserver addTimedTask:task];
  755. SCLogVideoCapturerInfo(@"Added timetask: %@", task);
  756. }];
  757. }
  758. - (void)clearTimedTasks
  759. {
  760. // _timeObserver will be initialized lazily when adding timed tasks
  761. SCLogVideoCapturerInfo(@"Clearing time observer");
  762. [_performer performImmediatelyIfCurrentPerformer:^{
  763. if (self->_timeObserver) {
  764. self->_timeObserver = nil;
  765. }
  766. }];
  767. }
  768. - (void)_cleanup
  769. {
  770. [_videoWriter cleanUp];
  771. _timeObserver = nil;
  772. SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before cleanup: %@",
  773. SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
  774. _startSessionTime = kCMTimeInvalid;
  775. _endSessionTime = kCMTimeInvalid;
  776. _firstWrittenAudioBufferDelay = kCMTimeInvalid;
  777. _sessionId = 0;
  778. _captureSessionID = nil;
  779. _audioQueueStarted = NO;
  780. }
// Tears down the audio capture pipeline synchronously, then asynchronously
// relinquishes the audio session configuration, logging each step of the
// kSCCameraMetricsAudioDelay timed event along the way. Must run on _performer.
- (void)_disposeAudioRecording
{
    SCLogVideoCapturerInfo(@"Disposing audio recording");
    SCAssert([_performer isCurrentPerformer], @"");
    // Setup the audio session token correctly. Capture the token and the
    // capture session ID in locals: the ivars are cleared below / may be
    // reused before the asynchronous completion blocks run.
    SCAudioConfigurationToken *audioConfiguration = _audioConfiguration;
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                     uniqueId:_captureSessionID
                                     stepName:@"audio_queue_stop_begin"];
    NSString *captureSessionID = _captureSessionID;
    [_audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:^{
        [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                         uniqueId:captureSessionID
                                         stepName:@"audio_queue_stop_end"];
        SCLogVideoCapturerInfo(@"Did dispose audio recording");
        if (audioConfiguration) {
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:captureSessionID
                                             stepName:@"audio_session_stop_begin"];
            [SCAudioSessionExperimentAdapter
                relinquishConfiguration:audioConfiguration
                              performer:_performer
                             completion:^(NSError *_Nullable error) {
                                 [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                                                  uniqueId:captureSessionID
                                                                  stepName:@"audio_session_stop_end"];
                                 // The audio-delay timed event ends only after the
                                 // session configuration has been fully released.
                                 [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsAudioDelay
                                                                    uniqueId:captureSessionID
                                                                  parameters:nil];
                             }];
        }
    }];
    // Clear the ivar immediately; the local captured above keeps the token
    // alive for the asynchronous relinquish.
    _audioConfiguration = nil;
}
  815. - (CIContext *)ciContext
  816. {
  817. if (!_ciContext) {
  818. _ciContext = [CIContext contextWithOptions:nil];
  819. }
  820. return _ciContext;
  821. }
  822. #pragma mark - SCAudioCaptureSessionDelegate
  823. - (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession
  824. didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  825. {
  826. SCTraceStart();
  827. if (self.status != SCManagedVideoCapturerStatusRecording) {
  828. return;
  829. }
  830. CFRetain(sampleBuffer);
  831. [_performer performImmediatelyIfCurrentPerformer:^{
  832. if (self.status == SCManagedVideoCapturerStatusRecording) {
  833. // Audio always follows video, there is no other way around this :)
  834. if (_hasWritten && CACurrentMediaTime() - _recordStartTime <= _maxDuration) {
  835. [self _processAudioSampleBuffer:sampleBuffer];
  836. [_videoWriter appendAudioSampleBuffer:sampleBuffer];
  837. }
  838. }
  839. CFRelease(sampleBuffer);
  840. }];
  841. }
  842. #pragma mark - SCManagedVideoDataSourceListener
// SCManagedVideoDataSourceListener: handles each incoming video sample buffer.
// Retains the buffer across the queue hop, starts the writer on the first
// frame (whose timestamp anchors the session), tracks the session end time,
// appends frames while within the max duration, and drives the time observer.
// Also triggers the deferred stop once a frame later than _stopTime arrives,
// which lets the capture pipeline drain before stopping.
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    if (self.status != SCManagedVideoCapturerStatusRecording) {
        return;
    }
    CFRetain(sampleBuffer);
    [_performer performImmediatelyIfCurrentPerformer:^{
        // the following check will allow the capture pipeline to drain.
        // NOTE(review): this compares the buffer's presentation time against
        // _stopTime (captured via CACurrentMediaTime) — assumes both use the
        // host-time clock; confirm against the capture configuration.
        if (CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) > _stopTime) {
            [self _stopRecording];
        } else {
            // Re-check status: recording may have stopped while this block was queued.
            if (self.status == SCManagedVideoCapturerStatusRecording) {
                _isFrontFacingCamera = (devicePosition == SCManagedCaptureDevicePositionFront);
                CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                if (CMTIME_IS_VALID(presentationTime)) {
                    SCLogVideoCapturerInfo(@"Obtained video data source at time %lld", presentationTime.value);
                } else {
                    SCLogVideoCapturerInfo(@"Obtained video data source with an invalid time");
                }
                if (!_hasWritten) {
                    // Start writing! The first frame's presentation time becomes
                    // the session start for all subsequent relative timestamps.
                    [_videoWriter startWritingAtSourceTime:presentationTime];
                    [_capturerLogger endLoggingForStarting];
                    _startSessionTime = presentationTime;
                    _startSessionRealTime = CACurrentMediaTime();
                    SCLogVideoCapturerInfo(@"First frame processed %f seconds after presentation Time",
                                           _startSessionRealTime - CMTimeGetSeconds(presentationTime));
                    _hasWritten = YES;
                    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CMTimeGetSeconds(presentationTime)];
                    [[SCCoreCameraLogger sharedInstance]
                        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CMTimeGetSeconds(
                                                                                          presentationTime)];
                    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo after first frame: %@",
                                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
                }
                // Only respect video end session time, audio can be cut off, not video,
                // not video
                if (CMTIME_IS_INVALID(_endSessionTime)) {
                    _endSessionTime = presentationTime;
                } else {
                    _endSessionTime = CMTimeMaximum(_endSessionTime, presentationTime);
                }
                // Stop appending (but keep observing time) once past max duration.
                if (CACurrentMediaTime() - _recordStartTime <= _maxDuration) {
                    [_videoWriter appendVideoSampleBuffer:sampleBuffer];
                    [self _processVideoSampleBuffer:sampleBuffer];
                }
                if (_timeObserver) {
                    [_timeObserver processTime:CMTimeSubtract(presentationTime, _startSessionTime)
                        sessionStartTimeDelayInSecond:_startSessionRealTime - CMTimeGetSeconds(_startSessionTime)];
                }
            }
        }
        CFRelease(sampleBuffer);
    }];
}
// Builds the recording's placeholder image from a captured pixel buffer on a
// background queue, crops it to the capture aspect ratio, then (back on the
// performer) stores it and runs a frame-health check — but only if we are
// still recording. The pixel buffer is retained here and released in the
// innermost block, so it survives both queue hops.
- (void)_generatePlaceholderImageWithPixelBuffer:(CVImageBufferRef)pixelBuffer metaData:(NSDictionary *)metadata
{
    SCTraceStart();
    CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer);
    if (imageBuffer) {
        dispatch_async(SCPlaceholderImageGenerationQueue(), ^{
            UIImage *placeholderImage = [UIImage imageWithPixelBufferRef:imageBuffer
                                                             backingType:UIImageBackingTypeCGImage
                                                             orientation:UIImageOrientationRight
                                                                 context:[self ciContext]];
            placeholderImage =
                SCCropImageToTargetAspectRatio(placeholderImage, SCManagedCapturedImageAndVideoAspectRatio());
            [_performer performImmediatelyIfCurrentPerformer:^{
                // After processing, assign it back — unless recording already
                // ended, in which case the image is stale and dropped.
                if (self.status == SCManagedVideoCapturerStatusRecording) {
                    _placeholderImage = placeholderImage;
                    // Check video frame health by placeholder image
                    [[SCManagedFrameHealthChecker sharedInstance]
                        checkVideoHealthForCaptureFrameImage:placeholderImage
                                                    metedata:metadata
                                            captureSessionID:_captureSessionID];
                }
                // Balance the CVPixelBufferRetain above.
                CVPixelBufferRelease(imageBuffer);
            }];
        });
    }
}
  928. #pragma mark - Pixel Buffer methods
  929. - (void)_processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
  930. {
  931. SC_GUARD_ELSE_RETURN(sampleBuffer);
  932. CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  933. BOOL shouldGeneratePlaceholderImage = CMTimeCompare(presentationTime, _startSessionTime) == 0;
  934. CVImageBufferRef outputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  935. if (outputPixelBuffer) {
  936. [self _addVideoRawDataWithPixelBuffer:outputPixelBuffer];
  937. if (shouldGeneratePlaceholderImage) {
  938. NSDictionary *extraInfo = [_delegate managedVideoCapturerGetExtraFrameHealthInfo:self];
  939. NSDictionary *metadata =
  940. [[[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer extraInfo:extraInfo]
  941. copy];
  942. [self _generatePlaceholderImageWithPixelBuffer:outputPixelBuffer metaData:metadata];
  943. }
  944. }
  945. [_delegate managedVideoCapturer:self
  946. didAppendVideoSampleBuffer:sampleBuffer
  947. presentationTimestamp:CMTimeSubtract(presentationTime, _startSessionTime)];
  948. }
  949. - (void)_processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
  950. {
  951. [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];
  952. if (!CMTIME_IS_VALID(self.firstWrittenAudioBufferDelay)) {
  953. self.firstWrittenAudioBufferDelay =
  954. CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), _startSessionTime);
  955. }
  956. }
  957. - (void)_addVideoRawDataWithPixelBuffer:(CVImageBufferRef)pixelBuffer
  958. {
  959. if (_videoFrameRawDataCollector && [SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding] &&
  960. ((_rawDataFrameNum % kSCVideoContentComplexitySamplingRate) == 0) && (_rawDataFrameNum > 0)) {
  961. if (_videoFrameRawDataCollector) {
  962. CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer);
  963. [_videoFrameRawDataCollector collectVideoFrameRawDataWithImageBuffer:imageBuffer
  964. frameNum:_rawDataFrameNum
  965. completion:^{
  966. CVPixelBufferRelease(imageBuffer);
  967. }];
  968. }
  969. }
  970. _rawDataFrameNum++;
  971. }
  972. #pragma mark - SCManagedAudioDataSource
// SCManagedAudioDataSource: registers a listener that will receive audio
// sample buffers via the announcer.
- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener
{
    [_announcer addListener:listener];
}
// SCManagedAudioDataSource: unregisters a previously added listener.
- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener
{
    [_announcer removeListener:listener];
}
// SCManagedAudioDataSource: the streaming lifecycle is owned by the recorder;
// calling this directly is a programmer error.
- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
    SCAssertFail(@"Controlled by recorder");
}
// SCManagedAudioDataSource: the streaming lifecycle is owned by the recorder;
// calling this directly is a programmer error.
- (void)stopStreaming
{
    SCAssertFail(@"Controlled by recorder");
}
// SCManagedAudioDataSource: this capturer is "streaming" exactly while it is
// actively recording.
- (BOOL)isStreaming
{
    return self.status == SCManagedVideoCapturerStatusRecording;
}
  993. @end