2014 snapchat source code

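For orientation, a minimal sketch of how a capture pipeline might drive this class, inferred only from the methods implemented in the listing below (the SCManagedVideoStreamer.h interface is not part of this file, and myListener stands in for any object conforming to SCManagedVideoDataSourceListener):

    // Hypothetical wiring; camera inputs are assumed to be configured elsewhere on the same AVCaptureSession.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    SCManagedVideoStreamer *streamer =
        [[SCManagedVideoStreamer alloc] initWithSession:session
                                         devicePosition:SCManagedCaptureDevicePositionFront];
    [streamer addListener:myListener]; // sample buffers are forwarded to listeners via the announcer
    [session startRunning];
    [streamer startStreaming];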

  1. //
  2. // SCManagedVideoStreamer.m
  3. // Snapchat
  4. //
  5. // Created by Liu Liu on 4/30/15.
  6. // Copyright (c) 2015 Liu Liu. All rights reserved.
  7. //
  8. #import "SCManagedVideoStreamer.h"
  9. #import "ARConfiguration+SCConfiguration.h"
  10. #import "SCCameraTweaks.h"
  11. #import "SCCapturerDefines.h"
  12. #import "SCLogger+Camera.h"
  13. #import "SCManagedCapturePreviewLayerController.h"
  14. #import "SCMetalUtils.h"
  15. #import "SCProcessingPipeline.h"
  16. #import "SCProcessingPipelineBuilder.h"
  17. #import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>
  18. #import <SCFoundation/NSString+SCFormat.h>
  19. #import <SCFoundation/SCLog.h>
  20. #import <SCFoundation/SCQueuePerformer.h>
  21. #import <SCFoundation/SCTrace.h>
  22. #import <SCLogger/SCCameraMetrics.h>
  23. #import <Looksery/Looksery.h>
  24. #import <libkern/OSAtomic.h>
  25. #import <stdatomic.h>
  26. @import ARKit;
  27. @import AVFoundation;
  28. #define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)
  29. #define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)
  30. #define SCLogVideoStreamerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)
  31. static NSInteger const kSCCaptureFrameRate = 30;
  32. static CGFloat const kSCLogInterval = 3.0;
  33. static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer";
  34. static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue";
  35. static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late.
  36. static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay =
  37. 5; // If the frame is not updated for 5 seconds, it is considered to be stalled.
  38. static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap =
  39. 1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps
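// Note: the cap is 1/31 s rather than exactly 1/30 s, presumably so that ARKit frames arriving with slight
// timing jitter at 30fps still pass the timeSince >= kSCManagedVideoStreamerARSessionFramerateCap check in
// session:didUpdateFrame: further down.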
  40. static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15;
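// When keepLateFrames is on, the streamer tracks in-flight buffers itself (see _processingBuffersCount in
// captureOutput:didOutputSampleBuffer:fromConnection:) and starts dropping once this many are queued, mirroring
// the 15-sample-buffer limit described in the comments in didOutputSampleBuffer:depthData: below.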
  41. @interface SCManagedVideoStreamer () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate,
  42. AVCaptureDataOutputSynchronizerDelegate, ARSessionDelegate>
  43. @property (nonatomic, strong) AVCaptureSession *captureSession;
  44. @end
  45. @implementation SCManagedVideoStreamer {
  46. AVCaptureVideoDataOutput *_videoDataOutput;
  47. AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0);
  48. AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0);
  49. BOOL _performingConfigurations;
  50. SCManagedCaptureDevicePosition _devicePosition;
  51. BOOL _videoStabilizationEnabledIfSupported;
  52. SCManagedVideoDataSourceListenerAnnouncer *_announcer;
  53. BOOL _sampleBufferDisplayEnabled;
  54. id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;
  55. dispatch_block_t _flushOutdatedPreviewBlock;
  56. NSMutableArray<NSArray *> *_waitUntilSampleBufferDisplayedBlocks;
  57. SCProcessingPipeline *_processingPipeline;
  58. NSTimeInterval _lastDisplayedFrameTimestamp;
  59. #ifdef SC_USE_ARKIT_FACE
  60. NSTimeInterval _lastDisplayedDepthFrameTimestamp;
  61. #endif
  62. BOOL _depthCaptureEnabled;
  63. CGPoint _portraitModePointOfInterest;
  64. // For sticky video tweaks
  65. BOOL _keepLateFrames;
  66. SCQueuePerformer *_callbackPerformer;
  67. atomic_int _processingBuffersCount;
  68. }
  69. @synthesize isStreaming = _isStreaming;
  70. @synthesize performer = _performer;
  71. @synthesize currentFrame = _currentFrame;
  72. @synthesize fieldOfView = _fieldOfView;
  73. #ifdef SC_USE_ARKIT_FACE
  74. @synthesize lastDepthData = _lastDepthData;
  75. #endif
  76. @synthesize videoOrientation = _videoOrientation;
  77. - (instancetype)initWithSession:(AVCaptureSession *)session
  78. devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  79. {
  80. SCTraceStart();
  81. self = [super init];
  82. if (self) {
  83. _sampleBufferDisplayEnabled = YES;
  84. _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];
  85. // We discard frames to support lenses in real time
  86. _keepLateFrames = NO;
  87. _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel
  88. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  89. queueType:DISPATCH_QUEUE_SERIAL
  90. context:SCQueuePerformerContextCamera];
  91. _videoOrientation = AVCaptureVideoOrientationLandscapeRight;
  92. [self setupWithSession:session devicePosition:devicePosition];
  93. SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition);
  94. }
  95. return self;
  96. }
  97. - (instancetype)initWithSession:(AVCaptureSession *)session
  98. arSession:(ARSession *)arSession
  99. devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0)
  100. {
  101. self = [self initWithSession:session devicePosition:devicePosition];
  102. if (self) {
  103. [self setupWithARSession:arSession];
  104. self.currentFrame = nil;
  105. #ifdef SC_USE_ARKIT_FACE
  106. self.lastDepthData = nil;
  107. #endif
  108. }
  109. return self;
  110. }
  111. - (AVCaptureVideoDataOutput *)_newVideoDataOutput
  112. {
  113. AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
114. // All inbound frames are going to be in the native format of the camera to avoid
115. // any need for transcoding.
  116. output.videoSettings =
  117. @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
  118. return output;
  119. }
  120. - (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  121. {
  122. [self stopStreaming];
  123. self.captureSession = session;
  124. _devicePosition = devicePosition;
  125. _videoDataOutput = [self _newVideoDataOutput];
  126. if (SCDeviceSupportsMetal()) {
127. // We default to start streaming if Metal is supported at startup time.
  128. _isStreaming = YES;
  129. // Set the sample buffer delegate before starting it.
  130. [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
  131. }
  132. if ([session canAddOutput:_videoDataOutput]) {
  133. [session addOutput:_videoDataOutput];
  134. [self _enableVideoMirrorForDevicePosition:devicePosition];
  135. }
  136. if (SCCameraTweaksEnablePortraitModeButton()) {
  137. if (@available(iOS 11.0, *)) {
  138. _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init];
  139. [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
  140. if ([session canAddOutput:_depthDataOutput]) {
  141. [session addOutput:_depthDataOutput];
  142. [_depthDataOutput setDelegate:self callbackQueue:_performer.queue];
  143. }
  144. _depthCaptureEnabled = NO;
  145. }
  146. _portraitModePointOfInterest = CGPointMake(0.5, 0.5);
  147. }
  148. [self setVideoStabilizationEnabledIfSupported:YES];
  149. }
  150. - (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0)
  151. {
  152. arSession.delegateQueue = _performer.queue;
  153. arSession.delegate = self;
  154. }
  155. - (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
  156. {
  157. [_performer perform:^{
  158. _sampleBufferDisplayController = sampleBufferDisplayController;
  159. SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController);
  160. }];
  161. }
  162. - (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
  163. {
  164. [_performer perform:^{
  165. _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
  166. SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
  167. }];
  168. }
  169. - (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
  170. {
  171. SCAssert(queue, @"callback queue must be provided");
  172. SCAssert(completionHandler, @"completion handler must be provided");
  173. SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue,
  174. completionHandler, _isStreaming);
  175. if (_isStreaming) {
  176. [_performer perform:^{
  177. if (!_waitUntilSampleBufferDisplayedBlocks) {
  178. _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array];
  179. }
  180. [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]];
  181. SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler);
  182. }];
  183. } else {
  184. dispatch_async(queue, completionHandler);
  185. }
  186. }
  187. - (void)startStreaming
  188. {
  189. SCTraceStart();
  190. SCLogVideoStreamerInfo(@"start streaming. _isStreaming:%d", _isStreaming);
  191. if (!_isStreaming) {
  192. _isStreaming = YES;
  193. [self _cancelFlushOutdatedPreview];
194. if (@available(iOS 11.0, *)) {
  195. if (_depthCaptureEnabled) {
  196. [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES];
  197. }
  198. }
  199. [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
  200. }
  201. }
  202. - (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  203. {
  204. SCTraceStart();
  205. if ([session canAddOutput:_videoDataOutput]) {
  206. SCLogVideoStreamerError(@"add videoDataOutput:%@", _videoDataOutput);
  207. [session addOutput:_videoDataOutput];
  208. [self _enableVideoMirrorForDevicePosition:devicePosition];
  209. } else {
  210. SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session);
  211. }
  212. [self _enableVideoStabilizationIfSupported];
  213. }
  214. - (void)removeAsOutput:(AVCaptureSession *)session
  215. {
  216. SCTraceStart();
  217. SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session);
  218. [session removeOutput:_videoDataOutput];
  219. }
  220. - (void)_cancelFlushOutdatedPreview
  221. {
  222. SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock);
  223. if (_flushOutdatedPreviewBlock) {
  224. dispatch_block_cancel(_flushOutdatedPreviewBlock);
  225. _flushOutdatedPreviewBlock = nil;
  226. }
  227. }
  228. - (SCQueuePerformer *)callbackPerformer
  229. {
230. // If sticky video tweak is on, use a separate performer queue
  231. if (_keepLateFrames) {
  232. if (!_callbackPerformer) {
  233. _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel
  234. qualityOfService:QOS_CLASS_USER_INTERACTIVE
  235. queueType:DISPATCH_QUEUE_SERIAL
  236. context:SCQueuePerformerContextCamera];
  237. }
  238. return _callbackPerformer;
  239. }
  240. return _performer;
  241. }
  242. - (void)pauseStreaming
  243. {
  244. SCTraceStart();
  245. SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming);
  246. if (_isStreaming) {
  247. _isStreaming = NO;
  248. [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
249. if (@available(iOS 11.0, *)) {
  250. if (_depthCaptureEnabled) {
  251. [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
  252. }
  253. }
  254. @weakify(self);
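// Schedule a deferred flush of the preview: with the keep-last-frame tweak on, keep showing the last frame for
// kSCManagedVideoStreamerStalledDisplay (5s) before flushing; otherwise flush immediately. The block is created
// with dispatch_block_create so that startStreaming can cancel it via _cancelFlushOutdatedPreview if streaming
// resumes first.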
  255. _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{
  256. SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock");
  257. @strongify(self);
  258. SC_GUARD_ELSE_RETURN(self);
  259. [self->_sampleBufferDisplayController flushOutdatedPreview];
  260. });
  261. [_performer perform:_flushOutdatedPreviewBlock
  262. after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? kSCManagedVideoStreamerStalledDisplay : 0];
  263. [_performer perform:^{
  264. [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
  265. }];
  266. }
  267. }
  268. - (void)stopStreaming
  269. {
  270. SCTraceStart();
  271. SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming);
  272. if (_isStreaming) {
  273. _isStreaming = NO;
  274. [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
275. if (@available(iOS 11.0, *)) {
  276. if (_depthCaptureEnabled) {
  277. [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
  278. }
  279. }
  280. }
  281. [self _cancelFlushOutdatedPreview];
  282. [_performer perform:^{
283. SCLogVideoStreamerInfo(@"stopStreaming in perform queue");
  284. [_sampleBufferDisplayController flushOutdatedPreview];
  285. [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
  286. }];
  287. }
  288. - (void)beginConfiguration
  289. {
  290. SCLogVideoStreamerInfo(@"enter beginConfiguration");
  291. [_performer perform:^{
  292. SCLogVideoStreamerInfo(@"performingConfigurations set to YES");
  293. _performingConfigurations = YES;
  294. }];
  295. }
  296. - (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
  297. {
  298. SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition);
  299. [self _enableVideoMirrorForDevicePosition:devicePosition];
  300. [self _enableVideoStabilizationIfSupported];
  301. [_performer perform:^{
  302. SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu",
  303. (unsigned long)_devicePosition, (unsigned long)devicePosition);
  304. if (_devicePosition != devicePosition) {
  305. _devicePosition = devicePosition;
  306. }
  307. }];
  308. }
  309. - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
  310. {
  311. SCTraceStart();
312. // It is not necessary to make these changes on the private queue, because this is only data output configuration.
313. // It should be called from the managed capturer queue to prevent locking the capture session from two different
314. // (private and managed capturer) queues, which would cause a deadlock.
  315. SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu",
  316. (unsigned long)_videoOrientation, (unsigned long)videoOrientation);
  317. _videoOrientation = videoOrientation;
  318. AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  319. connection.videoOrientation = _videoOrientation;
  320. }
  321. - (void)setKeepLateFrames:(BOOL)keepLateFrames
  322. {
  323. SCTraceStart();
  324. [_performer perform:^{
  325. SCTraceStart();
  326. if (keepLateFrames != _keepLateFrames) {
  327. _keepLateFrames = keepLateFrames;
328. // Get and set the corresponding queue based on keepLateFrames.
329. // We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames anymore, because it could potentially
330. // result in a lenses regression, and we can use all 15 sample buffers by adding a separate callback
331. // queue.
  332. [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
  333. SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames);
  334. }
  335. }];
  336. }
  337. - (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0)
  338. {
  339. _depthCaptureEnabled = enabled;
  340. [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled];
  341. if (enabled) {
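// Pair the video and depth outputs so the delegate receives matched video/depth sets through
// dataOutputSynchronizer:didOutputSynchronizedDataCollection: (implemented below) instead of two
// independent callbacks.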
  342. _dataOutputSynchronizer =
  343. [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]];
  344. [_dataOutputSynchronizer setDelegate:self queue:_performer.queue];
  345. } else {
  346. _dataOutputSynchronizer = nil;
  347. }
  348. }
  349. - (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
  350. {
  351. _portraitModePointOfInterest = pointOfInterest;
  352. }
  353. - (BOOL)getKeepLateFrames
  354. {
  355. return _keepLateFrames;
  356. }
  357. - (void)commitConfiguration
  358. {
  359. SCLogVideoStreamerInfo(@"enter commitConfiguration");
  360. [_performer perform:^{
  361. SCLogVideoStreamerInfo(@"performingConfigurations set to NO");
  362. _performingConfigurations = NO;
  363. }];
  364. }
  365. - (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
  366. {
  367. SCTraceStart();
  368. SCLogVideoStreamerInfo(@"add listener:%@", listener);
  369. [_announcer addListener:listener];
  370. }
  371. - (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
  372. {
  373. SCTraceStart();
  374. SCLogVideoStreamerInfo(@"remove listener:%@", listener);
  375. [_announcer removeListener:listener];
  376. }
  377. - (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline
  378. {
  379. SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline);
  380. [_performer perform:^{
  381. SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline);
  382. _processingPipeline = processingPipeline;
  383. }];
  384. }
  385. - (void)removeProcessingPipeline
  386. {
  387. SCLogVideoStreamerInfo(@"enter removeProcessingPipeline");
  388. [_performer perform:^{
  389. SCLogVideoStreamerInfo(@"processingPipeline set to nil");
  390. _processingPipeline = nil;
  391. }];
  392. }
  393. - (BOOL)isVideoMirrored
  394. {
  395. SCTraceStart();
  396. AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  397. return connection.isVideoMirrored;
  398. }
  399. #pragma mark - Common Sample Buffer Handling
  400. - (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  401. {
  402. return [self didOutputSampleBuffer:sampleBuffer depthData:nil];
  403. }
  404. - (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap
  405. {
406. // Don't send the sample buffer if we are performing configurations
  407. if (_performingConfigurations) {
  408. SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES");
  409. return;
  410. }
  411. SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);
412. // We can't set alwaysDiscardsLateVideoFrames to YES when a lens is activated because it will cause the camera to freeze.
413. // When alwaysDiscardsLateVideoFrames is set to NO, late frames will not be dropped until 15 frames have queued up,
414. // so we should simulate the dropping behaviour the way AVFoundation does.
  415. NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
  416. _lastDisplayedFrameTimestamp = presentationTime;
  417. NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime;
418. // Log interval defined by kSCLogInterval, currently 3.0s
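// The expression below converts the presentation timestamp into a frame index (seconds * 30fps) and logs only
// when that index is a multiple of kSCCaptureFrameRate * kSCLogInterval (90 frames), i.e. roughly once every
// three seconds of video.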
  419. BOOL shouldLog =
  420. (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) %
  421. ((long)(kSCCaptureFrameRate * kSCLogInterval)) ==
  422. 0;
  423. if (shouldLog) {
  424. SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer);
  425. }
  426. if (_processingPipeline) {
  427. RenderData renderData = {
  428. .sampleBuffer = sampleBuffer,
  429. .depthDataMap = depthDataMap,
  430. .depthBlurPointOfInterest =
  431. SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus()
  432. ? &_portraitModePointOfInterest
  433. : nil,
  434. };
  435. // Ensure we are doing all render operations (i.e. accessing textures) on performer to prevent race condition
  436. SCAssertPerformer(_performer);
  437. sampleBuffer = [_processingPipeline render:renderData];
  438. if (shouldLog) {
  439. SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer,
  440. _processingPipeline);
  441. }
  442. }
  443. if (sampleBuffer && _sampleBufferDisplayEnabled) {
444. // Send the buffer only if it is valid, and set it to be displayed immediately (see the enqueueSampleBuffer method
445. // header; we need to get the attachments array and set the dictionary).
  446. CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
  447. if (!attachmentsArray) {
  448. SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer");
  449. } else if (CFArrayGetCount(attachmentsArray) > 0) {
  450. CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0);
  451. CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
  452. }
453. // Warn if the frame that went through is not recent enough.
  454. if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) {
  455. SCLogVideoStreamerWarning(
  456. @"The sample buffer we received is too late, why? presentationTime:%lf frameLatency:%f",
  457. presentationTime, frameLatency);
  458. }
  459. [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
  460. if (shouldLog) {
  461. SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer);
  462. }
  463. [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
  464. }
  465. if (shouldLog) {
466. SCLogVideoStreamerInfo(@"begin announcing sampleBuffer:%p of devicePosition:%lu", sampleBuffer,
  467. (unsigned long)_devicePosition);
  468. }
  469. [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition];
  470. if (shouldLog) {
471. SCLogVideoStreamerInfo(@"end announcing sampleBuffer:%p", sampleBuffer);
  472. }
  473. }
  474. - (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
  475. {
  476. if (_performingConfigurations) {
  477. return;
  478. }
  479. SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);
  480. NSTimeInterval currentProcessingTime = CACurrentMediaTime();
  481. NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
482. // Only log when the sticky tweak is on, which means frames were held for too long and AVFoundation had to drop the
483. // sampleBuffer
484. if (_keepLateFrames) {
485. SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentSampleTime,
486. currentProcessingTime - currentSampleTime);
  487. }
  488. [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition];
  489. }
  490. #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
  491. - (void)captureOutput:(AVCaptureOutput *)captureOutput
  492. didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  493. fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0)
  494. {
495. // Sticky video tweak is off, i.e. lenses are on:
496. // we use the same queue for callback and processing, and let AVFoundation decide which frames should be dropped
  497. if (!_keepLateFrames) {
  498. [self didOutputSampleBuffer:sampleBuffer];
  499. }
  500. // Sticky video tweak is on
  501. else {
  502. if ([_performer isCurrentPerformer]) {
503. // Note: there might be one frame called back on the processing queue when switching callback queues;
504. // that should be fine. But if the following log appears too often, something is not working as designed.
  505. SCLogVideoStreamerWarning(@"The callback queue should be a separated queue when sticky tweak is on");
  506. }
507. // TODO: In sticky video v2, we should consider checking free memory
  508. if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) {
  509. SCLogVideoStreamerWarning(@"processingBuffersCount reached to the max. current count:%d",
  510. _processingBuffersCount);
  511. [self didDropSampleBuffer:sampleBuffer];
  512. return;
  513. }
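// Count the buffer as in flight and keep it alive across the asynchronous hop onto the processing queue;
// the matching CFRelease and atomic_fetch_sub happen after didOutputSampleBuffer: finishes below.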
  514. atomic_fetch_add(&_processingBuffersCount, 1);
  515. CFRetain(sampleBuffer);
  516. // _performer should always be the processing queue
  517. [_performer perform:^{
  518. [self didOutputSampleBuffer:sampleBuffer];
  519. CFRelease(sampleBuffer);
  520. atomic_fetch_sub(&_processingBuffersCount, 1);
  521. }];
  522. }
  523. }
  524. - (void)captureOutput:(AVCaptureOutput *)captureOutput
  525. didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
  526. fromConnection:(AVCaptureConnection *)connection
  527. {
  528. [self didDropSampleBuffer:sampleBuffer];
  529. }
  530. #pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth)
  531. - (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
  532. didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
  533. NS_AVAILABLE_IOS(11_0)
  534. {
  535. AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection
  536. synchronizedDataForCaptureOutput:_depthDataOutput];
  537. AVDepthData *depthData = nil;
  538. if (syncedDepthData && !syncedDepthData.depthDataWasDropped) {
  539. depthData = syncedDepthData.depthData;
  540. }
  541. AVCaptureSynchronizedSampleBufferData *syncedVideoData =
  542. (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection
  543. synchronizedDataForCaptureOutput:_videoDataOutput];
  544. if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) {
  545. CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer;
  546. [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil];
  547. }
  548. }
  549. #pragma mark - ARSessionDelegate
  550. - (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0)
  551. {
  552. NSString *state = nil;
  553. NSString *reason = nil;
  554. switch (camera.trackingState) {
  555. case ARTrackingStateNormal:
  556. state = @"Normal";
  557. break;
  558. case ARTrackingStateLimited:
  559. state = @"Limited";
  560. break;
  561. case ARTrackingStateNotAvailable:
  562. state = @"Not Available";
  563. break;
  564. }
  565. switch (camera.trackingStateReason) {
  566. case ARTrackingStateReasonNone:
  567. reason = @"None";
  568. break;
  569. case ARTrackingStateReasonInitializing:
  570. reason = @"Initializing";
  571. break;
  572. case ARTrackingStateReasonExcessiveMotion:
  573. reason = @"Excessive Motion";
  574. break;
  575. case ARTrackingStateReasonInsufficientFeatures:
  576. reason = @"Insufficient Features";
  577. break;
  578. #if SC_AT_LEAST_SDK_11_3
  579. case ARTrackingStateReasonRelocalizing:
  580. reason = @"Relocalizing";
  581. break;
  582. #endif
  583. }
  584. SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason);
  585. }
  586. - (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
  587. {
  588. #ifdef SC_USE_ARKIT_FACE
  589. // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific
  590. // devices there are ARFrame instances that don't respond to `capturedDepthData`.
  591. // (note: this was discovered to be due to some people staying on iOS 11 betas).
  592. AVDepthData *depth = nil;
  593. if ([frame respondsToSelector:@selector(capturedDepthData)]) {
  594. depth = frame.capturedDepthData;
  595. }
  596. #endif
  597. CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp;
  598. // Don't deliver more than 30 frames per sec
  599. BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap;
  600. #ifdef SC_USE_ARKIT_FACE
  601. if (depth) {
  602. CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp;
  603. framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap;
  604. }
  605. #endif
  606. SC_GUARD_ELSE_RETURN(framerateMinimumElapsed);
  607. #ifdef SC_USE_ARKIT_FACE
  608. if (depth) {
  609. self.lastDepthData = depth;
  610. _lastDisplayedDepthFrameTimestamp = frame.timestamp;
  611. }
  612. #endif
  613. // Make sure that current frame is no longer being used, otherwise drop current frame.
  614. SC_GUARD_ELSE_RETURN(self.currentFrame == nil);
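// Wrap ARKit's captured pixel buffer in a CMSampleBuffer so it can flow through the same
// didOutputSampleBuffer: path as frames from AVCaptureVideoDataOutput. CMSampleBufferCreateForImageBuffer
// retains the pixel buffer rather than copying its contents.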
  615. CVPixelBufferRef pixelBuffer = frame.capturedImage;
  616. CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  617. CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000);
  618. CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid};
  619. CMVideoFormatDescriptionRef videoInfo;
  620. CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
  621. CMSampleBufferRef buffer;
  622. CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer);
  623. CFRelease(videoInfo);
  624. CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  625. self.currentFrame = frame;
  626. [self didOutputSampleBuffer:buffer];
  627. [self _updateFieldOfViewWithARFrame:frame];
  628. CFRelease(buffer);
  629. }
  630. - (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors NS_AVAILABLE_IOS(11_0)
  631. {
  632. for (ARAnchor *anchor in anchors) {
  633. if ([anchor isKindOfClass:[ARPlaneAnchor class]]) {
  634. SCLogVideoStreamerInfo(@"ARKit added plane anchor");
  635. return;
  636. }
  637. }
  638. }
  639. - (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0)
  640. {
  641. SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error);
  642. [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]];
  643. }
  644. - (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0)
  645. {
  646. SCLogVideoStreamerWarning(@"ARKit session interrupted");
  647. }
  648. - (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0)
  649. {
  650. SCLogVideoStreamerInfo(@"ARKit interruption ended");
  651. }
  652. #pragma mark - Private methods
  653. - (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed
  654. {
  655. for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) {
  656. // Call the completion handlers.
  657. dispatch_async(completion[0], completion[1]);
  658. }
  659. [_waitUntilSampleBufferDisplayedBlocks removeAllObjects];
  660. }
  661. // This is the magic that ensures the VideoDataOutput will have the correct
  662. // orientation.
  663. - (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
  664. {
  665. SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition);
  666. AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  667. connection.videoOrientation = _videoOrientation;
  668. if (devicePosition == SCManagedCaptureDevicePositionFront) {
  669. connection.videoMirrored = YES;
  670. }
  671. }
  672. - (void)_enableVideoStabilizationIfSupported
  673. {
  674. SCTraceStart();
  675. if (!SCCameraTweaksEnableVideoStabilization()) {
  676. SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization");
  677. return;
  678. }
  679. AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  680. if (!videoConnection) {
  681. SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection);
  682. return;
  683. }
684. // Set the preferred video stabilization mode to standard when enabled, otherwise off. Default is off.
  685. if ([videoConnection isVideoStabilizationSupported]) {
  686. videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported
  687. ? AVCaptureVideoStabilizationModeStandard
  688. : AVCaptureVideoStabilizationModeOff;
  689. NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) };
  690. [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params];
  691. SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@",
  692. (long)videoConnection.preferredVideoStabilizationMode, videoConnection);
  693. } else {
  694. SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection);
  695. }
  696. }
  697. - (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
  698. {
  699. SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported);
  700. _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported;
  701. [self _enableVideoStabilizationIfSupported];
  702. }
  703. - (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
  704. {
  705. SC_GUARD_ELSE_RETURN(frame.camera);
  706. CGSize imageResolution = frame.camera.imageResolution;
  707. matrix_float3x3 intrinsics = frame.camera.intrinsics;
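// Pinhole camera model: intrinsics.columns[0][0] is the focal length in pixels along x, so the horizontal
// field of view is 2 * atan(width / (2 * fx)), converted to degrees below.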
  708. float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI;
  709. if (_fieldOfView != xFovDegrees) {
  710. self.fieldOfView = xFovDegrees;
  711. }
  712. }
  713. - (NSString *)description
  714. {
  715. return [self debugDescription];
  716. }
  717. - (NSString *)debugDescription
  718. {
  719. NSDictionary *debugDict = @{
  720. @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No",
  721. @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No",
  722. @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No",
  723. @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No"
  724. };
  725. return [NSString sc_stringWithFormat:@"%@", debugDict];
  726. }
  727. @end