2014 snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

299 lines
9.4 KiB

  1. //
  2. // SCManagedVideoFileStreamer.m
  3. // Snapchat
  4. //
  5. // Created by Alexander Grytsiuk on 3/4/16.
  6. // Copyright © 2016 Snapchat, Inc. All rights reserved.
  7. //
  8. #import "SCManagedVideoFileStreamer.h"
  9. #import "SCManagedCapturePreviewLayerController.h"
  10. #import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>
  11. #import <SCFoundation/SCLog.h>
  12. #import <SCFoundation/SCPlayer.h>
  13. #import <SCFoundation/SCQueuePerformer.h>
  14. #import <SCFoundation/SCTrace.h>
  15. @import AVFoundation;
  16. @import CoreMedia;
// Label for the serial queue backing the streamer's SCQueuePerformer.
static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer";

// Private conformance: receives AVPlayerItemVideoOutput media-data-will-change callbacks.
@interface SCManagedVideoFileStreamer () <AVPlayerItemOutputPullDelegate>

@end
@implementation SCManagedVideoFileStreamer {
    SCManagedVideoDataSourceListenerAnnouncer *_announcer; // fans data-source callbacks out to registered listeners
    SCManagedCaptureDevicePosition _devicePosition;        // reported alongside every emitted sample buffer
    sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler; // one-shot handler, consumed by the next display-link pull
    id _notificationToken; // opaque observer returned by addObserverForName:object:queue:usingBlock:
    id<SCPerforming> _performer;     // serial performer on which frames are pulled from _videoOutput
    dispatch_semaphore_t _semaphore; // count 1; allows a single in-flight frame-pull block (see displayLinkCallback:)
    CADisplayLink *_displayLink;     // vsync-driven trigger for pulling pixel buffers
    AVPlayerItemVideoOutput *_videoOutput; // vends pixel buffers from the playing item
    AVPlayer *_player;                     // loops the file at URL (SCPlayer subclass)
    BOOL _sampleBufferDisplayEnabled;      // when YES, buffers are also enqueued to _sampleBufferDisplayController
    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;
}

@synthesize isStreaming = _isStreaming;
@synthesize performer = _performer;
@synthesize videoOrientation = _videoOrientation;
// Designated entry point: prepares a looping file player plus a vsync-driven
// pixel-buffer pump so the file can masquerade as a camera video data source.
- (instancetype)initWithPlaybackForURL:(NSURL *)URL
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];
        // Count of 1: at most one frame-pull block in flight at a time.
        _semaphore = dispatch_semaphore_create(1);
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel
                                            qualityOfService:QOS_CLASS_UNSPECIFIED
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextStories];
        // Setup CADisplayLink which will callback displayPixelBuffer: at every vsync.
        // NOTE(review): CADisplayLink retains its target, and no dealloc or
        // invalidate call is visible in this file — confirm something external
        // invalidates the link, otherwise this object can never be deallocated.
        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
        // Paused until the output reports media data is available.
        [_displayLink setPaused:YES];
        // Prepare player
        _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL];
#if TARGET_IPHONE_SIMULATOR
        // Mute playback on the simulator.
        _player.volume = 0.0;
#endif
        // Configure output
        [self configureOutput];
    }
    return self;
}
  62. - (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
  63. {
  64. _sampleBufferDisplayController = sampleBufferDisplayController;
  65. }
  66. - (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
  67. {
  68. _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
  69. SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
  70. }
  71. - (void)setKeepLateFrames:(BOOL)keepLateFrames
  72. {
  73. // Do nothing
  74. }
  75. - (BOOL)getKeepLateFrames
  76. {
  77. // return default NO value
  78. return NO;
  79. }
  80. - (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
  81. {
  82. SCAssert(queue, @"callback queue must be provided");
  83. SCAssert(completionHandler, @"completion handler must be provided");
  84. dispatch_async(queue, completionHandler);
  85. }
  86. - (void)startStreaming
  87. {
  88. SCTraceStart();
  89. if (!_isStreaming) {
  90. _isStreaming = YES;
  91. [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem];
  92. [_player play];
  93. }
  94. }
  95. - (void)stopStreaming
  96. {
  97. SCTraceStart();
  98. if (_isStreaming) {
  99. _isStreaming = NO;
  100. [_player pause];
  101. [self removePlayerObservers];
  102. }
  103. }
  104. - (void)pauseStreaming
  105. {
  106. [self stopStreaming];
  107. }
  108. - (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
  109. {
  110. SCTraceStart();
  111. [_announcer addListener:listener];
  112. }
  113. - (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
  114. {
  115. SCTraceStart();
  116. [_announcer removeListener:listener];
  117. }
  118. - (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
  119. {
  120. _devicePosition = devicePosition;
  121. }
  122. - (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
  123. {
  124. _devicePosition = devicePosition;
  125. }
  126. - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
  127. {
  128. _videoOrientation = videoOrientation;
  129. }
// The following data-source protocol requirements have no meaning for a
// file-backed streamer and are intentionally left as no-ops.
- (void)removeAsOutput:(AVCaptureSession *)session
{
    // Ignored — no capture session is ever attached.
}

- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
{
    // Ignored — stabilization does not apply to file playback.
}

- (void)beginConfiguration
{
    // Ignored — nothing to configure transactionally.
}

- (void)commitConfiguration
{
    // Ignored — nothing to configure transactionally.
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    // Ignored — portrait mode does not apply to file playback.
}
  150. #pragma mark - AVPlayerItemOutputPullDelegate
  151. - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
  152. {
  153. if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) {
  154. [self configureOutput];
  155. }
  156. [_displayLink setPaused:NO];
  157. }
#pragma mark - Internal

// Runs at every vsync while the display link is active. Pulls the pixel buffer
// for the upcoming vsync time from the video output and either hands it to the
// pending one-shot handler or wraps it in a sample buffer and announces it.
- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval nextVSync = [sender timestamp] + [sender duration];
    CMTime time = [_videoOutput itemTimeForHostTime:nextVSync];
    // Non-blocking semaphore check: if a previous pull is still running on the
    // performer, this vsync is skipped rather than queued up behind it.
    if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) {
        [_performer perform:^{
            if ([_videoOutput hasNewPixelBufferForItemTime:time]) {
                CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL];
                if (pixelBuffer != NULL) {
                    if (_nextPixelBufferHandler) {
                        // A one-shot pixel buffer request takes priority over
                        // the normal announce/display path and is consumed here.
                        _nextPixelBufferHandler(pixelBuffer);
                        _nextPixelBufferHandler = nil;
                    } else {
                        // Presentation time is wall-clock based at millisecond
                        // resolution, not the item time the buffer was pulled for.
                        CMSampleBufferRef sampleBuffer =
                            [self createSampleBufferFromPixelBuffer:pixelBuffer
                                                   presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)];
                        if (sampleBuffer) {
                            if (_sampleBufferDisplayEnabled) {
                                [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
                            }
                            [_announcer managedVideoDataSource:self
                                         didOutputSampleBuffer:sampleBuffer
                                                devicePosition:_devicePosition];
                            // Balance the create in createSampleBufferFromPixelBuffer:.
                            CFRelease(sampleBuffer);
                        }
                    }
                    // Balance copyPixelBufferForItemTime:'s +1 retain.
                    CVBufferRelease(pixelBuffer);
                }
            }
            dispatch_semaphore_signal(_semaphore);
        }];
    }
}
  192. - (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time
  193. {
  194. CMSampleBufferRef sampleBuffer = NULL;
  195. CMVideoFormatDescriptionRef formatDesc = NULL;
  196. OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);
  197. if (err != noErr) {
  198. return NULL;
  199. }
  200. CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid};
  201. CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc,
  202. &sampleTimingInfo, &sampleBuffer);
  203. CFRelease(formatDesc);
  204. return sampleBuffer;
  205. }
  206. - (void)configureOutput
  207. {
  208. // Remove old output
  209. if (_videoOutput) {
  210. [[_player currentItem] removeOutput:_videoOutput];
  211. }
  212. // Setup AVPlayerItemVideoOutput with the required pixelbuffer attributes.
  213. _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
  214. (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  215. }];
  216. _videoOutput.suppressesPlayerRendering = YES;
  217. [_videoOutput setDelegate:self queue:_performer.queue];
  218. // Add new output
  219. [[_player currentItem] addOutput:_videoOutput];
  220. [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0];
  221. }
  222. - (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion
  223. {
  224. _nextPixelBufferHandler = completion;
  225. }
  226. - (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item
  227. {
  228. if (_notificationToken) {
  229. _notificationToken = nil;
  230. }
  231. _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
  232. _notificationToken =
  233. [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
  234. object:item
  235. queue:[NSOperationQueue mainQueue]
  236. usingBlock:^(NSNotification *note) {
  237. [[_player currentItem] seekToTime:kCMTimeZero];
  238. }];
  239. }
  240. - (void)removePlayerObservers
  241. {
  242. if (_notificationToken) {
  243. [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken
  244. name:AVPlayerItemDidPlayToEndTimeNotification
  245. object:_player.currentItem];
  246. _notificationToken = nil;
  247. }
  248. }
  249. @end