2014 Snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

430 lines
19 KiB

  1. //
  2. // SCCapturerBufferedVideoWriter.m
  3. // Snapchat
  4. //
  5. // Created by Chao Pang on 12/5/17.
  6. //
  7. #import "SCCapturerBufferedVideoWriter.h"
  8. #import "SCAudioCaptureSession.h"
  9. #import "SCCaptureCommon.h"
  10. #import "SCManagedCapturerUtils.h"
  11. #import <SCBase/SCMacros.h>
  12. #import <SCFoundation/SCAssertWrapper.h>
  13. #import <SCFoundation/SCDeviceName.h>
  14. #import <SCFoundation/SCLog.h>
  15. #import <SCFoundation/SCTrace.h>
  16. #import <FBKVOController/FBKVOController.h>
@implementation SCCapturerBufferedVideoWriter {
    SCQueuePerformer *_performer;  // serial performer; every public method asserts it runs on it
    __weak id<SCCapturerBufferedVideoWriterDelegate> _delegate;  // notified when the asset writer fails
    FBKVOController *_observeController;  // KVO on _assetWriter.status (AVAssetWriter has no error delegate)
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_audioWriterInput;
    AVAssetWriterInput *_videoWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor;
    // Lazily-created pools for cropped output buffers, one per capture resolution
    // (default / night mode / everything else, i.e. lenses) — see _pixelBufferPoolWithInputSize:.
    CVPixelBufferPoolRef _defaultPixelBufferPool;
    CVPixelBufferPoolRef _nightPixelBufferPool;
    CVPixelBufferPoolRef _lensesPixelBufferPool;
    // Hold samples that arrive while the corresponding writer input is not ready for more media data.
    CMBufferQueueRef _videoBufferQueue;
    CMBufferQueueRef _audioBufferQueue;
}
  31. - (instancetype)initWithPerformer:(id<SCPerforming>)performer
  32. outputURL:(NSURL *)outputURL
  33. delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate
  34. error:(NSError **)error
  35. {
  36. self = [super init];
  37. if (self) {
  38. _performer = performer;
  39. _delegate = delegate;
  40. _observeController = [[FBKVOController alloc] initWithObserver:self];
  41. CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
  42. &_videoBufferQueue);
  43. CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
  44. &_audioBufferQueue);
  45. _assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];
  46. if (*error) {
  47. self = nil;
  48. return self;
  49. }
  50. }
  51. return self;
  52. }
  53. - (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
  54. {
  55. SCTraceStart();
  56. SCAssert([_performer isCurrentPerformer], @"");
  57. SCAssert(outputSettings, @"empty output setting");
  58. // Audio
  59. SCTraceSignal(@"Derive audio output setting");
  60. NSDictionary *audioOutputSettings = @{
  61. AVFormatIDKey : @(kAudioFormatMPEG4AAC),
  62. AVNumberOfChannelsKey : @(1),
  63. AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate),
  64. AVEncoderBitRateKey : @(outputSettings.audioBitRate)
  65. };
  66. _audioWriterInput =
  67. [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
  68. _audioWriterInput.expectsMediaDataInRealTime = YES;
  69. // Video
  70. SCTraceSignal(@"Derive video output setting");
  71. size_t outputWidth = outputSettings.width;
  72. size_t outputHeight = outputSettings.height;
  73. SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0),
  74. @"invalid output size");
  75. NSDictionary *videoCompressionSettings = @{
  76. AVVideoAverageBitRateKey : @(outputSettings.videoBitRate),
  77. AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval)
  78. };
  79. NSDictionary *videoOutputSettings = @{
  80. AVVideoCodecKey : AVVideoCodecH264,
  81. AVVideoWidthKey : @(outputWidth),
  82. AVVideoHeightKey : @(outputHeight),
  83. AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
  84. AVVideoCompressionPropertiesKey : videoCompressionSettings
  85. };
  86. _videoWriterInput =
  87. [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings];
  88. _videoWriterInput.expectsMediaDataInRealTime = YES;
  89. CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0);
  90. _videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2);
  91. _pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
  92. initWithAssetWriterInput:_videoWriterInput
  93. sourcePixelBufferAttributes:@{
  94. (NSString *)
  95. kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)
  96. kCVPixelBufferWidthKey : @(outputWidth), (NSString *)
  97. kCVPixelBufferHeightKey : @(outputHeight)
  98. }];
  99. SCTraceSignal(@"Setup video writer input");
  100. if ([_assetWriter canAddInput:_videoWriterInput]) {
  101. [_assetWriter addInput:_videoWriterInput];
  102. } else {
  103. return NO;
  104. }
  105. SCTraceSignal(@"Setup audio writer input");
  106. if ([_assetWriter canAddInput:_audioWriterInput]) {
  107. [_assetWriter addInput:_audioWriterInput];
  108. } else {
  109. return NO;
  110. }
  111. return YES;
  112. }
  113. - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
  114. {
  115. SCAssert([_performer isCurrentPerformer], @"");
  116. SC_GUARD_ELSE_RETURN(sampleBuffer);
  117. if (!CMBufferQueueIsEmpty(_videoBufferQueue)) {
  118. // We need to drain the buffer queue in this case
  119. while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors
  120. CMSampleBufferRef dequeuedSampleBuffer =
  121. (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
  122. if (dequeuedSampleBuffer == NULL) {
  123. break;
  124. }
  125. [self _appendVideoSampleBuffer:dequeuedSampleBuffer];
  126. CFRelease(dequeuedSampleBuffer);
  127. }
  128. }
  129. // Fast path, just append this sample buffer if ready
  130. if (_videoWriterInput.readyForMoreMediaData) {
  131. [self _appendVideoSampleBuffer:sampleBuffer];
  132. } else {
  133. // It is not ready, queuing the sample buffer
  134. CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer);
  135. }
  136. }
  137. - (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
  138. {
  139. SCAssert([_performer isCurrentPerformer], @"");
  140. SC_GUARD_ELSE_RETURN(sampleBuffer);
  141. if (!CMBufferQueueIsEmpty(_audioBufferQueue)) {
  142. // We need to drain the buffer queue in this case
  143. while (_audioWriterInput.readyForMoreMediaData) {
  144. CMSampleBufferRef dequeuedSampleBuffer =
  145. (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
  146. if (dequeuedSampleBuffer == NULL) {
  147. break;
  148. }
  149. [_audioWriterInput appendSampleBuffer:sampleBuffer];
  150. CFRelease(dequeuedSampleBuffer);
  151. }
  152. }
  153. // fast path, just append this sample buffer if ready
  154. if ((_audioWriterInput.readyForMoreMediaData)) {
  155. [_audioWriterInput appendSampleBuffer:sampleBuffer];
  156. } else {
  157. // it is not ready, queuing the sample buffer
  158. CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer);
  159. }
  160. }
  161. - (void)startWritingAtSourceTime:(CMTime)sourceTime
  162. {
  163. SCTraceStart();
  164. SCAssert([_performer isCurrentPerformer], @"");
  165. // To observe the status change on assetWriter because when assetWriter errors out, it only changes the
  166. // status, no further delegate callbacks etc.
  167. [_observeController observe:_assetWriter
  168. keyPath:@keypath(_assetWriter, status)
  169. options:NSKeyValueObservingOptionNew
  170. action:@selector(assetWriterStatusChanged:)];
  171. [_assetWriter startWriting];
  172. [_assetWriter startSessionAtSourceTime:sourceTime];
  173. }
  174. - (void)cancelWriting
  175. {
  176. SCTraceStart();
  177. SCAssert([_performer isCurrentPerformer], @"");
  178. CMBufferQueueReset(_videoBufferQueue);
  179. CMBufferQueueReset(_audioBufferQueue);
  180. [_assetWriter cancelWriting];
  181. }
  182. - (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock
  183. {
  184. SCTraceStart();
  185. SCAssert([_performer isCurrentPerformer], @"");
  186. while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) {
  187. CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
  188. if (audioSampleBuffer == NULL) {
  189. break;
  190. }
  191. [_audioWriterInput appendSampleBuffer:audioSampleBuffer];
  192. CFRelease(audioSampleBuffer);
  193. }
  194. while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) {
  195. CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
  196. if (videoSampleBuffer == NULL) {
  197. break;
  198. }
  199. [_videoWriterInput appendSampleBuffer:videoSampleBuffer];
  200. CFRelease(videoSampleBuffer);
  201. }
  202. dispatch_block_t finishWritingBlock = ^() {
  203. [_assetWriter endSessionAtSourceTime:sourceTime];
  204. [_audioWriterInput markAsFinished];
  205. [_videoWriterInput markAsFinished];
  206. [_assetWriter finishWritingWithCompletionHandler:^{
  207. if (completionBlock) {
  208. completionBlock();
  209. }
  210. }];
  211. };
  212. if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) {
  213. finishWritingBlock();
  214. } else {
  215. // We need to drain the samples from the queues before finish writing
  216. __block BOOL isAudioDone = NO;
  217. __block BOOL isVideoDone = NO;
  218. // Audio
  219. [_audioWriterInput
  220. requestMediaDataWhenReadyOnQueue:_performer.queue
  221. usingBlock:^{
  222. if (!CMBufferQueueIsEmpty(_audioBufferQueue) &&
  223. _assetWriter.status == AVAssetWriterStatusWriting) {
  224. CMSampleBufferRef audioSampleBuffer =
  225. (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
  226. if (audioSampleBuffer) {
  227. [_audioWriterInput appendSampleBuffer:audioSampleBuffer];
  228. CFRelease(audioSampleBuffer);
  229. }
  230. } else if (!isAudioDone) {
  231. isAudioDone = YES;
  232. }
  233. if (isAudioDone && isVideoDone) {
  234. finishWritingBlock();
  235. }
  236. }];
  237. // Video
  238. [_videoWriterInput
  239. requestMediaDataWhenReadyOnQueue:_performer.queue
  240. usingBlock:^{
  241. if (!CMBufferQueueIsEmpty(_videoBufferQueue) &&
  242. _assetWriter.status == AVAssetWriterStatusWriting) {
  243. CMSampleBufferRef videoSampleBuffer =
  244. (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
  245. if (videoSampleBuffer) {
  246. [_videoWriterInput appendSampleBuffer:videoSampleBuffer];
  247. CFRelease(videoSampleBuffer);
  248. }
  249. } else if (!isVideoDone) {
  250. isVideoDone = YES;
  251. }
  252. if (isAudioDone && isVideoDone) {
  253. finishWritingBlock();
  254. }
  255. }];
  256. }
  257. }
  258. - (void)cleanUp
  259. {
  260. _assetWriter = nil;
  261. _videoWriterInput = nil;
  262. _audioWriterInput = nil;
  263. _pixelBufferAdaptor = nil;
  264. }
  265. - (void)dealloc
  266. {
  267. CFRelease(_videoBufferQueue);
  268. CFRelease(_audioBufferQueue);
  269. CVPixelBufferPoolRelease(_defaultPixelBufferPool);
  270. CVPixelBufferPoolRelease(_nightPixelBufferPool);
  271. CVPixelBufferPoolRelease(_lensesPixelBufferPool);
  272. [_observeController unobserveAll];
  273. }
  274. - (void)assetWriterStatusChanged:(NSDictionary *)change
  275. {
  276. SCTraceStart();
  277. if (_assetWriter.status == AVAssetWriterStatusFailed) {
  278. SCTraceSignal(@"Asset writer status failed %@, error %@", change, _assetWriter.error);
  279. [_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]];
  280. }
  281. }
  282. #pragma - Private methods
  283. - (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer
  284. {
  285. SCAssertTrue([SCDeviceName isIphoneX]);
  286. const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer);
  287. const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer);
  288. const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2;
  289. const size_t croppedBufferHeight =
  290. (size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2;
  291. const size_t offsetPointX = inputBufferWidth - croppedBufferWidth;
  292. const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2;
  293. SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) &&
  294. (inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) &&
  295. (offsetPointY % 2 == 0) &&
  296. (inputBufferWidth >= croppedBufferWidth + offsetPointX) &&
  297. (inputBufferHeight >= croppedBufferHeight + offsetPointY),
  298. SCLogGeneralError(@"Invalid cropping configuration"), NULL);
  299. CVPixelBufferRef croppedPixelBuffer = NULL;
  300. CVPixelBufferPoolRef pixelBufferPool =
  301. [self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight)
  302. croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)];
  303. if (pixelBufferPool) {
  304. CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer);
  305. if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) {
  306. SCLogGeneralError(@"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer");
  307. return NULL;
  308. }
  309. } else {
  310. SCAssertFail(@"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, "
  311. @"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@",
  312. @(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight));
  313. return NULL;
  314. }
  315. CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
  316. CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0);
  317. const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer);
  318. for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) {
  319. size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex);
  320. size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex);
  321. uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex);
  322. size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex);
  323. size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex);
  324. uint8_t *croppedPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex);
  325. // Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X).
  326. // However, since UV are packed together in memory, we can use offsetPointX for all planes
  327. size_t offsetPlaneBytesX = offsetPointX;
  328. size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight;
  329. inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX;
  330. size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow);
  331. for (int i = 0; i < croppedPlaneHeight; i++) {
  332. memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow);
  333. inPlaneAdress += inPlaneBytesPerRow;
  334. croppedPlaneAdress += croppedPlaneBytesPerRow;
  335. }
  336. }
  337. CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
  338. CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0);
  339. return croppedPixelBuffer;
  340. }
  341. - (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize
  342. {
  343. if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) {
  344. if (_defaultPixelBufferPool == NULL) {
  345. _defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
  346. }
  347. return _defaultPixelBufferPool;
  348. } else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) {
  349. if (_nightPixelBufferPool == NULL) {
  350. _nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
  351. }
  352. return _nightPixelBufferPool;
  353. } else {
  354. if (_lensesPixelBufferPool == NULL) {
  355. _lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
  356. }
  357. return _lensesPixelBufferPool;
  358. }
  359. }
  360. - (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height
  361. {
  362. NSDictionary *attributes = @{
  363. (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *)
  364. kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)
  365. kCVPixelBufferWidthKey : @(width), (NSString *)
  366. kCVPixelBufferHeightKey : @(height)
  367. };
  368. CVPixelBufferPoolRef pixelBufferPool = NULL;
  369. CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
  370. (__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool);
  371. if (result != kCVReturnSuccess) {
  372. SCLogGeneralError(@"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i", result);
  373. return NULL;
  374. }
  375. return pixelBufferPool;
  376. }
  377. - (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
  378. {
  379. SCAssert([_performer isCurrentPerformer], @"");
  380. CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  381. CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  382. if ([SCDeviceName isIphoneX]) {
  383. CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer];
  384. if (croppedPixelBuffer) {
  385. [_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime];
  386. CVPixelBufferRelease(croppedPixelBuffer);
  387. }
  388. } else {
  389. [_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime];
  390. }
  391. }
  392. @end