2014 Snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

252 lines
11 KiB

  1. //
  2. // SCManagedVideoCapturerHandler.m
  3. // Snapchat
  4. //
  5. // Created by Jingtian Yang on 11/12/2017.
  6. //
  7. #import "SCManagedVideoCapturerHandler.h"
  8. #import "SCCaptureResource.h"
  9. #import "SCManagedCaptureDevice+SCManagedCapturer.h"
  10. #import "SCManagedCapturer.h"
  11. #import "SCManagedCapturerLensAPI.h"
  12. #import "SCManagedCapturerLogging.h"
  13. #import "SCManagedCapturerSampleMetadata.h"
  14. #import "SCManagedCapturerState.h"
  15. #import "SCManagedDeviceCapacityAnalyzer.h"
  16. #import "SCManagedFrontFlashController.h"
  17. #import "SCManagedVideoFileStreamer.h"
  18. #import "SCManagedVideoFrameSampler.h"
  19. #import "SCManagedVideoStreamer.h"
  20. #import <SCCameraFoundation/SCManagedDataSource.h>
  21. #import <SCFoundation/SCAssertWrapper.h>
  22. #import <SCFoundation/SCQueuePerformer.h>
  23. #import <SCFoundation/SCThreadHelpers.h>
  24. #import <SCFoundation/SCTraceODPCompatible.h>
  25. @interface SCManagedVideoCapturerHandler () {
  26. __weak SCCaptureResource *_captureResource;
  27. }
  28. @end
  29. @implementation SCManagedVideoCapturerHandler
  30. - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
  31. {
  32. self = [super init];
  33. if (self) {
  34. SCAssert(captureResource, @"");
  35. _captureResource = captureResource;
  36. }
  37. return self;
  38. }
  39. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  40. didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
  41. {
  42. SCTraceODPCompatibleStart(2);
  43. SCLogCapturerInfo(@"Did begin video recording. sessionId:%u", sessionInfo.sessionId);
  44. [_captureResource.queuePerformer perform:^{
  45. SCTraceStart();
  46. SCManagedCapturerState *state = [_captureResource.state copy];
  47. runOnMainThreadAsynchronously(^{
  48. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  49. didBeginVideoRecording:state
  50. session:sessionInfo];
  51. });
  52. }];
  53. }
  54. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  55. didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo
  56. {
  57. SCTraceODPCompatibleStart(2);
  58. SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId);
  59. [_captureResource.queuePerformer perform:^{
  60. if ([_captureResource.fileInputDecider shouldProcessFileInput]) {
  61. [_captureResource.videoDataSource startStreaming];
  62. }
  63. SCTraceStart();
  64. SCManagedCapturerState *state = [_captureResource.state copy];
  65. runOnMainThreadAsynchronously(^{
  66. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  67. didBeginAudioRecording:state
  68. session:sessionInfo];
  69. });
  70. }];
  71. }
  72. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  73. willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
  74. videoSize:(CGSize)videoSize
  75. placeholderImage:(UIImage *)placeholderImage
  76. session:(SCVideoCaptureSessionInfo)sessionInfo
  77. {
  78. SCTraceODPCompatibleStart(2);
  79. SCLogCapturerInfo(@"Will stop recording. sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)",
  80. sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height);
  81. [_captureResource.queuePerformer perform:^{
  82. SCTraceStart();
  83. if (_captureResource.videoRecording) {
  84. SCManagedCapturerState *state = [_captureResource.state copy];
  85. // Then, sync back to main thread to notify will finish recording
  86. runOnMainThreadAsynchronously(^{
  87. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  88. willFinishRecording:state
  89. session:sessionInfo
  90. recordedVideoFuture:recordedVideoFuture
  91. videoSize:videoSize
  92. placeholderImage:placeholderImage];
  93. });
  94. }
  95. }];
  96. }
  97. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  98. didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
  99. session:(SCVideoCaptureSessionInfo)sessionInfo
  100. {
  101. SCTraceODPCompatibleStart(2);
  102. SCLogCapturerInfo(@"Did succeed recording. sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo);
  103. [_captureResource.queuePerformer perform:^{
  104. SCTraceStart();
  105. if (_captureResource.videoRecording) {
  106. [self _videoRecordingCleanup];
  107. SCManagedCapturerState *state = [_captureResource.state copy];
  108. // Then, sync back to main thread to notify the finish recording
  109. runOnMainThreadAsynchronously(^{
  110. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  111. didFinishRecording:state
  112. session:sessionInfo
  113. recordedVideo:recordedVideo];
  114. });
  115. }
  116. }];
  117. }
  118. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  119. didFailWithError:(NSError *)error
  120. session:(SCVideoCaptureSessionInfo)sessionInfo
  121. {
  122. SCTraceODPCompatibleStart(2);
  123. SCLogCapturerInfo(@"Did fail recording. sessionId:%u", sessionInfo.sessionId);
  124. [_captureResource.queuePerformer perform:^{
  125. SCTraceStart();
  126. if (_captureResource.videoRecording) {
  127. [self _videoRecordingCleanup];
  128. SCManagedCapturerState *state = [_captureResource.state copy];
  129. runOnMainThreadAsynchronously(^{
  130. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  131. didFailRecording:state
  132. session:sessionInfo
  133. error:error];
  134. });
  135. }
  136. }];
  137. }
  138. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  139. didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
  140. {
  141. SCTraceODPCompatibleStart(2);
  142. SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId);
  143. [_captureResource.queuePerformer perform:^{
  144. SCTraceStart();
  145. if (_captureResource.videoRecording) {
  146. [self _videoRecordingCleanup];
  147. SCManagedCapturerState *state = [_captureResource.state copy];
  148. runOnMainThreadAsynchronously(^{
  149. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  150. didCancelRecording:state
  151. session:sessionInfo];
  152. });
  153. }
  154. }];
  155. }
  156. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  157. didGetError:(NSError *)error
  158. forType:(SCManagedVideoCapturerInfoType)type
  159. session:(SCVideoCaptureSessionInfo)sessionInfo
  160. {
  161. SCTraceODPCompatibleStart(2);
  162. SCLogCapturerInfo(@"Did get error. sessionId:%u errorType:%lu, error:%@", sessionInfo.sessionId, (long)type, error);
  163. [_captureResource.queuePerformer perform:^{
  164. runOnMainThreadAsynchronously(^{
  165. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  166. didGetError:error
  167. forType:type
  168. session:sessionInfo];
  169. });
  170. }];
  171. }
  172. - (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer
  173. {
  174. SCTraceODPCompatibleStart(2);
  175. if (_captureResource.state.lensesActive) {
  176. return @{
  177. @"lens_active" : @(YES),
  178. @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null])
  179. };
  180. }
  181. return nil;
  182. }
  183. - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  184. didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
  185. presentationTimestamp:(CMTime)presentationTimestamp
  186. {
  187. CFRetain(sampleBuffer);
  188. [_captureResource.queuePerformer perform:^{
  189. SCManagedCapturerSampleMetadata *sampleMetadata =
  190. [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp
  191. fieldOfView:_captureResource.device.fieldOfView];
  192. [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
  193. didAppendVideoSampleBuffer:sampleBuffer
  194. sampleMetadata:sampleMetadata];
  195. CFRelease(sampleBuffer);
  196. }];
  197. }
  198. - (void)_videoRecordingCleanup
  199. {
  200. SCTraceODPCompatibleStart(2);
  201. SCAssert(_captureResource.videoRecording, @"clean up function only can be called if the "
  202. @"video recording is still in progress.");
  203. SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
  204. SCLogCapturerInfo(@"Video recording cleanup. previous state:%@", _captureResource.state);
  205. [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];
  206. if (_captureResource.videoFrameSampler) {
  207. SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler;
  208. _captureResource.videoFrameSampler = nil;
  209. [_captureResource.announcer removeListener:sampler];
  210. }
  211. // Add back other listeners to video streamer
  212. [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];
  213. if (!_captureResource.state.torchActive) {
  214. // We should turn off torch for the device that we specifically turned on
  215. // for recording
  216. [_captureResource.device setTorchActive:NO];
  217. if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
  218. _captureResource.frontFlashController.torchActive = NO;
  219. }
  220. }
  221. // Unlock focus on both front and back camera if they were locked.
  222. // Even if ARKit was being used during recording, it'll be shut down by the time we get here
  223. // So DON'T match the ARKit check we use around [_ setRecording:YES]
  224. SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
  225. SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
  226. [front setRecording:NO];
  227. [back setRecording:NO];
  228. _captureResource.videoRecording = NO;
  229. if (_captureResource.state.lensesActive) {
  230. BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming;
  231. [_captureResource.lensProcessingCore setModifySource:modifySource];
  232. }
  233. }
  234. @end