2014 Snapchat source code
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

709 lines
30 KiB

  1. //
  2. // SCManagedFrameHealthChecker.m
  3. // Snapchat
  4. //
  5. // Created by Pinlin Chen on 30/08/2017.
  6. //
  7. #import "SCManagedFrameHealthChecker.h"
  8. #import "SCCameraSettingUtils.h"
  9. #import "SCCameraTweaks.h"
  10. #import <SCFoundation/AVAsset+Helpers.h>
  11. #import <SCFoundation/SCLog.h>
  12. #import <SCFoundation/SCLogHelper.h>
  13. #import <SCFoundation/SCQueuePerformer.h>
  14. #import <SCFoundation/SCTraceODPCompatible.h>
  15. #import <SCFoundation/UIImage+Helpers.h>
  16. #import <SCLogger/SCCameraMetrics.h>
  17. #import <SCLogger/SCLogger+Stats.h>
  18. #import <SCWebP/UIImage+WebP.h>
  19. #import <ImageIO/CGImageProperties.h>
  20. @import Accelerate;
// Label of the serial queue on which all frame-health work runs (keeps it off the main thread).
static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker";
// Upper bound on the number of pixels sampled per image when computing channel averages.
static const int kSCManagedFrameHealthCheckerMaxSamples = 2304;
// Average channel value (0-255 scale) below which a frame counts as possibly black.
static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0;
// Images are downscaled before sampling so that no edge exceeds this length.
static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0;
// Scale passed to the image-resizing helper (1.0 = pixel size equals point size).
static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0;
// Free-memory floor for running checks; assume we could process at most 2 RGBA images
// of 2304*4096 pixels at once, so skip the work when free memory drops below this (MB).
static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0;
// Pipeline stage at which a frame-health check runs; determines the snap-type and
// source strings attached to the logged event.
typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) {
    SCManagedFrameHealthCheck_ImageCapture = 0,
    SCManagedFrameHealthCheck_ImagePreTranscoding,
    SCManagedFrameHealthCheck_ImagePostTranscoding,
    SCManagedFrameHealthCheck_VideoCapture,
    SCManagedFrameHealthCheck_VideoOverlayImage,
    SCManagedFrameHealthCheck_VideoPostTranscoding,
};
// Outcome of a single frame-health check; None means the frame looked healthy.
typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) {
    SCManagedFrameHealthCheckError_None = 0,
    SCManagedFrameHealthCheckError_Invalid_Bitmap,
    SCManagedFrameHealthCheckError_Frame_Possibly_Black,
    SCManagedFrameHealthCheckError_Frame_Totally_Black,
    SCManagedFrameHealthCheckError_Execution_Error,
};
// Per-channel sums (not averages) of the sampled pixels, as produced by
// -_getSumRGBAFromData:stripLength:bufferLength:bitmapInfo:.
typedef struct {
    float R;
    float G;
    float B;
    float A;
} FloatRGBA;
@class SCManagedFrameHealthCheckerTask;
// Block type that maps a queued task to its health-info dictionary.
typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task);
  51. float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength)
  52. {
  53. float sum = 0;
  54. float colorArray[bufferLength];
  55. // Convert to float for DSP registerator
  56. vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength);
  57. // Calculate sum of color element
  58. vDSP_sve(colorArray, 1, &sum, bufferLength);
  59. return sum;
  60. }
/// One unit of frame-health work: the frame (or its encoded data) to inspect plus the
/// metadata that will be attached to the logged result.
@interface SCManagedFrameHealthCheckerTask : NSObject
// Pipeline stage this task belongs to.
@property (nonatomic, assign) SCManagedFrameHealthCheckType type;
// UIImage or NSData to inspect; nilled out once unifiedImage has been derived from it.
@property (nonatomic, strong) id targetObject;
// Pixel size of the original frame, recorded before downscaling.
@property (nonatomic, assign) CGSize sourceImageSize;
// Downscaled copy of the frame that the sampler actually reads.
@property (nonatomic, strong) UIImage *unifiedImage;
// Capture settings / EXIF-derived values logged alongside the check result.
@property (nonatomic, strong) NSDictionary *metadata;
// Optional AVAsset-derived properties (video snaps only).
@property (nonatomic, strong) NSDictionary *videoProperties;
// Set by the checker after the task executes.
@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType;
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata
                                  videoProperties:(NSDictionary *)videoProperties;
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
                                     targetObject:(id)targetObject
                                         metadata:(NSDictionary *)metadata;
@end
  77. @implementation SCManagedFrameHealthCheckerTask
  78. + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
  79. targetObject:(id)targetObject
  80. metadata:(NSDictionary *)metadata
  81. {
  82. return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil];
  83. }
  84. + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
  85. targetObject:(id)targetObject
  86. metadata:(NSDictionary *)metadata
  87. videoProperties:(NSDictionary *)videoProperties
  88. {
  89. SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init];
  90. task.type = type;
  91. task.targetObject = targetObject;
  92. task.metadata = metadata;
  93. task.videoProperties = videoProperties;
  94. return task;
  95. }
  96. - (NSString *)textForSnapType
  97. {
  98. switch (self.type) {
  99. case SCManagedFrameHealthCheck_ImageCapture:
  100. case SCManagedFrameHealthCheck_ImagePreTranscoding:
  101. case SCManagedFrameHealthCheck_ImagePostTranscoding:
  102. return @"IMAGE";
  103. case SCManagedFrameHealthCheck_VideoCapture:
  104. case SCManagedFrameHealthCheck_VideoOverlayImage:
  105. case SCManagedFrameHealthCheck_VideoPostTranscoding:
  106. return @"VIDEO";
  107. }
  108. }
  109. - (NSString *)textForSource
  110. {
  111. switch (self.type) {
  112. case SCManagedFrameHealthCheck_ImageCapture:
  113. return @"CAPTURE";
  114. case SCManagedFrameHealthCheck_ImagePreTranscoding:
  115. return @"PRE_TRANSCODING";
  116. case SCManagedFrameHealthCheck_ImagePostTranscoding:
  117. return @"POST_TRANSCODING";
  118. case SCManagedFrameHealthCheck_VideoCapture:
  119. return @"CAPTURE";
  120. case SCManagedFrameHealthCheck_VideoOverlayImage:
  121. return @"OVERLAY_IMAGE";
  122. case SCManagedFrameHealthCheck_VideoPostTranscoding:
  123. return @"POST_TRANSCODING";
  124. }
  125. }
  126. - (NSString *)textForErrorType
  127. {
  128. switch (self.errorType) {
  129. case SCManagedFrameHealthCheckError_None:
  130. return nil;
  131. case SCManagedFrameHealthCheckError_Invalid_Bitmap:
  132. return @"Invalid_Bitmap";
  133. case SCManagedFrameHealthCheckError_Frame_Possibly_Black:
  134. return @"Frame_Possibly_Black";
  135. case SCManagedFrameHealthCheckError_Frame_Totally_Black:
  136. return @"Frame_Totally_Black";
  137. case SCManagedFrameHealthCheckError_Execution_Error:
  138. return @"Execution_Error";
  139. }
  140. }
  141. @end
@interface SCManagedFrameHealthChecker () {
    // Serial performer all task mutation and checking runs on; serializes access to
    // _frameCheckTasks without explicit locking.
    id<SCPerforming> _performer;
    // Dictionary structure
    // Key - NSString, captureSessionID
    // Value - NSMutableArray<SCManagedFrameHealthCheckerTask>
    NSMutableDictionary *_frameCheckTasks;
}
@end
  150. @implementation SCManagedFrameHealthChecker
  151. + (SCManagedFrameHealthChecker *)sharedInstance
  152. {
  153. SCTraceODPCompatibleStart(2);
  154. static SCManagedFrameHealthChecker *checker;
  155. static dispatch_once_t onceToken;
  156. dispatch_once(&onceToken, ^{
  157. checker = [[SCManagedFrameHealthChecker alloc] _init];
  158. });
  159. return checker;
  160. }
  161. - (instancetype)_init
  162. {
  163. SCTraceODPCompatibleStart(2);
  164. if (self = [super init]) {
  165. // Use the lowest QoS level
  166. _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel
  167. qualityOfService:QOS_CLASS_UTILITY
  168. queueType:DISPATCH_QUEUE_SERIAL
  169. context:SCQueuePerformerContextCamera];
  170. _frameCheckTasks = [NSMutableDictionary dictionary];
  171. }
  172. return self;
  173. }
  174. - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
  175. {
  176. SCTraceODPCompatibleStart(2);
  177. // add exposure, ISO, brightness
  178. NSMutableDictionary *metadata = [NSMutableDictionary dictionary];
  179. if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) {
  180. return metadata;
  181. }
  182. CFDictionaryRef exifAttachments =
  183. (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
  184. NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
  185. if (exposureTimeNum) {
  186. metadata[@"exposure"] = exposureTimeNum;
  187. }
  188. NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
  189. if (isoSpeedRatingNum) {
  190. metadata[@"iso"] = isoSpeedRatingNum;
  191. }
  192. NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
  193. if (brightnessNum) {
  194. float brightness = [brightnessNum floatValue];
  195. metadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
  196. }
  197. return metadata;
  198. }
  199. - (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
  200. {
  201. SCTraceODPCompatibleStart(2);
  202. // add exposure, ISO, brightness
  203. NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary];
  204. CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata;
  205. NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
  206. if (exposureTimeNum) {
  207. newMetadata[@"exposure"] = exposureTimeNum;
  208. }
  209. NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
  210. if (isoSpeedRatingNum) {
  211. newMetadata[@"iso"] = isoSpeedRatingNum;
  212. }
  213. NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
  214. if (brightnessNum) {
  215. float brightness = [brightnessNum floatValue];
  216. newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
  217. }
  218. return newMetadata;
  219. }
  220. - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo
  221. {
  222. SCTraceODPCompatibleStart(2);
  223. NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
  224. [metadata addEntriesFromDictionary:extraInfo];
  225. return metadata;
  226. }
  227. - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
  228. photoCapturerEnabled:(BOOL)photoCapturerEnabled
  229. lensEnabled:(BOOL)lensesEnabled
  230. lensID:(NSString *)lensID
  231. {
  232. SCTraceODPCompatibleStart(2);
  233. NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
  234. metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);
  235. metadata[@"lens_enabled"] = @(lensesEnabled);
  236. if (lensesEnabled) {
  237. metadata[@"lens_id"] = lensID ?: @"";
  238. }
  239. return metadata;
  240. }
  241. - (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
  242. photoCapturerEnabled:(BOOL)photoCapturerEnabled
  243. lensEnabled:(BOOL)lensesEnabled
  244. lensID:(NSString *)lensID
  245. {
  246. SCTraceODPCompatibleStart(2);
  247. NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata];
  248. newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);
  249. newMetadata[@"lens_enabled"] = @(lensesEnabled);
  250. if (lensesEnabled) {
  251. newMetadata[@"lens_id"] = lensID ?: @"";
  252. }
  253. return newMetadata;
  254. }
  255. - (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset
  256. {
  257. SCTraceODPCompatibleStart(2);
  258. SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil);
  259. NSMutableDictionary *properties = [NSMutableDictionary dictionary];
  260. // file size
  261. properties[@"file_size"] = @([asset fileSize]);
  262. // duration
  263. properties[@"duration"] = @(CMTimeGetSeconds(asset.duration));
  264. // video track count
  265. NSArray<AVAssetTrack *> *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
  266. properties[@"video_track_count"] = @(videoTracks.count);
  267. if (videoTracks.count > 0) {
  268. // video bitrate
  269. properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]);
  270. // frame rate
  271. properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]);
  272. }
  273. // audio track count
  274. NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
  275. properties[@"audio_track_count"] = @(audioTracks.count);
  276. if (audioTracks.count > 0) {
  277. // audio bitrate
  278. properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]);
  279. }
  280. // playable
  281. properties[@"playable"] = @(asset.isPlayable);
  282. return properties;
  283. }
  284. #pragma mark - Image snap
  285. - (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
  286. captureSettings:(NSDictionary *)captureSettings
  287. captureSessionID:(NSString *)captureSessionID
  288. {
  289. SCTraceODPCompatibleStart(2);
  290. if (captureSessionID.length == 0) {
  291. SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty");
  292. return;
  293. }
  294. SCManagedFrameHealthCheckerTask *task =
  295. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture
  296. targetObject:image
  297. metadata:captureSettings];
  298. [self _addTask:task withCaptureSessionID:captureSessionID];
  299. }
  300. - (void)checkImageHealthForPreTranscoding:(UIImage *)image
  301. metadata:(NSDictionary *)metadata
  302. captureSessionID:(NSString *)captureSessionID
  303. {
  304. SCTraceODPCompatibleStart(2);
  305. if (captureSessionID.length == 0) {
  306. SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty");
  307. return;
  308. }
  309. SCManagedFrameHealthCheckerTask *task =
  310. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding
  311. targetObject:image
  312. metadata:metadata];
  313. [self _addTask:task withCaptureSessionID:captureSessionID];
  314. }
  315. - (void)checkImageHealthForPostTranscoding:(NSData *)imageData
  316. metadata:(NSDictionary *)metadata
  317. captureSessionID:(NSString *)captureSessionID
  318. {
  319. SCTraceODPCompatibleStart(2);
  320. if (captureSessionID.length == 0) {
  321. SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty");
  322. return;
  323. }
  324. SCManagedFrameHealthCheckerTask *task =
  325. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding
  326. targetObject:imageData
  327. metadata:metadata];
  328. [self _addTask:task withCaptureSessionID:captureSessionID];
  329. }
  330. #pragma mark - Video snap
  331. - (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
  332. metedata:(NSDictionary *)metadata
  333. captureSessionID:(NSString *)captureSessionID
  334. {
  335. SCTraceODPCompatibleStart(2);
  336. if (captureSessionID.length == 0) {
  337. SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty");
  338. return;
  339. }
  340. SCManagedFrameHealthCheckerTask *task =
  341. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture
  342. targetObject:image
  343. metadata:metadata];
  344. [self _addTask:task withCaptureSessionID:captureSessionID];
  345. }
  346. - (void)checkVideoHealthForOverlayImage:(UIImage *)image
  347. metedata:(NSDictionary *)metadata
  348. captureSessionID:(NSString *)captureSessionID
  349. {
  350. SCTraceODPCompatibleStart(2);
  351. if (captureSessionID.length == 0) {
  352. SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty");
  353. return;
  354. }
  355. // Overlay image could be nil
  356. if (!image) {
  357. SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil.");
  358. return;
  359. }
  360. SCManagedFrameHealthCheckerTask *task =
  361. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage
  362. targetObject:image
  363. metadata:metadata];
  364. [self _addTask:task withCaptureSessionID:captureSessionID];
  365. }
  366. - (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
  367. metedata:(NSDictionary *)metadata
  368. properties:(NSDictionary *)properties
  369. captureSessionID:(NSString *)captureSessionID
  370. {
  371. SCTraceODPCompatibleStart(2);
  372. if (captureSessionID.length == 0) {
  373. SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty");
  374. return;
  375. }
  376. SCManagedFrameHealthCheckerTask *task =
  377. [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding
  378. targetObject:image
  379. metadata:metadata
  380. videoProperties:properties];
  381. [self _addTask:task withCaptureSessionID:captureSessionID];
  382. }
  383. #pragma mark - Task management
  384. - (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID
  385. {
  386. SCTraceODPCompatibleStart(2);
  387. if (!captureSessionID) {
  388. SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil");
  389. return;
  390. }
  391. [self _asynchronouslyCheckForCaptureSessionID:captureSessionID];
  392. }
  393. #pragma mark - Private functions
  394. /// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength.
  395. - (UIImage *)_unifyImage:(UIImage *)sourceImage
  396. {
  397. CGFloat sourceWidth = sourceImage.size.width;
  398. CGFloat sourceHeight = sourceImage.size.height;
  399. if (sourceWidth == 0.0 || sourceHeight == 0.0) {
  400. SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size");
  401. return sourceImage;
  402. }
  403. CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength;
  404. CGFloat widthScalingFactor = maxEdgeLength / sourceWidth;
  405. CGFloat heightScalingFactor = maxEdgeLength / sourceHeight;
  406. CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor);
  407. if (scalingFactor >= 1) {
  408. SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image.");
  409. return sourceImage;
  410. }
  411. CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor);
  412. SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size),
  413. NSStringFromCGSize(targetSize));
  414. return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale];
  415. }
  416. - (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID
  417. {
  418. SCTraceODPCompatibleStart(2);
  419. if (captureSessionID.length == 0) {
  420. return;
  421. }
  422. [_performer perform:^{
  423. SCTraceODPCompatibleStart(2);
  424. CFTimeInterval beforeScaling = CACurrentMediaTime();
  425. if (newTask.targetObject) {
  426. if ([newTask.targetObject isKindOfClass:[UIImage class]]) {
  427. UIImage *sourceImage = (UIImage *)newTask.targetObject;
  428. newTask.unifiedImage = [self _unifyImage:sourceImage];
  429. newTask.sourceImageSize = sourceImage.size;
  430. } else if ([newTask.targetObject isKindOfClass:[NSData class]]) {
  431. UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject];
  432. CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime();
  433. SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f",
  434. betweenDecodingAndScaling - beforeScaling);
  435. beforeScaling = betweenDecodingAndScaling;
  436. newTask.unifiedImage = [self _unifyImage:sourceImage];
  437. newTask.sourceImageSize = sourceImage.size;
  438. } else {
  439. SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@",
  440. NSStringFromClass([newTask.targetObject class]));
  441. }
  442. newTask.targetObject = nil;
  443. }
  444. SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling);
  445. NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID];
  446. if (!taskQueue) {
  447. taskQueue = [NSMutableArray array];
  448. _frameCheckTasks[captureSessionID] = taskQueue;
  449. }
  450. // Remove previous same type task, avoid meaningless task,
  451. // for example repeat click "Send Button" and then "Back button"
  452. // will produce a lot of PRE_TRANSCODING and POST_TRANSCODING
  453. for (SCManagedFrameHealthCheckerTask *task in taskQueue) {
  454. if (task.type == newTask.type) {
  455. [taskQueue removeObject:task];
  456. break;
  457. }
  458. }
  459. [taskQueue addObject:newTask];
  460. }];
  461. }
/// Drains the task queue for captureSessionID on the serial performer: runs every queued
/// frame-health check, logs one unsampled analytics event per task, then forgets the
/// session. Tasks are dropped wholesale if free memory is below the configured floor.
- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
        if (!tasksQueue) {
            return;
        }
        // Check the free memory, if it is too low, drop these tasks
        double memFree = [SCLogger memoryFreeMB];
        if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
            SCLogCoreCameraWarning(
                @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree,
                captureSessionID);
            [_frameCheckTasks removeObjectForKey:captureSessionID];
            return;
        }
        __block NSMutableArray *frameHealthInfoArray = [NSMutableArray array];
        // Execute all tasks and wait for complete
        [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
            SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj;
            NSMutableDictionary *frameHealthInfo;
            UIImage *image = task.unifiedImage;
            if (image) {
                // Get frame health info
                frameHealthInfo = [self _getFrameHealthInfoForImage:image
                                                             source:[task textForSource]
                                                           snapType:[task textForSnapType]
                                                           metadata:task.metadata
                                                    sourceImageSize:task.sourceImageSize
                                                   captureSessionID:captureSessionID];
                NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"];
                NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"];
                NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"];
                // Severity order: totally black > possibly black > execution error.
                if ([isTotallyBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black;
                } else if ([isPossibleBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black;
                } else if ([hasExecutionError boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Execution_Error;
                }
            } else {
                // No decoded image at all: the bitmap was invalid or decoding failed earlier.
                frameHealthInfo = [NSMutableDictionary dictionary];
                task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap;
            }
            if (frameHealthInfo) {
                frameHealthInfo[@"frame_source"] = [task textForSource];
                frameHealthInfo[@"snap_type"] = [task textForSnapType];
                // textForErrorType returns nil for a healthy frame; subscript-assigning nil
                // simply leaves the key out of the logged event.
                frameHealthInfo[@"error_type"] = [task textForErrorType];
                frameHealthInfo[@"capture_session_id"] = captureSessionID;
                frameHealthInfo[@"metadata"] = task.metadata;
                if (task.videoProperties.count > 0) {
                    [frameHealthInfo addEntriesFromDictionary:task.videoProperties];
                }
                [frameHealthInfoArray addObject:frameHealthInfo];
            }
            // Release the image as soon as possible to mitigate the memory pressure
            task.unifiedImage = nil;
        }];
        for (NSDictionary *frameHealthInfo in frameHealthInfoArray) {
            if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) {
                // // TODO: Zi Kai Chen - add this back. Normally we use id<SCManiphestTicketCreator> for
                // this but as this is a shared instance we cannot easily inject it. The work would
                // involve making this not a shared instance.
                // SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
                // JSONStringSerializeObjectForLogging(frameHealthInfo));
            }
            [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
                                                           parameters:frameHealthInfo
                                                     secretParameters:nil
                                                              metrics:nil];
        }
        [_frameCheckTasks removeObjectForKey:captureSessionID];
    }];
}
/// Samples the image's pixel buffer and computes per-channel averages, flagging frames
/// that look black. Returns a dictionary of metrics; sets @"execution_error" when the
/// pixel data cannot be read or no pixels were sampled.
- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
                                              source:(NSString *)source
                                            snapType:(NSString *)snapType
                                            metadata:(NSDictionary *)metadata
                                     sourceImageSize:(CGSize)sourceImageSize
                                    captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
    size_t samplesCount = 0;
    CFTimeInterval start = CACurrentMediaTime();
    CGImageRef imageRef = image.CGImage;
    size_t imageWidth = CGImageGetWidth(imageRef);
    size_t imageHeight = CGImageGetHeight(imageRef);
    // Copies the whole backing bitmap; this is why the caller checks free memory first.
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
    CFTimeInterval getImageDataTime = CACurrentMediaTime();
    if (pixelData) {
        const Byte *imageData = CFDataGetBytePtr(pixelData);
        NSInteger stripLength = 0;  // byte stride between consecutive sampled pixels
        NSInteger bufferLength = 0; // number of pixels sampled
        NSInteger imagePixels = imageWidth * imageHeight;
        // NOTE(review): the stride math assumes 4 bytes per pixel and a tightly packed
        // buffer (bytesPerRow == width * 4); row padding would skew the sampling -- confirm
        // the upstream images are always 32-bit with no row padding.
        // Limit the max sampled frames
        if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
            stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
            bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
        } else {
            stripLength = 4;
            bufferLength = imagePixels;
        }
        samplesCount = bufferLength;
        // Avoid dividing by zero
        if (samplesCount != 0) {
            FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
                                              stripLength:stripLength
                                             bufferLength:bufferLength
                                               bitmapInfo:CGImageGetBitmapInfo(imageRef)];
            float averageR = sumRGBA.R / samplesCount;
            float averageG = sumRGBA.G / samplesCount;
            float averageB = sumRGBA.B / samplesCount;
            float averageA = sumRGBA.A / samplesCount;
            parameters[@"average_sampled_rgba_r"] = @(averageR);
            parameters[@"average_sampled_rgba_g"] = @(averageG);
            parameters[@"average_sampled_rgba_b"] = @(averageB);
            parameters[@"average_sampled_rgba_a"] = @(averageA);
            parameters[@"origin_frame_width"] = @(sourceImageSize.width);
            parameters[@"origin_frame_height"] = @(sourceImageSize.height);
            // Also report possible black to identify the intentional black snap by covering camera.
            // Normally, the averageA very near 255, but for video overlay image, it is very small.
            // So we use averageA > 250 to avoid considing video overlay image as possible black.
            if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
                parameters[@"is_possible_black"] = @(YES);
                // Use this parameters for BigQuery conditions in Grafana
                if (averageR == 0 && averageG == 0 && averageB == 0) {
                    parameters[@"is_total_black"] = @(YES);
                }
            }
        } else {
            SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
                                 source, captureSessionID);
            parameters[@"execution_error"] = @(YES);
        }
        CFRelease(pixelData);
    } else {
        SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
                             captureSessionID);
        parameters[@"execution_error"] = @(YES);
    }
    parameters[@"sample_size"] = @(samplesCount);
    CFTimeInterval end = CACurrentMediaTime();
    SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f",
                        snapType, source, getImageDataTime - start, end - getImageDataTime, end - start);
    return parameters;
}
  613. - (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData
  614. stripLength:(NSInteger)stripLength
  615. bufferLength:(NSInteger)bufferLength
  616. bitmapInfo:(CGBitmapInfo)bitmapInfo
  617. {
  618. SCTraceODPCompatibleStart(2);
  619. FloatRGBA sumRGBA;
  620. if ((bitmapInfo & kCGImageAlphaPremultipliedFirst) && (bitmapInfo & kCGImageByteOrder32Little)) {
  621. // BGRA
  622. sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength);
  623. sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
  624. sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
  625. sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
  626. } else {
  627. // TODO. support other types beside RGBA
  628. sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength);
  629. sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
  630. sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
  631. sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
  632. }
  633. return sumRGBA;
  634. }
  635. @end