Team Fortress 2 Source Code as on 22/4/2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

2146 lines
63 KiB

  1. //========= Copyright Valve Corporation, All rights reserved. ============//
  2. //
  3. //----------------------------------------------------------------------------------------
  4. #define WIN32_LEAN_AND_MEAN
  5. #include "quicktime_recorder.h"
  6. #include "filesystem.h"
  7. #ifdef _WIN32
  8. #include "windows.h"
  9. #endif
  10. //-----------------------------------------------------------------------------
  11. //-----------------------------------------------------------------------------
  12. #define SAFE_DISPOSE_HANDLE( _handle ) if ( _handle != nullptr ) { DisposeHandle( (Handle) _handle ); _handle = nullptr; }
  13. #define SAFE_DISPOSE_GWORLD( _gworld ) if ( _gworld != nullptr ) { DisposeGWorld( _gworld ); _gworld = nullptr; }
  14. #define SAFE_DISPOSE_MOVIE( _movie ) if ( _movie != nullptr ) { DisposeMovie( _movie ); ThreadSleep(10); Assert( GetMoviesError() == noErr ); _movie = nullptr; }
  15. // Platform check
  16. #if defined ( OSX ) || defined ( WIN32 )
  17. // platform is supported
  18. #else
  19. #error "Unsupported Platform for QuickTime"
  20. #endif
  21. //-----------------------------------------------------------------------------
  22. // Helper functions for copying and converting bitmaps
  23. //-----------------------------------------------------------------------------
// Identifies one color component of a packed pixel; used to select which
// byte-offset GetPixelCompnentByteOffset() should return for a given
// QuickTime pixel format.
enum PixelComponent_t
{
	RED = 0,
	GREEN,
	BLUE,
	ALPHA
};
  31. int GetBytesPerPixel( OSType pixelFormat )
  32. {
  33. int bpp = ( pixelFormat == k24BGRPixelFormat || pixelFormat == k24RGBPixelFormat ) ? 3 :
  34. ( pixelFormat == k32BGRAPixelFormat || pixelFormat == k32RGBAPixelFormat ) ? 4 : 0;
  35. Assert( bpp > 0 );
  36. return bpp;
  37. }
  38. int GetPixelCompnentByteOffset( OSType format, PixelComponent_t component )
  39. {
  40. if ( component == RED )
  41. {
  42. return ( format == k24RGBPixelFormat || format == k32RGBAPixelFormat ) ? 0 :
  43. ( format == k24BGRPixelFormat || format == k32BGRAPixelFormat ) ? 2 : -1;
  44. }
  45. if ( component == GREEN )
  46. {
  47. return ( format == k24RGBPixelFormat || format == k32RGBAPixelFormat ) ? 1 :
  48. ( format == k24BGRPixelFormat || format == k32BGRAPixelFormat ) ? 1 : -1;
  49. }
  50. if ( component == BLUE )
  51. {
  52. return ( format == k24RGBPixelFormat || format == k32RGBAPixelFormat ) ? 2 :
  53. ( format == k24BGRPixelFormat || format == k32BGRAPixelFormat ) ? 0 : -1;
  54. }
  55. if ( component == ALPHA )
  56. {
  57. return ( format == k32BGRAPixelFormat || format == k32RGBAPixelFormat ) ? 3 : -1;
  58. }
  59. Assert( false );
  60. return -1;
  61. }
//-----------------------------------------------------------------------------
// Copies a width x height image between two pixel buffers, reordering the
// R/G/B/A component bytes when the source and destination QuickTime pixel
// formats differ.  Strides are in bytes and may include per-row padding.
// Returns false on bad inputs or an unsupported format combination.
//-----------------------------------------------------------------------------
bool CopyBitMapPixels( int width, int height, OSType srcFmt, byte *srcBase, int srcStride, OSType dstFmt, byte *dstBase, int dstStride )
{
	AssertExitF( width > 0 && height > 0 && srcBase != nullptr && srcStride > 0 && dstBase != nullptr && dstStride > 0 );
	// copy the bitmap pixels into our GWorld
	if ( srcFmt == dstFmt ) // identical formats, memcopy each line
	{
		int srcLineSize = width * GetBytesPerPixel( srcFmt );
		AssertExitF( srcLineSize <= dstStride && srcLineSize <= srcStride );
		for ( int y = 0; y < height; y++ )
		{
			byte *src = srcBase + srcStride * y;
			byte *dst = dstBase + dstStride * y;
			memcpy( dst, src, srcLineSize );
		}
		return true;
	}
	// ok, we got some byte swizzling to do.. get the info we need
	int srcBPP = GetBytesPerPixel( srcFmt );
	int dstBPP = GetBytesPerPixel( dstFmt );
	// per-component byte offsets within one pixel; -1 means "not present"
	int rSrcIndex = GetPixelCompnentByteOffset( srcFmt, RED );
	int gSrcIndex = GetPixelCompnentByteOffset( srcFmt, GREEN );
	int bSrcIndex = GetPixelCompnentByteOffset( srcFmt, BLUE );
	int aSrcIndex = GetPixelCompnentByteOffset( srcFmt, ALPHA );
	int rDstIndex = GetPixelCompnentByteOffset( dstFmt, RED );
	int gDstIndex = GetPixelCompnentByteOffset( dstFmt, GREEN );
	int bDstIndex = GetPixelCompnentByteOffset( dstFmt, BLUE );
	int aDstIndex = GetPixelCompnentByteOffset( dstFmt, ALPHA );
	Assert( rSrcIndex >= 0 && gSrcIndex >= 0 && bSrcIndex >= 0 );
	// 3 byte format to 3 byte format or a 4 byte format to a 3 byte format?
	// (any source alpha is simply dropped)
	if ( dstBPP == 3 )
	{
		for ( int y = 0; y < height; y++ )
		{
			byte *src = srcBase + srcStride * y;
			byte *dst = dstBase + dstStride * y;
			for ( int x = 0; x < width; x++, dst+=dstBPP, src+=srcBPP )
			{
				dst[rDstIndex] = src[rSrcIndex];
				dst[gDstIndex] = src[gSrcIndex];
				dst[bDstIndex] = src[bSrcIndex];
			}
		}
		return true;
	}
	AssertExitF( aDstIndex >= 0 );
	// 3 byte format to 4 byte format?  (alpha is synthesized as opaque)
	if ( srcBPP == 3 && dstBPP == 4 )
	{
		for ( int y = 0; y < height; y++ )
		{
			byte *src = srcBase + srcStride * y;
			byte *dst = dstBase + dstStride * y;
			for ( int x = 0; x < width; x++, dst+=dstBPP, src+=srcBPP )
			{
				dst[rDstIndex] = src[rSrcIndex];
				dst[gDstIndex] = src[gSrcIndex];
				dst[bDstIndex] = src[bSrcIndex];
				dst[aDstIndex] = 0xFF;
			}
		}
		return true;
	}
	// 4 byte format to 4 byte format?  (alpha is carried across)
	if ( srcBPP == 4 && dstBPP == 4 )
	{
		for ( int y = 0; y < height; y++ )
		{
			byte *src = srcBase + srcStride * y;
			byte *dst = dstBase + dstStride * y;
			for ( int x = 0; x < width; x++, dst+=dstBPP, src+=srcBPP )
			{
				dst[rDstIndex] = src[rSrcIndex];
				dst[gDstIndex] = src[gSrcIndex];
				dst[bDstIndex] = src[bSrcIndex];
				dst[aDstIndex] = src[aSrcIndex];
			}
		}
		return true;
	}
	// didn't find the format?
	Assert( false );
	return false;
}
  145. //-----------------------------------------------------------------------------
  146. // Utility functions to save targa images
  147. //-----------------------------------------------------------------------------
// On-disk targa file header; must be packed to exactly 18 bytes (verified by
// the Assert in SaveToTargaFile), hence the pack(1) pragma.
#pragma pack( push, 1 )
struct TGA_Header
{
public:
	byte identsize;			// size of ID field that follows 18 byte header (0 usually)
	byte colourmaptype;		// type of colour map 0=none, 1=has palette
	byte imagetype;			// type of image 0=none,1=indexed,2=rgb,3=grey,+8=rle packed
	short colourmapstart;	// first colour map entry in palette
	short colourmaplength;	// number of colours in palette
	byte colourmapbits;		// number of bits per palette entry 15,16,24,32
	short xstart;			// image x origin
	short ystart;			// image y origin
	short width;			// image width in pixels
	short height;			// image height in pixels
	byte bits;				// image bits per pixel 8,16,24,32
	byte descriptor;		// image descriptor bits (vh flip bits)
	// pixel data follows header
};
#pragma pack(pop)
  167. void SaveToTargaFile( int frameNum, const char* pBaseFileName, int width, int height, void *pPixels, OSType PixelFormat, int strideAdjust )
  168. {
  169. if ( pBaseFileName == nullptr || pPixels== nullptr ) return;
  170. Assert( sizeof( TGA_Header ) == 18 );
  171. TGA_Header theHeader;
  172. ZeroVar( theHeader );
  173. int BytesPerPixel = GetBytesPerPixel( PixelFormat );
  174. Assert( BytesPerPixel > 0 );
  175. theHeader.imagetype = 2;
  176. theHeader.width = (short) width;
  177. theHeader.height = (short) height;
  178. theHeader.colourmapbits = BytesPerPixel * 8;
  179. theHeader.bits = BytesPerPixel * 8;
  180. theHeader.descriptor = ( BytesPerPixel == 4) ? ( 8 | 32 ) : 32; // Targa32, Upper Left Origin, attribute (alpha) bits in bits 0-3
  181. char TGAFileName[MAX_PATH];
  182. V_snprintf( TGAFileName, MAX_PATH, "%s%.4d.tga", pBaseFileName, frameNum );
  183. FileHandle_t TGAFile = g_pFullFileSystem->Open( TGAFileName, "wb" );
  184. g_pFullFileSystem->Write( &theHeader, sizeof( theHeader ), TGAFile );
  185. // is the buffer in BGR format?
  186. if ( PixelFormat == k24BGRPixelFormat || PixelFormat == k32BGRAPixelFormat )
  187. {
  188. if ( strideAdjust == 0 )
  189. {
  190. g_pFullFileSystem->Write( pPixels, width * height * BytesPerPixel, TGAFile );
  191. }
  192. else
  193. {
  194. int lineWidth = width * BytesPerPixel;
  195. int lineOffset = lineWidth + strideAdjust;
  196. for ( int y = 0; y < height; y++ )
  197. {
  198. byte *pData = (byte*) pPixels + ( y * lineOffset );
  199. g_pFullFileSystem->Write( pData, lineWidth, TGAFile );
  200. }
  201. }
  202. }
  203. else // we need to convert the bits from RGB to BGR
  204. {
  205. byte *pData = new byte[width * height * BytesPerPixel];
  206. OSType tgaFormat = ( PixelFormat == k24RGBPixelFormat ) ? k24BGRPixelFormat :
  207. ( PixelFormat == k32RGBAPixelFormat ) ? k32BGRAPixelFormat : 0;
  208. CopyBitMapPixels( width, height, PixelFormat, (byte*) pPixels, width * BytesPerPixel + strideAdjust, tgaFormat, pData, width * BytesPerPixel );
  209. g_pFullFileSystem->Write( pData, width * height * BytesPerPixel, TGAFile );
  210. delete [] pData;
  211. }
  212. g_pFullFileSystem->Close( TGAFile );
  213. }
  214. // ===========================================================================
  215. // Data tables used to estimate file size
  216. // ===========================================================================
// Quality presets (on a 0..100 scale) at which encode data rates were
// sampled; used as interpolation anchors when estimating output file size.
enum EstVideoEncodeQuality_t
{
	cVEQuality_Min = 0,
	cVEQuality_Low = 25,
	cVEQuality_Normal = 50,
	cVEQuality_High = 75,
	cVEQuality_Max = 100
};
// One measured encode data-rate sample: the bit rate observed for a given
// quality preset at a given frame resolution.
struct EncodingDataRateInfo_t
{
	EstVideoEncodeQuality_t m_QualitySetting;	// quality preset the sample was measured at
	int m_XResolution;			// frame width in pixels
	int m_YResolution;			// frame height in pixels
	float m_DataRate;			// in MBits / second
};
// Simple width/height pair used for the resolution preset table.
struct VideoRes_t
{
	int X, Y;	// frame width and height in pixels
};
// Quality anchor points used by EstimateMovieFileSize to bracket the
// requested quality value for interpolation (listed in ascending order).
static EstVideoEncodeQuality_t s_QualityPresets[] =
{
	cVEQuality_Min,
	cVEQuality_Low,
	cVEQuality_Normal,
	cVEQuality_High,
	cVEQuality_Max
};
// Resolution anchor points used by EstimateMovieFileSize to bracket the
// requested movie size by total pixel count.  640x960 / 960x640 are the
// portrait and landscape variants of the same pixel count.
// NOTE(review): every entry here needs a matching row per quality level in
// s_H264EncodeRates, or GetDataRate() will assert -- verify when editing.
static VideoRes_t s_ResolutionPresets[] =
{
	{ 16, 16 },
	{ 720, 480 },
	{ 640, 960 },
	{ 960, 640 },
	{ 1280, 720 },
	{ 1920, 1080 },
	{ 2048, 2048 },
};
  254. static EncodingDataRateInfo_t s_H264EncodeRates[] =
  255. {
  256. { cVEQuality_Min, 16, 160, 2.00f },
  257. { cVEQuality_Min, 720, 480, 2.26f },
  258. { cVEQuality_Min, 640, 960, 2.73f },
  259. { cVEQuality_Min, 960, 640, 2.91f },
  260. { cVEQuality_Min, 1280, 720, 3.56f },
  261. { cVEQuality_Min, 1920, 1080, 5.6f },
  262. { cVEQuality_Min, 2048, 2048, 6.6f },
  263. { cVEQuality_Low, 16, 160, 3.00f },
  264. { cVEQuality_Low, 720, 480, 3.65f },
  265. { cVEQuality_Low, 640, 960, 4.57f },
  266. { cVEQuality_Low, 960, 640, 5.03f },
  267. { cVEQuality_Low, 1280, 720, 6.41f },
  268. { cVEQuality_Low, 1920, 1080, 10.57f },
  269. { cVEQuality_Low, 2048, 2048, 13.0f },
  270. { cVEQuality_Normal, 16, 160, 5.00f },
  271. { cVEQuality_Normal, 720, 480, 6.4f },
  272. { cVEQuality_Normal, 640, 960, 8.25f },
  273. { cVEQuality_Normal, 960, 640, 9.24f },
  274. { cVEQuality_Normal, 1280, 720, 12.1f },
  275. { cVEQuality_Normal, 1920, 1080, 20.64f },
  276. { cVEQuality_Normal, 2048, 2048, 25.0f },
  277. { cVEQuality_High, 16, 160, 9.50f },
  278. { cVEQuality_High, 720, 480, 11.3f },
  279. { cVEQuality_High, 640, 960, 15.06f },
  280. { cVEQuality_High, 960, 640, 16.9f },
  281. { cVEQuality_High, 1280, 720, 22.72 },
  282. { cVEQuality_High, 1920, 1080, 40.06f },
  283. { cVEQuality_High, 2048, 2048, 52.5f },
  284. { cVEQuality_Max, 16, 160, 15.50f },
  285. { cVEQuality_Max, 720, 480, 19.33f },
  286. { cVEQuality_Max, 640, 960, 29.89f },
  287. { cVEQuality_Max, 960, 640, 26.82f },
  288. { cVEQuality_Max, 1280, 720, 41.08f },
  289. { cVEQuality_Max, 1920, 1080, 75.14f },
  290. { cVEQuality_Max, 2048, 2048, 90.0f },
  291. };
  292. // ===========================================================================
  293. // CQuickTimeVideoRecorder class - implements IVideoRecorder interface for
  294. // QuickTime, and buffers commands to the actual encoder object
  295. // ===========================================================================
// Construct an idle recorder: no encoder yet, no audio, movie not finished.
CQuickTimeVideoRecorder::CQuickTimeVideoRecorder() :
	m_pEncoder( nullptr ),
	m_LastResult( VideoResult::SUCCESS ),
	m_bHasAudio( false ),
	m_bMovieFinished( false )
{
}
// Destructor - aborts any movie still being encoded, then destroys the
// encoder.
CQuickTimeVideoRecorder::~CQuickTimeVideoRecorder()
{
	if ( m_pEncoder != nullptr )
	{
		// Abort any encoding in progress
		if ( !m_bMovieFinished )
		{
			AbortMovie();
		}
		SAFE_DELETE( m_pEncoder );
	}
}
  315. bool CQuickTimeVideoRecorder::CreateNewMovieFile( const char *pFilename, bool hasAudio )
  316. {
  317. SetResult( VideoResult::BAD_INPUT_PARAMETERS );
  318. AssertExitF( IS_NOT_EMPTY( pFilename ) );
  319. SetResult( VideoResult::OPERATION_ALREADY_PERFORMED );
  320. AssertExitF( m_pEncoder == nullptr && !m_bMovieFinished );
  321. // Create new video recorder
  322. m_pEncoder = new CQTVideoFileComposer();
  323. if ( !m_pEncoder->CreateNewMovie( pFilename, hasAudio ) )
  324. {
  325. SetResult( m_pEncoder->GetResult() ); // save the error result for after the encoder goes poof
  326. SAFE_DELETE( m_pEncoder );
  327. return false;
  328. }
  329. m_bHasAudio = hasAudio;
  330. SetResult( VideoResult::SUCCESS );
  331. return true;
  332. }
//-----------------------------------------------------------------------------
// Validates the requested codec / quality / frame size / FPS / gamma and
// forwards them to the encoder.  Requires CreateNewMovieFile to have been
// called first and the movie to not yet be finished.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::SetMovieVideoParameters( VideoEncodeCodec_t theCodec, int videoQuality, int movieFrameWidth, int movieFrameHeight, VideoFrameRate_t movieFPS, VideoEncodeGamma_t gamma )
{
	// range-check every input; a failed check exits with BAD_INPUT_PARAMETERS set
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( IS_IN_RANGECOUNT( theCodec, VideoEncodeCodec::DEFAULT_CODEC, VideoEncodeCodec::CODEC_COUNT ) );
	AssertExitF( IS_IN_RANGE( videoQuality, VideoEncodeQuality::MIN_QUALITY, VideoEncodeQuality::MAX_QUALITY ) );
	AssertExitF( IS_IN_RANGE( movieFrameWidth, cMinVideoFrameWidth, cMaxVideoFrameWidth ) && IS_IN_RANGE( movieFrameHeight, cMinVideoFrameHeight, cMaxVideoFrameHeight ) );
	AssertExitF( IS_IN_RANGE( movieFPS.GetFPS(), cMinFPS, cMaxFPS ) );
	AssertExitF( IS_IN_RANGECOUNT( gamma, VideoEncodeGamma::NO_GAMMA_ADJUST, VideoEncodeGamma::GAMMA_COUNT ) );
	// must have a live encoder and an unfinished movie
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_pEncoder != nullptr && !m_bMovieFinished );
	// note: parameter order differs between this interface and the composer's
	return m_pEncoder->SetMovieVideoParameters( movieFrameWidth, movieFrameHeight, movieFPS, theCodec, videoQuality, gamma );
}
//-----------------------------------------------------------------------------
// Validates the source-image format and dimensions and forwards them to the
// encoder.  Requires a live encoder and an unfinished movie.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::SetMovieSourceImageParameters( VideoEncodeSourceFormat_t srcImageFormat, int imgWidth, int imgHeight )
{
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( IS_IN_RANGECOUNT( srcImageFormat, VideoEncodeSourceFormat::VIDEO_FORMAT_FIRST, VideoEncodeSourceFormat::VIDEO_FORMAT_COUNT ) );
	AssertExitF( IS_IN_RANGE( imgWidth, cMinVideoFrameWidth, cMaxVideoFrameWidth ) && IS_IN_RANGE( imgHeight, cMinVideoFrameHeight, cMaxVideoFrameHeight ) );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_pEncoder != nullptr && !m_bMovieFinished );
	return m_pEncoder->SetMovieSourceImageParameters( imgWidth, imgHeight, srcImageFormat );
}
//-----------------------------------------------------------------------------
// Validates the source-audio format/rate/options and forwards them to the
// encoder.  Only legal when the movie was created with audio; the encoder may
// turn audio off as a result, so our cached audio flag is refreshed after.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::SetMovieSourceAudioParameters( AudioEncodeSourceFormat_t srcAudioFormat, int audioSampleRate, AudioEncodeOptions_t audioOptions, int audioSampleGroupSize )
{
	SetResult( VideoResult::ILLEGAL_OPERATION );
	AssertExitF( m_bHasAudio );
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( IS_IN_RANGECOUNT( srcAudioFormat, AudioEncodeSourceFormat::AUDIO_NONE, AudioEncodeSourceFormat::AUDIO_FORMAT_COUNT ) );
	// a sample rate of 0 is accepted as "unspecified"
	AssertExitF( audioSampleRate == 0 || IS_IN_RANGE( audioSampleRate, cMinSampleRate, cMaxSampleRate ) );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_pEncoder != nullptr && !m_bMovieFinished );
	bool result = m_pEncoder->SetMovieSourceAudioParameters( srcAudioFormat, audioSampleRate, audioOptions, audioSampleGroupSize );
	m_bHasAudio = m_pEncoder->HasAudio(); // Audio can be turned off after specifying, so reload status
	return result;
}
  367. bool CQuickTimeVideoRecorder::IsReadyToRecord()
  368. {
  369. return ( m_pEncoder == nullptr || m_bMovieFinished ) ? false : m_pEncoder->IsReadyToRecord();
  370. }
  371. VideoResult_t CQuickTimeVideoRecorder::GetLastResult()
  372. {
  373. return ( m_pEncoder == nullptr ) ? m_LastResult : m_pEncoder->GetResult();
  374. }
  375. void CQuickTimeVideoRecorder::SetResult( VideoResult_t resultCode )
  376. {
  377. m_LastResult = resultCode;
  378. if ( m_pEncoder != nullptr )
  379. {
  380. m_pEncoder->SetResult( resultCode );
  381. }
  382. }
//-----------------------------------------------------------------------------
// Appends one source frame to the movie.  nStrideAdjustBytes is the per-row
// padding in the frame buffer.  Requires the recorder to be fully configured
// (IsReadyToRecord).
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::AppendVideoFrame( void *pFrameBuffer, int nStrideAdjustBytes )
{
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( pFrameBuffer != nullptr );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( IsReadyToRecord() );
	return m_pEncoder->AppendVideoFrameToMedia( pFrameBuffer, nStrideAdjustBytes );
}
//-----------------------------------------------------------------------------
// Appends a buffer of audio samples (sampleSize bytes) to the movie.  Only
// legal when recording with audio and the recorder is fully configured.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::AppendAudioSamples( void *pSampleBuffer, size_t sampleSize )
{
	SetResult( VideoResult::ILLEGAL_OPERATION );
	AssertExitF( m_bHasAudio );
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( pSampleBuffer != nullptr );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( IsReadyToRecord() );
	return m_pEncoder->AppendAudioSamplesToMedia( pSampleBuffer, sampleSize );
}
  401. int CQuickTimeVideoRecorder::GetFrameCount()
  402. {
  403. return ( m_pEncoder == nullptr ) ? 0 : m_pEncoder->GetFrameCount();
  404. }
  405. int CQuickTimeVideoRecorder::GetSampleCount()
  406. {
  407. return ( m_pEncoder == nullptr ) ? 0 : m_pEncoder->GetSampleCount();
  408. }
  409. VideoFrameRate_t CQuickTimeVideoRecorder::GetFPS()
  410. {
  411. return ( m_pEncoder == nullptr ) ? VideoFrameRate_t( 0 ) : m_pEncoder->GetFPS();
  412. }
  413. int CQuickTimeVideoRecorder::GetSampleRate()
  414. {
  415. return ( m_pEncoder == nullptr ) ? 0 : m_pEncoder->GetSampleRate();
  416. }
//-----------------------------------------------------------------------------
// Abandons the movie being recorded.  The finished flag is set first so no
// further frames/samples are accepted, then the encoder discards its output.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::AbortMovie()
{
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_pEncoder != nullptr && !m_bMovieFinished );
	m_bMovieFinished = true;
	return m_pEncoder->AbortMovie();
}
//-----------------------------------------------------------------------------
// Finalizes the movie; when SaveMovieToDisk is true the encoder flushes and
// saves the output file.  One-shot: the movie is marked finished either way.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::FinishMovie( bool SaveMovieToDisk )
{
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_pEncoder != nullptr && !m_bMovieFinished );
	m_bMovieFinished = true;
	return m_pEncoder->FinishMovie( SaveMovieToDisk );
}
  431. #ifdef ENABLE_EXTERNAL_ENCODER_LOGGING
  432. bool CQuickTimeVideoRecorder::LogMessage( const char *pMsg )
  433. {
  434. if ( m_pEncoder != nullptr )
  435. {
  436. m_pEncoder->LogMessage( pMsg );
  437. }
  438. return true;
  439. }
  440. #endif
//-----------------------------------------------------------------------------
// Estimates the output movie file size in bytes for the given settings by
// bilinearly interpolating measured data rates across the quality and
// resolution preset tables.  On success *pEstSize holds the estimate; on
// invalid parameters returns false with *pEstSize = 0.
// NOTE(review): the rate table holds H.264 samples but the estimate is used
// for whichever codec is requested -- rough by design; confirm acceptable.
//-----------------------------------------------------------------------------
bool CQuickTimeVideoRecorder::EstimateMovieFileSize( size_t *pEstSize, int movieWidth, int movieHeight, VideoFrameRate_t movieFps, float movieDuration, VideoEncodeCodec_t theCodec, int videoQuality, AudioEncodeSourceFormat_t srcAudioFormat, int audioSampleRate )
{
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertPtrExitF( pEstSize );
	*pEstSize = 0;
	AssertExitF( IS_IN_RANGE( movieWidth, cMinVideoFrameWidth, cMaxVideoFrameWidth ) && IS_IN_RANGE( movieHeight, cMinVideoFrameHeight, cMaxVideoFrameHeight ) );
	AssertExitF( IS_IN_RANGE( movieFps.GetFPS(), cMinFPS, cMaxFPS ) && movieDuration > 0.0f );
	AssertExitF( IS_IN_RANGECOUNT( theCodec, VideoEncodeCodec::DEFAULT_CODEC, VideoEncodeCodec::CODEC_COUNT ) );
	AssertExitF( IS_IN_RANGE( videoQuality, VideoEncodeQuality::MIN_QUALITY, VideoEncodeQuality::MAX_QUALITY ) );
	AssertExitF( IS_IN_RANGECOUNT( srcAudioFormat, AudioEncodeSourceFormat::AUDIO_NONE, AudioEncodeSourceFormat::AUDIO_FORMAT_COUNT ) );
	AssertExitF( audioSampleRate == 0 || IS_IN_RANGE( audioSampleRate, cMinSampleRate, cMaxSampleRate ) );
	// Determine the Quality LERP
	// Find the tightest presets Q1 <= videoQuality <= Q2 and the blend factor
	// between them; an exact preset hit needs no interpolation on this axis.
	int Q1 = VideoEncodeQuality::MIN_QUALITY, Q2 = VideoEncodeQuality::MAX_QUALITY;
	float Qlerp = 0.0f;
	bool bQLerp = true;
	for ( int i = 0; i < ARRAYSIZE( s_QualityPresets ); i++ )
	{
		if ( s_QualityPresets[i] == videoQuality )
		{
			// exact match - anchor both ends to it
			Q1 = videoQuality;
			Q2 = videoQuality;
			Qlerp = 0.0f;
			bQLerp = false;
			break;
		}
		else if ( s_QualityPresets[i] < videoQuality && s_QualityPresets[i] > Q1 )
		{
			Q1 = s_QualityPresets[i];	// tightest preset below the request
		}
		else if ( s_QualityPresets[i] > videoQuality && s_QualityPresets[i] < Q2 )
		{
			Q2 = s_QualityPresets[i];	// tightest preset above the request
		}
	}
	if ( bQLerp )
	{
		Qlerp = ( (float) videoQuality - (float) Q1 ) / ( (float) Q2 - (float) Q1 ) ;
	}
	// determine the resolution lerp
	// Same idea as above, but resolution presets are bracketed by total pixel
	// count rather than by matching width/height independently.
	VideoRes_t RES1 = { cMinVideoFrameWidth, cMinVideoFrameHeight }, RES2 = { cMaxVideoFrameWidth, cMaxVideoFrameHeight };
	float RLerp = 0.0f;
	bool bRLerp = true;
	int nPixels = movieHeight * movieWidth;
	int R1pixels = RES1.X * RES1.Y;
	int R2pixels = RES2.X * RES2.Y;
	for ( int i = 0; i < ARRAYSIZE( s_ResolutionPresets ); i++ )
	{
		if ( s_ResolutionPresets[i].X == movieWidth && s_ResolutionPresets[i].Y == movieHeight )
		{
			// exact resolution match - no interpolation on this axis
			RES1 = s_ResolutionPresets[i];
			RES2 = s_ResolutionPresets[i];
			RLerp = 0.0f;
			bRLerp = false;
			break;
		}
		int rPixels = s_ResolutionPresets[i].X * s_ResolutionPresets[i].Y;
		if ( rPixels <= nPixels && rPixels > R1pixels )
		{
			RES1 = s_ResolutionPresets[i];	// largest preset at or below the request
			R1pixels = rPixels;
		}
		else if ( rPixels > nPixels && rPixels < R2pixels )
		{
			RES2 = s_ResolutionPresets[i];	// smallest preset above the request
			R2pixels = rPixels;
		}
	}
	if ( bRLerp )
	{
		RLerp = (float) (nPixels - R1pixels) / (float) ( R2pixels - R1pixels );
	}
	// Now we see what we need to do
	// We determine the estimated Data Rate (MBits/sec), interpolating on 0,
	// 1, or 2 axes depending on which inputs hit presets exactly
	float DR = 0.0f;
	if ( bQLerp == false && bRLerp == false )
	{
		// exact preset hit on both axes - direct table lookup
		DR = GetDataRate( videoQuality, movieWidth, movieHeight );
	}
	else if ( bQLerp == true && bRLerp == false )
	{
		// interpolate across quality at a fixed (preset) resolution
		float D1 = GetDataRate( Q1, movieWidth, movieHeight );
		float D2 = GetDataRate( Q2, movieWidth, movieHeight );
		DR = D1 + Qlerp * ( D2 - D1 );
	}
	else if ( bQLerp == false && bRLerp == true )
	{
		// interpolate across resolution at a fixed (preset) quality
		float D1 = GetDataRate( videoQuality, RES1.X, RES1.Y );
		float D2 = GetDataRate( videoQuality, RES2.X, RES2.Y );
		DR = D1 + RLerp * ( D2 - D1 );
	}
	else // need the full filter
	{
		// bilinear: blend across quality at each bracketing resolution,
		// then blend those results across resolution
		float D1 = GetDataRate( Q1, RES1.X, RES1.Y );
		float D2 = GetDataRate( Q1, RES2.X, RES2.Y );
		float D3 = GetDataRate( Q2, RES1.X, RES1.Y );
		float D4 = GetDataRate( Q2, RES2.X, RES2.Y );
		float I1 = D1 + Qlerp * ( D3 - D1 );
		float I2 = D2 + Qlerp * ( D4 - D2 );
		DR = I1 + RLerp * ( I2 - I1 );
	}
	// Now do the big computation
	// MBits/sec -> bytes/sec -> bytes over the movie duration
	// should this be 1024 * 1024?
	double VideoData = DR * 1000000 / 8 * movieDuration ;
	// Quick hack to guess at audio data size
	double audioData = 0;
	if ( srcAudioFormat == AudioEncodeSourceFormat::AUDIO_16BIT_PCMStereo )
	{
		audioData = ( audioSampleRate * 2 ) * ( 0.05 * DR );
	}
	*pEstSize = (size_t) VideoData + (size_t) audioData;
	SetResult( VideoResult::SUCCESS );
	return true;
}
  554. float CQuickTimeVideoRecorder::GetDataRate( int quality, int width, int height )
  555. {
  556. for (int i = 0; i < ARRAYSIZE( s_H264EncodeRates ); i++ )
  557. {
  558. if ( s_H264EncodeRates[i].m_QualitySetting == quality && s_H264EncodeRates[i].m_XResolution == width && s_H264EncodeRates[i].m_YResolution == height )
  559. {
  560. return s_H264EncodeRates[i].m_DataRate;
  561. }
  562. }
  563. Assert( false );
  564. return 0.0f;
  565. }
// ------------------------------------------------------------------------
// CQTVideoFileComposer - Class to encapsulate the creation of a QuickTime
// Movie from a sequence of uncompressed images and (future) audio samples
// ------------------------------------------------------------------------
// Constructor: zero/default every piece of state.  All real setup happens
// later in CreateNewMovie and the SetMovie*Parameters calls.
CQTVideoFileComposer::CQTVideoFileComposer() :
	m_LastResult( VideoResult::SUCCESS ),
	// lifecycle / configuration flags
	m_bMovieCreated( false ),
	m_bHasAudioTrack( false ),
	m_bMovieConfigured( false ),
	m_bSourceImagesConfigured( false ),
	m_bSourceAudioConfigured( false ),
	m_bComposingMovie( false ),
	m_bMovieCompleted( false ),
	// progress counters
	m_nFramesAdded( 0 ),
	m_nAudioFramesAdded( 0 ),
	m_nSamplesAdded( 0 ),
	m_nSamplesAddedToMedia( 0 ),
	// output movie dimensions and timing
	m_MovieFrameWidth( 0 ),
	m_MovieFrameHeight( 0 ),
	m_MovieTimeScale( 0 ),
	m_DurationPerFrame( 0 ),
	// audio configuration and buffering
	m_AudioOptions( AudioEncodeOptions::NO_AUDIO_OPTIONS ),
	m_SampleGrouping( AG_NONE ),
	m_nAudioSampleGroupSize( 0 ),
	m_AudioSourceFrequency( 0 ),
	m_AudioBytesPerSample( 0 ),
	m_bBufferSourceAudio( false ),
	m_bLimitAudioDurationToVideo( false ),
	m_srcAudioBuffer( nullptr ),
	m_srcAudioBufferSize( 0 ),
	m_srcAudioBufferCurrentSize( 0 ),
	m_AudioSampleFrameCounter( 0 ),
	// source image configuration and buffers
	m_FileName( nullptr ),
	m_SrcImageWidth( 0 ),
	m_SrcImageHeight( 0 ),
	m_ScrImageMaxCompressedSize( 0 ),
	m_SrcImageBuffer( nullptr ),
	m_SrcImageCompressedBuffer( nullptr ),
	m_SrcPixelFormat( 0 ),
	m_SrcBytesPerPixel( 0 ),
	// GWorld (QuickTime offscreen) state
	m_GWorldPixelFormat( 0 ),
	m_GWorldBytesPerPixel( 0 ),
	m_GWorldImageWidth( 0 ),
	m_GWorldImageHeight( 0 ),
	m_srcSoundDescription( nullptr ),
	// encode settings (defaults until SetMovieVideoParameters)
	m_EncodeQuality( CQTVideoFileComposer::DEFAULT_ENCODE_QUALITY ),
	m_VideoCodecToUse( CQTVideoFileComposer::DEFAULT_CODEC ),
	m_EncodeGamma( CQTVideoFileComposer::DEFAULT_GAMMA ),
	// QuickTime movie/track/media objects
	m_theSrcGWorld( nullptr ),
	m_MovieFileDataRef( nullptr ),
	m_MovieFileDataRefType( 0 ),
	m_MovieFileDataHandler( nullptr ),
	m_theMovie( nullptr ),
	m_theVideoTrack( nullptr),
	m_theAudioTrack( nullptr ),
	m_theVideoMedia( nullptr ),
	m_theAudioMedia( nullptr )
#ifdef LOG_ENCODER_OPERATIONS
	,m_LogFile( FILESYSTEM_INVALID_HANDLE )
#endif
{
	m_MovieRecordFPS.SetFPS( 0, false );
	m_GWorldRect.top = m_GWorldRect.left = m_GWorldRect.bottom = m_GWorldRect.right = 0;
#ifdef LOG_FRAMES_TO_TGA
	ZeroVar( m_TGAFileBase );
#endif
}
// Destructor: aborts any in-progress composition, closes the log file (when
// logging is compiled in), then frees all buffers and QuickTime handles /
// GWorld via the SAFE_* helper macros.
CQTVideoFileComposer::~CQTVideoFileComposer()
{
	if ( m_bComposingMovie )
	{
		AbortMovie();
	}
#ifdef LOG_ENCODER_OPERATIONS
	if ( m_LogFile != FILESYSTEM_INVALID_HANDLE )
	{
		g_pFullFileSystem->Close( m_LogFile );
		m_LogFile = FILESYSTEM_INVALID_HANDLE;
	}
#endif
	SAFE_DELETE_ARRAY( m_FileName );
	SAFE_DELETE_ARRAY( m_SrcImageBuffer );
	SAFE_DELETE_ARRAY( m_srcAudioBuffer );
	SAFE_DISPOSE_HANDLE( m_MovieFileDataRef );
	SAFE_DISPOSE_HANDLE( m_srcSoundDescription );
	SAFE_DISPOSE_HANDLE( m_SrcImageCompressedBuffer );
	SAFE_DISPOSE_GWORLD( m_theSrcGWorld );
}
  654. #ifdef LOG_ENCODER_OPERATIONS
  655. void CQTVideoFileComposer::LogMsg( const char* pMsg, ... )
  656. {
  657. const int MAX_TEXT = 8192;
  658. static char messageBuf[MAX_TEXT];
  659. if ( m_LogFile == FILESYSTEM_INVALID_HANDLE || pMsg == nullptr )
  660. {
  661. return;
  662. }
  663. va_list marker;
  664. va_start( marker, pMsg );
  665. #ifdef _WIN32
  666. int len = _vsnprintf( messageBuf, MAX_TEXT, pMsg, marker );
  667. #elif POSIX
  668. int len = vsnprintf( messageBuf, MAX_TEXT, pMsg, marker );
  669. #else
  670. #error "define vsnprintf type."
  671. #endif
  672. // Len < 0 represents an overflow
  673. if( len < 0 )
  674. {
  675. ((char*) pMsg)[MAX_TEXT-1] = nullchar;
  676. }
  677. va_end( marker );
  678. g_pFullFileSystem->Write( messageBuf, V_strlen( messageBuf ), m_LogFile );
  679. }
  680. #endif
  681. bool CQTVideoFileComposer::CreateNewMovie( const char *fileName, bool hasAudio )
  682. {
  683. // Validate input and state
  684. SetResult( VideoResult::BAD_INPUT_PARAMETERS );
  685. AssertExitF( IS_NOT_EMPTY( fileName ) );
  686. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  687. AssertExitF( !m_bMovieCreated && !m_bMovieCompleted );
  688. #ifdef LOG_ENCODER_OPERATIONS
  689. char logFileName[MAX_PATH];
  690. V_strncpy( logFileName, fileName, MAX_PATH );
  691. V_SetExtension( logFileName, ".log", MAX_PATH );
  692. m_LogFile = g_pFullFileSystem->Open( logFileName, "wb" );
  693. const char* aMsg = (hasAudio) ? "HAS" : "DOES NOT HAVE";
  694. LogMsg( "Creating Video File: '%s' - %s AUDIO TRACK\n", fileName, aMsg );
  695. #endif
  696. // now create the movie file
  697. OSErr status = noErr;
  698. OSType movieType = FOUR_CHAR_CODE('TVOD'); // todo - change movie type??
  699. m_MovieFileDataRef = nullptr;
  700. m_MovieFileDataRefType = 0;
  701. m_MovieFileDataHandler = nullptr;
  702. CFStringRef imageStrRef = CFStringCreateWithCString ( NULL, fileName, 0 );
  703. status = QTNewDataReferenceFromFullPathCFString( imageStrRef, (QTPathStyle) kQTNativeDefaultPathStyle, 0, &m_MovieFileDataRef, &m_MovieFileDataRefType );
  704. AssertExitF( status == noErr );
  705. status = CreateMovieStorage( m_MovieFileDataRef, m_MovieFileDataRefType, movieType, smCurrentScript, createMovieFileDeleteCurFile | createMovieFileDontCreateResFile, &m_MovieFileDataHandler, &m_theMovie );
  706. AssertExitF( status == noErr );
  707. CFRelease( imageStrRef );
  708. m_FileName = COPY_STRING( fileName );
  709. #ifdef LOG_FRAMES_TO_TGA
  710. V_strncpy( m_TGAFileBase, m_FileName, sizeof( m_TGAFileBase ) );
  711. V_StripExtension( m_TGAFileBase, m_TGAFileBase, sizeof( m_TGAFileBase ) );
  712. #endif
  713. // we did it! party on...
  714. SetResult( VideoResult::SUCCESS );
  715. m_bMovieCreated = true;
  716. m_bHasAudioTrack = hasAudio;
  717. return m_bMovieCreated;
  718. }
  719. bool CQTVideoFileComposer::SetMovieVideoParameters( int width, int height, VideoFrameRate_t movieFPS, VideoEncodeCodec_t desiredCodec, int encodeQuality, VideoEncodeGamma_t gamma )
  720. {
  721. // Validate input and state
  722. SetResult( VideoResult::BAD_INPUT_PARAMETERS );
  723. AssertExitF( IS_IN_RANGE( width, cMinVideoFrameWidth, cMaxVideoFrameWidth ) && IS_IN_RANGE( height, cMinVideoFrameHeight, cMaxVideoFrameHeight ) );
  724. AssertExitF( IS_IN_RANGE( movieFPS.GetFPS(), cMinFPS, cMaxFPS ) );
  725. AssertExitF( IS_IN_RANGECOUNT( desiredCodec, VideoEncodeCodec::DEFAULT_CODEC, VideoEncodeCodec::CODEC_COUNT ) );
  726. AssertExitF( IS_IN_RANGE( encodeQuality, VideoEncodeQuality::MIN_QUALITY, VideoEncodeQuality::MAX_QUALITY ) );
  727. AssertExitF( IS_IN_RANGECOUNT( gamma, VideoEncodeGamma::NO_GAMMA_ADJUST, VideoEncodeGamma::GAMMA_COUNT ) );
  728. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  729. AssertExitF( m_bMovieCreated && !m_bMovieConfigured );
  730. // Configure video parameters
  731. m_MovieFrameWidth = width;
  732. m_MovieFrameHeight = height;
  733. // map the requested codec in
  734. switch( desiredCodec )
  735. {
  736. case VideoEncodeCodec::MPEG2_CODEC:
  737. {
  738. m_VideoCodecToUse = kMpegYUV420CodecType;
  739. break;
  740. }
  741. case VideoEncodeCodec::MPEG4_CODEC:
  742. {
  743. m_VideoCodecToUse = kMPEG4VisualCodecType;
  744. break;
  745. }
  746. case VideoEncodeCodec::H261_CODEC:
  747. {
  748. m_VideoCodecToUse = kH261CodecType;
  749. break;
  750. }
  751. case VideoEncodeCodec::H263_CODEC:
  752. {
  753. m_VideoCodecToUse = kH263CodecType;
  754. break;
  755. }
  756. case VideoEncodeCodec::H264_CODEC:
  757. {
  758. m_VideoCodecToUse = kH264CodecType;
  759. break;
  760. }
  761. case VideoEncodeCodec::MJPEG_A_CODEC:
  762. {
  763. m_VideoCodecToUse = kMotionJPEGACodecType;
  764. break;
  765. }
  766. case VideoEncodeCodec::MJPEG_B_CODEC:
  767. {
  768. m_VideoCodecToUse = kMotionJPEGBCodecType;
  769. break;
  770. }
  771. case VideoEncodeCodec::SORENSON3_CODEC:
  772. {
  773. m_VideoCodecToUse = kSorenson3CodecType;
  774. break;
  775. }
  776. case VideoEncodeCodec::CINEPACK_CODEC:
  777. {
  778. m_VideoCodecToUse = kCinepakCodecType;
  779. break;
  780. }
  781. default: // should never hit this because we are already range checked
  782. {
  783. m_VideoCodecToUse = CQTVideoFileComposer::DEFAULT_CODEC;
  784. break;
  785. }
  786. }
  787. // Determine if codec is available...
  788. CodecInfo theInfo;
  789. OSErr status = GetCodecInfo( &theInfo, m_VideoCodecToUse, 0 );
  790. if ( status == noCodecErr )
  791. {
  792. SetResult( VideoResult::CODEC_NOT_AVAILABLE );
  793. return false;
  794. }
  795. AssertExitF( status == noErr );
  796. #ifdef LOG_ENCODER_OPERATIONS
  797. char codecName[64];
  798. ZeroVar( codecName );
  799. V_memcpy( codecName, &theInfo.typeName[1], (int) theInfo.typeName[0] );
  800. LogMsg( "Video Image Size is (%d x %d)\n", m_MovieFrameWidth, m_MovieFrameHeight );
  801. LogMsg( "Codec selected is %s\n", codecName );
  802. LogMsg( "Encoding Quality = %d\n", (int) encodeQuality );
  803. LogMsg( "Encode Gamma = %d\n", (int) gamma );
  804. #endif
  805. // convert encoding quality into quicktime specific value
  806. int Q = (int) encodeQuality; - (int) VideoEncodeQuality::MIN_QUALITY;
  807. int MaxQ = (int) VideoEncodeQuality::MAX_QUALITY - (int) VideoEncodeQuality::MIN_QUALITY;
  808. m_EncodeQuality = codecLosslessQuality * ( (float) Q / (float) MaxQ ) ;
  809. clamp( m_EncodeQuality, codecMinQuality, codecMaxQuality );
  810. // convert the gamma correction value into quicktime specific values
  811. switch( gamma )
  812. {
  813. case VideoEncodeGamma::NO_GAMMA_ADJUST:
  814. {
  815. m_EncodeGamma = kQTUseSourceGammaLevel;
  816. break;
  817. }
  818. case VideoEncodeGamma::PLATFORM_STANDARD_GAMMA:
  819. {
  820. m_EncodeGamma = kQTUsePlatformDefaultGammaLevel;
  821. break;
  822. }
  823. case VideoEncodeGamma::GAMMA_1_8:
  824. {
  825. m_EncodeGamma = 0x0001CCCC; // (Fixed) Gamma 1.8
  826. break;
  827. }
  828. case VideoEncodeGamma::GAMMA_2_2:
  829. {
  830. m_EncodeGamma = kQTCCIR601VideoGammaLevel;
  831. break;
  832. }
  833. case VideoEncodeGamma::GAMMA_2_5:
  834. {
  835. m_EncodeGamma = 0x00028000; // (Fixed) Gamma 2.5
  836. break;
  837. }
  838. default:
  839. {
  840. m_EncodeGamma = CQTVideoFileComposer::DEFAULT_GAMMA;
  841. break;
  842. }
  843. }
  844. // Process the framerate into usable values
  845. m_MovieRecordFPS = movieFPS;
  846. m_DurationPerFrame = m_MovieRecordFPS.GetUnitsPerFrame();
  847. m_MovieTimeScale = m_MovieRecordFPS.GetUnitsPerSecond();
  848. AssertExitF( m_DurationPerFrame > 0 && m_MovieTimeScale > 0 );
  849. /* if ( movieFPS.IsNTSCRate() )
  850. {
  851. m_MovieTimeScale = movieFPS.GetIntFPS() * 1000;
  852. m_DurationPerFrame = 1001;
  853. }
  854. else if ( movieFPS.GetUnitsPerSecond() % movieFPS.GetUnitsPerFrame() == 0 ) // integer frame rate?
  855. {
  856. m_MovieTimeScale = movieFPS.GetIntFPS() * 1000;
  857. m_DurationPerFrame = 1000;
  858. }
  859. else // round to nearest .001 second
  860. {
  861. m_MovieTimeScale = (int) ( movieFPS.GetFPS() * 1000 );
  862. m_DurationPerFrame = 1000;
  863. }
  864. */
  865. #ifdef LOG_ENCODER_OPERATIONS
  866. LogMsg( "Video Frame Rate = %f FPS\n %d time units per second\n %d time units per frame\n", m_MovieRecordFPS.GetFPS(), m_MovieRecordFPS.GetUnitsPerSecond(), m_MovieRecordFPS.GetUnitsPerFrame() );
  867. if ( m_MovieRecordFPS.IsNTSCRate() )
  868. LogMsg( " IS CONSIDERED NTSC RATE\n");
  869. LogMsg( "MovieTimeScale is being set to %d\nDuration Per Frame is %d\n\n", m_MovieTimeScale, m_DurationPerFrame );
  870. #endif
  871. // Create the video track and media
  872. SetResult( VideoResult::VIDEO_ERROR_OCCURED );
  873. m_theVideoTrack = NewMovieTrack( m_theMovie, FixRatio( width, 1 ), FixRatio( height, 1 ), kNoVolume );
  874. AssertExitF( GetMoviesError() == noErr );
  875. m_theVideoMedia = NewTrackMedia( m_theVideoTrack, VideoMediaType, m_MovieTimeScale, NULL, 0 );
  876. AssertExitF( GetMoviesError() == noErr );
  877. // we have successfully configured the output movie
  878. SetResult( VideoResult::SUCCESS );
  879. m_bMovieConfigured = true;
  880. return true;
  881. }
//-----------------------------------------------------------------------------
// Configures the format and dimensions of the raw frames that will be fed to
// AppendVideoFrameToMedia(), and builds the QuickTime GWorld (offscreen
// graphics world) those frames are copied into before compression.
// Must be called after SetMovieVideoParameters() and before any frames are
// appended.  Returns CheckForReadyness(), which kicks off movie composition
// once both video and (if enabled) audio are configured.
//-----------------------------------------------------------------------------
bool CQTVideoFileComposer::SetMovieSourceImageParameters( int srcWidth, int srcHeight, VideoEncodeSourceFormat_t srcImageFormat )
{
	// Validate input and state
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( IS_IN_RANGE( srcWidth, cMinVideoFrameWidth, cMaxVideoFrameWidth ) && IS_IN_RANGE( srcHeight, cMinVideoFrameHeight, cMaxVideoFrameHeight ) );
	AssertExitF( IS_IN_RANGECOUNT( srcImageFormat, VideoEncodeSourceFormat::VIDEO_FORMAT_FIRST, VideoEncodeSourceFormat::VIDEO_FORMAT_COUNT ) );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_bMovieCreated && !m_bMovieCompleted && m_bMovieConfigured && !m_bSourceImagesConfigured );
	// Map the engine-side source format onto the matching QuickTime pixel
	// format (0 should be unreachable given the range check above)
	m_SrcPixelFormat = ( srcImageFormat == VideoEncodeSourceFormat::BGRA_32BIT ) ? k32BGRAPixelFormat :
	( srcImageFormat == VideoEncodeSourceFormat::BGR_24BIT ) ? k24BGRPixelFormat :
	( srcImageFormat == VideoEncodeSourceFormat::RGB_24BIT ) ? k24RGBPixelFormat :
	( srcImageFormat == VideoEncodeSourceFormat::RGBA_32BIT ) ? k32RGBAPixelFormat : 0;
	m_SrcBytesPerPixel = GetBytesPerPixel( m_SrcPixelFormat );
	// Setup source image size related stuff (size in bytes of one raw frame)
	m_SrcImageWidth = srcWidth;
	m_SrcImageHeight = srcHeight;
	m_SrcImageSize = srcWidth * srcHeight * m_SrcBytesPerPixel;
	// Setup the GWorld to hold the frame to video compress
	m_GWorldPixelFormat = k32BGRAPixelFormat; // can use k24BGRPixelFormat on Win32.. but it compresses wrong on OSX?
	m_GWorldBytesPerPixel = 4;
	m_GWorldImageWidth = CONTAINING_MULTIPLE_OF( srcWidth, 4 ); // make sure the encoded surface is a multiple of 4 in each dimensions
	m_GWorldImageHeight = CONTAINING_MULTIPLE_OF( srcHeight, 4 );
	m_GWorldRect.top = m_GWorldRect.left = 0;
	m_GWorldRect.bottom = m_GWorldImageHeight;
	m_GWorldRect.right = m_GWorldImageWidth;
	// Setup the QuickTime Graphics World for incoming frames of video.
	// Always use a 32-bit GWorld to avoid encoding bugs (see note above)
	SetResult( VideoResult::VIDEO_ERROR_OCCURED );
	OSErr status = QTNewGWorld( &m_theSrcGWorld, m_GWorldPixelFormat, &m_GWorldRect, nil, nil, 0 );
	AssertExitF( status == noErr );
	// Tag the GWorld's pixmap with the gamma level chosen in
	// SetMovieVideoParameters so the compressor sees the right gamma
	PixMapHandle thePixMap = GetGWorldPixMap( m_theSrcGWorld );
	AssertPtrExitF( thePixMap );
	status = QTSetPixMapHandleRequestedGammaLevel( thePixMap, m_EncodeGamma );
	AssertExitF( status == noErr );
	// Set encoding buffer to max size at max quality
	// Should we try it with the actual quality setting?
	status = GetMaxCompressionSize( thePixMap, &m_GWorldRect, 0, m_EncodeQuality, m_VideoCodecToUse,
	(CompressorComponent)anyCodec, (long*) &m_ScrImageMaxCompressedSize );
	AssertExitF( status == noErr && m_ScrImageMaxCompressedSize > 0 );
	// allocated buffers for the uncompressed and compressed images
	// (m_SrcImageCompressedBuffer is a Mac Handle so CompressImage can use it)
	m_SrcImageBuffer = new byte[ m_SrcImageSize ];
	m_SrcImageCompressedBuffer = NewHandle( m_ScrImageMaxCompressedSize );
	// we have successfully configured the video input images
	SetResult( VideoResult::SUCCESS );
	m_bSourceImagesConfigured = true;
	return CheckForReadyness(); // We are ready to go if audio is...
}
  930. bool CQTVideoFileComposer::SetMovieSourceAudioParameters( AudioEncodeSourceFormat_t srcAudioFormat, int audioSampleRate, AudioEncodeOptions_t audioOptions, int audioSampleGroupSize )
  931. {
  932. SetResult( VideoResult::ILLEGAL_OPERATION );
  933. AssertExitF( m_bHasAudioTrack );
  934. // Validate input and state
  935. SetResult( VideoResult::BAD_INPUT_PARAMETERS );
  936. AssertExitF( IS_IN_RANGECOUNT( srcAudioFormat, AudioEncodeSourceFormat::AUDIO_NONE, AudioEncodeSourceFormat::AUDIO_FORMAT_COUNT ) );
  937. AssertExitF( audioSampleRate == 0 || IS_IN_RANGE( audioSampleRate, cMinSampleRate, cMaxSampleRate ) );
  938. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  939. AssertExitF( m_bMovieCreated && !m_bMovieCompleted && m_bMovieConfigured && !m_bSourceAudioConfigured );
  940. // it is possible to disable audio here by passing in AudioEncodeSourceFormat::AUDIO_NONE even
  941. // if the movie was created with the hasHadio flag set to true, or by setting the sample rate to 0
  942. if ( srcAudioFormat == AudioEncodeSourceFormat::AUDIO_NONE || audioSampleRate == 0 )
  943. {
  944. m_bHasAudioTrack = false;
  945. }
  946. else
  947. {
  948. m_AudioOptions = audioOptions;
  949. // Setup the audio frequency
  950. m_AudioSourceFrequency = audioSampleRate;
  951. // Create the audio track and media
  952. SetResult( VideoResult::AUDIO_ERROR_OCCURED );
  953. m_theAudioTrack = NewMovieTrack( m_theMovie, 0, 0, kFullVolume );
  954. AssertExitF( GetMoviesError() == noErr );
  955. m_theAudioMedia = NewTrackMedia( m_theAudioTrack, SoundMediaType, (TimeScale) audioSampleRate, NULL, 0 );
  956. AssertExitF( GetMoviesError() == noErr );
  957. // Setup the Audio Sound description
  958. AudioStreamBasicDescription inASBD;
  959. switch( srcAudioFormat )
  960. {
  961. case AudioEncodeSourceFormat::AUDIO_16BIT_PCMStereo:
  962. {
  963. inASBD.mSampleRate = Float64( audioSampleRate );
  964. inASBD.mFormatID = kAudioFormatLinearPCM;
  965. inASBD.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
  966. inASBD.mBytesPerPacket = 4;
  967. inASBD.mFramesPerPacket = 1;
  968. inASBD.mBytesPerPacket = 4;
  969. inASBD.mChannelsPerFrame = 2;
  970. inASBD.mBitsPerChannel = 16;
  971. inASBD.mReserved = 0;
  972. break;
  973. }
  974. default:
  975. {
  976. Assert( false ); // Impossible.. we hope
  977. return false;
  978. }
  979. }
  980. m_AudioBytesPerSample = inASBD.mBytesPerPacket;
  981. OSStatus result = QTSoundDescriptionCreate( &inASBD, NULL, 0, NULL, 0, kQTSoundDescriptionKind_Movie_LowestPossibleVersion, (SoundDescriptionHandle*) &m_srcSoundDescription );
  982. AssertExitF( result == noErr );
  983. // Setup audio sample buffering if needed
  984. m_bLimitAudioDurationToVideo = BITFLAGS_SET( audioOptions, AudioEncodeOptions::LIMIT_AUDIO_TRACK_TO_VIDEO_DURATION );
  985. m_SampleGrouping = ( BITFLAGS_SET( audioOptions, AudioEncodeOptions::GROUP_SIZE_IS_VIDEO_FRAME ) ) ? CQTVideoFileComposer::AG_PER_FRAME :
  986. ( BITFLAGS_SET( audioOptions, AudioEncodeOptions::USE_AUDIO_ENCODE_GROUP_SIZE ) ) ? CQTVideoFileComposer::AG_FIXED_SIZE : AG_NONE;
  987. // check for invalid sample grouping duration
  988. if ( m_SampleGrouping == AG_FIXED_SIZE && ( audioSampleGroupSize < MIN_AUDIO_SAMPLE_GROUP_SIZE || audioSampleGroupSize > MAX_AUDIO_GROUP_SIZE_IN_SEC * m_AudioSourceFrequency ) )
  989. {
  990. SetResult( VideoResult::BAD_INPUT_PARAMETERS );
  991. Assert( false );
  992. return false;
  993. }
  994. m_bBufferSourceAudio = ( m_SampleGrouping != AG_NONE ) || m_bLimitAudioDurationToVideo;
  995. // Set up an audio buffer than can hold the maxium specified duration
  996. if ( m_bBufferSourceAudio )
  997. {
  998. m_srcAudioBufferSize = m_AudioSourceFrequency * m_AudioBytesPerSample * MAX_AUDIO_GROUP_SIZE_IN_SEC;
  999. m_srcAudioBuffer = new byte[m_srcAudioBufferSize];
  1000. m_srcAudioBufferCurrentSize = 0;
  1001. }
  1002. if ( m_SampleGrouping == AG_FIXED_SIZE )
  1003. {
  1004. // Set up to emit audio after fixed number of samples
  1005. m_nAudioSampleGroupSize = audioSampleGroupSize;
  1006. }
  1007. if ( m_SampleGrouping == AG_PER_FRAME )
  1008. {
  1009. m_AudioSampleFrameCounter = 0;
  1010. }
  1011. m_bSourceAudioConfigured = true;
  1012. }
  1013. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1014. LogMsg( "Audio Sample Grouping Mode = %d\nSample Group Size = %d \n", (int) m_SampleGrouping, m_nAudioSampleGroupSize );
  1015. LogMsg( "Audio Track Sample Rate is %d samples per second\n", m_AudioSourceFrequency );
  1016. LogMsg( "Estimated Samples per frame = %d\n\n", (int) ( m_AudioSourceFrequency / m_MovieRecordFPS.GetFPS() ) );
  1017. #endif
  1018. // finish up
  1019. SetResult( VideoResult::SUCCESS );
  1020. return CheckForReadyness(); // We are ready to go if video is...
  1021. }
  1022. // Returns true if we are not ready, or if we began movie creation successfully
  1023. // This ONLY returns false if it tried to begin the movie creation process and failed
  1024. bool CQTVideoFileComposer::CheckForReadyness()
  1025. {
  1026. return ( m_bMovieCreated && !m_bMovieCompleted && !m_bComposingMovie && m_bMovieConfigured && m_bSourceImagesConfigured &&
  1027. m_bSourceAudioConfigured == m_bHasAudioTrack ) ? BeginMovieCreation() : true;
  1028. }
  1029. bool CQTVideoFileComposer::BeginMovieCreation()
  1030. {
  1031. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  1032. AssertExitF( m_bMovieCreated && !m_bMovieCompleted && !m_bComposingMovie &&
  1033. m_bMovieConfigured && m_bSourceImagesConfigured && m_bSourceAudioConfigured == m_bHasAudioTrack );
  1034. // Open the tracks up for editing
  1035. SetResult( VideoResult::VIDEO_ERROR_OCCURED );
  1036. OSErr status = BeginMediaEdits( m_theVideoMedia );
  1037. AssertExitF( status == noErr );
  1038. if ( m_bHasAudioTrack )
  1039. {
  1040. OSErr status = BeginMediaEdits( m_theAudioMedia );
  1041. AssertExitF( status == noErr );
  1042. }
  1043. #ifdef LOG_ENCODER_OPERATIONS
  1044. LogMsg( "Media Tracks opened for editing\n\n" );
  1045. #endif
  1046. // We are now ready to take in data to make a movie with
  1047. SetResult( VideoResult::SUCCESS );
  1048. m_bComposingMovie = true;
  1049. return true;
  1050. }
//-----------------------------------------------------------------------------
// Appends one raw video frame to the movie's video media.
//   ImageBuffer       - raw pixels in the format set by
//                       SetMovieSourceImageParameters (m_SrcPixelFormat)
//   strideAdjustBytes - extra bytes per row beyond width * bytesPerPixel
// Pipeline: copy/convert the raw pixels into the GWorld, compress the GWorld
// with the configured codec/quality, tag gamma, then AddMediaSample the
// compressed frame with a fixed per-frame duration.
//-----------------------------------------------------------------------------
bool CQTVideoFileComposer::AppendVideoFrameToMedia( void *ImageBuffer, int strideAdjustBytes )
{
#ifdef LOG_ENCODER_OPERATIONS
	LogMsg( "AppendVideoFrameToMedia( %8.8x ) called for %d --- ", ImageBuffer, m_nFramesAdded+1 );
#endif
	// Validate input and state
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( ImageBuffer != nullptr );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_bComposingMovie && !m_bMovieCompleted );
	SetResult( VideoResult::VIDEO_ERROR_OCCURED );
	// Get the pixmap
	PixMapHandle thePixMap = GetGWorldPixMap( m_theSrcGWorld );
	AssertPtrExitF( thePixMap );
	// copy the raw image into our bitmap; pixels must stay locked until the
	// copy completes so the base address can't move
	AssertExitF( LockPixels( thePixMap ) );
	byte *srcBase = (byte*) ImageBuffer;
	int srcStride = m_SrcImageWidth * m_SrcBytesPerPixel + strideAdjustBytes;
	byte *dstBase = nullptr;
	int dstStride = 0;
#if defined ( WIN32 )
	// Get the HBITMAP of our GWorld
	HBITMAP theHBITMAP = (HBITMAP) GetPortHBITMAP( (GrafPtr) m_theSrcGWorld );
	// retrieve the bitmap info header information
	BITMAP bmp;
	AssertExitF( GetObject( theHBITMAP, sizeof(BITMAP), (LPSTR) &bmp) );
	// validate the BMP we just got
	// NOTE(review): this compares the GWorld bitmap against the SOURCE image
	// dimensions, but the GWorld was created rounded up to a multiple of 4
	// (m_GWorldImageWidth/Height) — confirm this assert holds for non-multiple-
	// of-4 source sizes
	AssertExitF( bmp.bmWidth == m_SrcImageWidth && bmp.bmHeight == m_SrcImageHeight && bmp.bmBitsPixel == 8 * m_GWorldBytesPerPixel );
	// setup the pixel copy info
	dstBase = (byte*)bmp.bmBits;
	dstStride = bmp.bmWidthBytes;
#elif defined ( OSX )
	// setup the pixel copy info
	dstBase = (byte*) GetPixBaseAddr( thePixMap );
	dstStride = GetPixRowBytes( thePixMap );
#endif
	AssertExitF( dstBase != nullptr && dstStride > 0 );
	// save a TGA if we are running diagnostics
#if defined ( LOG_FRAMES_TO_TGA )
	if ( ( m_nFramesAdded % LOG_FRAMES_TO_TGA_INTERVAL ) == 0 )
	{
		SaveToTargaFile( m_nFramesAdded, m_TGAFileBase, m_SrcImageWidth, m_SrcImageHeight, srcBase, m_SrcPixelFormat, strideAdjustBytes );
	}
#endif
	// copy the supplied pixel buffer into our GWORLD data, converting the
	// pixel format if the source differs from the GWorld's 32-bit format
	if ( !CopyBitMapPixels( m_SrcImageWidth, m_SrcImageHeight,
	m_SrcPixelFormat, srcBase, srcStride,
	m_GWorldPixelFormat, dstBase, dstStride ) )
	{
		Assert( false );
		return false;
	}
	// You are now free to move about the cabin...
	UnlockPixels( thePixMap );
	// allocate a handle which CompressImage will resize...
	// NOTE(review): this allocates sizeof(ImageDescriptionHandle) — the size of
	// a pointer, not sizeof(ImageDescription). It appears to rely on
	// CompressImage growing the handle; confirm this is intentional.
	ImageDescriptionHandle theImageDescHandle = (ImageDescriptionHandle) NewHandle( sizeof(ImageDescriptionHandle) );
	AssertExitF( theImageDescHandle != nullptr );
	// compress the single image into the pre-sized compressed buffer
	// (sized by GetMaxCompressionSize during configuration)
	OSErr status = CompressImage( thePixMap, &m_GWorldRect, m_EncodeQuality,
	m_VideoCodecToUse, theImageDescHandle, *m_SrcImageCompressedBuffer );
	if ( status != noErr )
	{
		Assert( false ); // tell the user
		SAFE_DISPOSE_HANDLE( theImageDescHandle );
		return false;
	}
	TimeValue addedTime = 0;
	// Lets add gamma info the image description
	if ( m_EncodeGamma != kQTUseSourceGammaLevel )
	{
		Fixed newGamma = m_EncodeGamma;
		status = ICMImageDescriptionSetProperty( theImageDescHandle, kQTPropertyClass_ImageDescription, kICMImageDescriptionPropertyID_GammaLevel, sizeof( newGamma ), &newGamma );
		AssertExitF( status == noErr );
	}
	// add the compressed image to the movie stream; each frame gets the fixed
	// m_DurationPerFrame duration in m_MovieTimeScale units
	status = AddMediaSample( m_theVideoMedia, m_SrcImageCompressedBuffer, 0, (**theImageDescHandle).dataSize, m_DurationPerFrame,
	(SampleDescriptionHandle) theImageDescHandle, 1, 0, &addedTime );
	if ( status != noErr )
	{
		Assert( false ); // tell the user
		SAFE_DISPOSE_HANDLE( theImageDescHandle );
		return false;
	}
#ifdef LOG_ENCODER_OPERATIONS
	LogMsg( "Video Frame %d added to Video Media: Duration = %d, Inserted at Time %d\n", m_nFramesAdded+1, m_DurationPerFrame, addedTime );
#endif
	// free up dynamic resources
	SAFE_DISPOSE_HANDLE( theImageDescHandle );
	// Report success
	SetResult( VideoResult::SUCCESS );
	m_nFramesAdded++;
	return true;
}
  1144. int CQTVideoFileComposer::GetAudioSampleCountThruFrame( int frameNo )
  1145. {
  1146. if ( frameNo < 1 )
  1147. {
  1148. return 0;
  1149. }
  1150. double secondsSoFar = (double) ( frameNo * m_DurationPerFrame ) / (double) m_MovieTimeScale;
  1151. int nAudioSamples = (int) floor( secondsSoFar * (double) m_AudioSourceFrequency );
  1152. return nAudioSamples;
  1153. }
//-----------------------------------------------------------------------------
// Appends raw audio samples to the movie's audio media.
//   soundBuffer - interleaved samples in the configured source format
//   bufferSize  - size in bytes; must be a multiple of m_AudioBytesPerSample.
//                 A size of 0 is legal and acts as a "flush" request.
// Depending on the configured grouping mode (AG_NONE / AG_FIXED_SIZE /
// AG_PER_FRAME) and the limit-audio-to-video option, samples are either
// inserted directly or accumulated in m_srcAudioBuffer and emitted in groups.
// Uses goto-based flow: 'retest_here' re-evaluates after each emitted group,
// 'finish_up' is the common success exit.
//-----------------------------------------------------------------------------
bool CQTVideoFileComposer::AppendAudioSamplesToMedia( void *soundBuffer, size_t bufferSize )
{
	SetResult( VideoResult::ILLEGAL_OPERATION );
	AssertExitF( m_bHasAudioTrack );
	// Validate input and state
	SetResult( VideoResult::BAD_INPUT_PARAMETERS );
	AssertExitF( soundBuffer != nullptr && bufferSize % m_AudioBytesPerSample == 0 );
	SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
	AssertExitF( m_bComposingMovie && !m_bMovieCompleted );
	int nSamples = bufferSize / m_AudioBytesPerSample;
	Assert( bufferSize % m_AudioBytesPerSample == 0 );
	TimeValue64 insertTime64 = 0;
	OSErr status = noErr;
	// declared up here because the gotos below would otherwise jump over
	// their initialization
	bool retest;
	int samplesToEmit, MaxCanAdd, nSamplesAvailable;
	m_nSamplesAdded+= nSamples; // track samples given to encoder
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
	LogMsg( "%d Audio Samples Submitted (%d total) -- ", nSamples, m_nSamplesAdded );
#endif
	// We can pass in 0 bytes to trigger a flush...
	if ( nSamples == 0 )
	{
		// nothing buffered either: nothing to flush, succeed trivially
		if ( !m_bBufferSourceAudio || m_srcAudioBufferCurrentSize == 0 )
		{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
			LogMsg( "NO SAMPLES TO PROCESS. EXIT\n" );
#endif
			goto finish_up;
		}
	}
	// Are we not buffering audio? Then insert the caller's samples directly
	if ( !m_bBufferSourceAudio && nSamples > 0 )
	{
		SetResult( VideoResult::AUDIO_ERROR_OCCURED );
		status = AddMediaSample2( m_theAudioMedia, (const UInt8 *) soundBuffer, (ByteCount) bufferSize,
		(TimeValue64) 1, (TimeValue64) 0, (SampleDescriptionHandle) m_srcSoundDescription,
		(ItemCount) nSamples, 0, &insertTime64 );
		AssertExitF( status == noErr );
		m_nSamplesAddedToMedia+= nSamples;
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "%d samples (%d total) inserted into Video at time %ld\n", nSamples, m_nSamplesAddedToMedia, insertTime64 );
#endif
		goto finish_up;
	}
	// Buffering audio .... append the new samples to m_srcAudioBuffer first
	if ( nSamples > 0 )
	{
		memaddr_t pSrc = (memaddr_t) soundBuffer;
		size_t bytesToCopy = nSamples * m_AudioBytesPerSample;
		// Is buffer big enough to hold it all?
		if ( m_srcAudioBufferCurrentSize + bytesToCopy > m_srcAudioBufferSize )
		{
			// get a bigger buffer (double + incoming, so growth is amortized)
			size_t newBufferSize = m_srcAudioBufferSize * 2 + bytesToCopy;
			byte *newBuffer = new byte[newBufferSize];
			// copy buffered sound and swap out buffers
			V_memcpy( newBuffer, m_srcAudioBuffer, m_srcAudioBufferCurrentSize );
			delete[] m_srcAudioBuffer;
			m_srcAudioBuffer = newBuffer;
			m_srcAudioBufferSize = newBufferSize;
		}
		// Append samples to buffer
		V_memcpy( m_srcAudioBuffer + m_srcAudioBufferCurrentSize, pSrc, bytesToCopy );
		m_srcAudioBufferCurrentSize += bytesToCopy;
	}
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
	LogMsg( "%d Samples buffered. Buffer=%d Samples -- ", nSamples, ( m_srcAudioBufferCurrentSize / m_AudioBytesPerSample ) );
#endif
retest_here:
	// Re-entered after each emitted group (when retest is set) to see if
	// another group can be emitted from what remains in the buffer
	nSamplesAvailable = m_srcAudioBufferCurrentSize / m_AudioBytesPerSample;
	samplesToEmit = 0;
	retest = false;
	// cap insertion so audio never runs past the video already added
	MaxCanAdd = ( m_bLimitAudioDurationToVideo ) ? ( GetAudioSampleCountThruFrame( m_nFramesAdded ) - m_nSamplesAddedToMedia ) : INT32_MAX;
	if ( MaxCanAdd <= 0 )
	{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "Can't Add Audio Now.\n" );
#endif
		goto finish_up;
	}
	// Now.. we determine if we are ready to insert the audio samples into the media..and if so, how much...
	if ( m_SampleGrouping == AG_NONE )
	{
		// are we keeping audio from getting ahead of video?
		Assert( m_bLimitAudioDurationToVideo );
		samplesToEmit = MIN( MaxCanAdd, nSamplesAvailable );
	}
	else if ( m_SampleGrouping == AG_FIXED_SIZE )
	{
		// do we have enough to emit a sample?
		if ( ( nSamplesAvailable < m_nAudioSampleGroupSize ) || ( m_bLimitAudioDurationToVideo && ( MaxCanAdd < m_nAudioSampleGroupSize ) ) )
		{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
			if ( nSamplesAvailable < m_nAudioSampleGroupSize )
				LogMsg( "Need %d Samples to emit sample group\n", m_nAudioSampleGroupSize );
			else
				LogMsg( "Audio is caught up to Video (can add %d) \n", MaxCanAdd );
#endif
			goto finish_up;
		}
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "emitting 1 group of audio (%d samples)\n", m_nAudioSampleGroupSize );
#endif
		samplesToEmit = m_nAudioSampleGroupSize;
		retest = true;
	}
	else if ( m_SampleGrouping == AG_PER_FRAME )
	{
		// is the audio already caught up with the current video frame?
		if ( m_bLimitAudioDurationToVideo && m_AudioSampleFrameCounter >= m_nFramesAdded )
		{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
			LogMsg( "Audio is caught up to Video\n" );
#endif
			goto finish_up;
		}
		// group size = number of samples spanned by the next video frame
		// (varies per frame for non-integer rates, e.g. NTSC)
		int curSampleCount = GetAudioSampleCountThruFrame( m_AudioSampleFrameCounter );
		int nextSampleCount = GetAudioSampleCountThruFrame( m_AudioSampleFrameCounter+1 );
		int thisGroupSize = nextSampleCount - curSampleCount;
		Assert( m_nSamplesAddedToMedia == curSampleCount );
		if ( nSamplesAvailable < thisGroupSize )
		{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
			LogMsg( "Not enough samples to fill video frame (need %d)\n", thisGroupSize );
#endif
			goto finish_up;
		}
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "emitting 1 video frame of audio (%d samples)\n", thisGroupSize );
#endif
		samplesToEmit = thisGroupSize;
		m_AudioSampleFrameCounter++;
		retest = true;
	}
	else
	{
		Assert( false );
	}
	if ( samplesToEmit > 0 )
	{
		SetResult( VideoResult::AUDIO_ERROR_OCCURED );
		status = AddMediaSample2( m_theAudioMedia, (const UInt8 *) m_srcAudioBuffer, (ByteCount) samplesToEmit * m_AudioBytesPerSample,
		(TimeValue64) 1, (TimeValue64) 0, (SampleDescriptionHandle) m_srcSoundDescription,
		(ItemCount) samplesToEmit, 0, &insertTime64 );
		AssertExitF( status == noErr );
		m_nSamplesAddedToMedia+= samplesToEmit;
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "%d samples inserted into Video (%d total) at time %ld -- ", samplesToEmit, m_nSamplesAddedToMedia, insertTime64 );
#endif
		// remove added samples from sound buffer by sliding the remainder down
		// (this really should be a circular buffer.. but that has its own problems)
		m_srcAudioBufferCurrentSize -= samplesToEmit * m_AudioBytesPerSample;
		nSamplesAvailable -= samplesToEmit;
		if ( nSamplesAvailable > 0 )
		{
			V_memcpy( m_srcAudioBuffer, m_srcAudioBuffer + (samplesToEmit * m_AudioBytesPerSample), nSamplesAvailable * m_AudioBytesPerSample );
		}
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
		LogMsg( "Buffer now =%d samples", nSamplesAvailable );
#endif
		if ( retest )
		{
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
			LogMsg( " -- rechecking -- " );
#endif
			goto retest_here;
		}
	}
#ifdef LOG_ENCODER_AUDIO_OPERATIONS
	LogMsg( "\n" );
#endif
finish_up:
	// Report success
	SetResult( VideoResult::SUCCESS );
	return true;
}
  1330. bool CQTVideoFileComposer::SyncAndFlushAudio()
  1331. {
  1332. if ( !m_bHasAudioTrack )
  1333. {
  1334. return false;
  1335. }
  1336. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1337. LogMsg( "Resolving Audio Track...\n" );
  1338. #endif
  1339. restart_sync:
  1340. bool bPadWithSilence = BITFLAGS_SET( m_AudioOptions, AudioEncodeOptions::PAD_AUDIO_WITH_SILENCE );
  1341. int VideoDurationInSamples = GetAudioSampleCountThruFrame( m_nFramesAdded );
  1342. int CurShortfall = VideoDurationInSamples - m_nSamplesAddedToMedia;
  1343. int SilenceToEmit = 0;
  1344. bool forceFlush = false;
  1345. bool forcePartialGroupFlush = false;
  1346. int nSamplesInBuffer = ( m_bBufferSourceAudio ) ? m_srcAudioBufferCurrentSize / m_AudioBytesPerSample : 0;
  1347. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1348. LogMsg( "Video duration is %d frames, which is %d Audio Samples\n", m_nFramesAdded, VideoDurationInSamples );
  1349. LogMsg( "%d Samples emitted to Audio track so far. %d samples remain in audio buffer\n", m_nSamplesAddedToMedia, nSamplesInBuffer );
  1350. LogMsg( "Delta to sync end of audio to end of video is %d Samples\n", CurShortfall );
  1351. LogMsg( "Pad With Silence Mode = %d, Align End of Audio With Video Mode = %d\n", (int) bPadWithSilence, (int) m_bLimitAudioDurationToVideo );
  1352. #endif
  1353. // not grouping samples mode
  1354. if ( m_SampleGrouping == AG_NONE )
  1355. {
  1356. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1357. LogMsg( "No sample grouping Mode\n" );
  1358. #endif
  1359. Assert( m_bLimitAudioDurationToVideo == m_bBufferSourceAudio ); // if we're not limiting, we're not buffering
  1360. if ( m_bLimitAudioDurationToVideo && CurShortfall > 0 && bPadWithSilence )
  1361. {
  1362. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1363. LogMsg( "Padding with %d samples to match video duration", CurShortfall );
  1364. #endif
  1365. SilenceToEmit = CurShortfall; // pad with silence
  1366. }
  1367. if ( nSamplesInBuffer > 0 || SilenceToEmit > 0 ) // force if we have something to add
  1368. {
  1369. forceFlush = true;
  1370. }
  1371. }
  1372. // Fixed sized grouping (and buffering)
  1373. if ( m_SampleGrouping == AG_FIXED_SIZE )
  1374. {
  1375. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1376. LogMsg( "Fixed sample grouping mode. Group size of %d\n", m_nAudioSampleGroupSize );
  1377. #endif
  1378. // No matter what, if we have a partially filled buffer, we add silence to it to make a complete group
  1379. // do we have a partially full buffer? if so pad with silence to make a full group
  1380. if ( nSamplesInBuffer > 0 && nSamplesInBuffer % m_nAudioSampleGroupSize != 0 )
  1381. {
  1382. SilenceToEmit = m_nAudioSampleGroupSize - ( nSamplesInBuffer % m_nAudioSampleGroupSize );
  1383. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1384. LogMsg( "Adding %d silence samples to complete group\n", SilenceToEmit );
  1385. #endif
  1386. }
  1387. int bufferedSamples = nSamplesInBuffer + SilenceToEmit;
  1388. int newShortFall = VideoDurationInSamples - m_nSamplesAddedToMedia - bufferedSamples;
  1389. if ( bPadWithSilence && newShortFall > 0 )
  1390. {
  1391. SilenceToEmit += newShortFall; // pad with silence until audio matches video duration
  1392. forceFlush = true;
  1393. forcePartialGroupFlush = true;
  1394. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1395. LogMsg( "Adding %d silence samples to pad to match audio to video duration\n", newShortFall );
  1396. #endif
  1397. }
  1398. }
  1399. if ( m_SampleGrouping == AG_PER_FRAME )
  1400. {
  1401. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1402. LogMsg( "Video Frame duraiton Audio grouping mode\n" );
  1403. #endif
  1404. // Have we already enough audio to match the video
  1405. if ( m_bLimitAudioDurationToVideo && m_AudioSampleFrameCounter >= m_nFramesAdded )
  1406. {
  1407. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1408. LogMsg( "Audio is caught up to Video\n" );
  1409. #endif
  1410. goto audio_complete;
  1411. }
  1412. // if we have anything in the buffer... pad it out with zeros
  1413. if ( nSamplesInBuffer > 0 )
  1414. {
  1415. // get the group size for the video frame the audio is currently on
  1416. int thisGroupSize = GetAudioSampleCountThruFrame( m_AudioSampleFrameCounter+1 ) - GetAudioSampleCountThruFrame( m_AudioSampleFrameCounter );
  1417. Assert( m_nSamplesAddedToMedia == GetAudioSampleCountThruFrame( m_AudioSampleFrameCounter ) );
  1418. // if we already have 1 (or more) groups in the buffer.. emit them, and restart
  1419. if ( nSamplesInBuffer >= thisGroupSize )
  1420. {
  1421. char n = nullchar;
  1422. AppendAudioSamplesToMedia( &n, 0 );
  1423. goto restart_sync;
  1424. }
  1425. SilenceToEmit = thisGroupSize - nSamplesInBuffer;
  1426. forceFlush = true;
  1427. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1428. LogMsg( "Adding %d silence samples to pad current group to match video frame duration\n", SilenceToEmit );
  1429. #endif
  1430. }
  1431. // with the output being aligned to a video frame, do we need to add more to pad to end of the video
  1432. int bufferedSamples = nSamplesInBuffer + SilenceToEmit;
  1433. int newShortFall = VideoDurationInSamples - m_nSamplesAddedToMedia - bufferedSamples;
  1434. if ( bPadWithSilence && newShortFall > 0 )
  1435. {
  1436. SilenceToEmit += newShortFall; // pad with silence until audio matches video duration
  1437. forceFlush = true;
  1438. forcePartialGroupFlush = true;
  1439. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1440. LogMsg( "Adding %d silence samples to pad audio to match video duration", newShortFall );
  1441. #endif
  1442. }
  1443. }
  1444. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1445. LogMsg( "\n" );
  1446. #endif
  1447. // now we append any needed silence to the audio stream...
  1448. if ( SilenceToEmit > 0 )
  1449. {
  1450. int bufferSize = SilenceToEmit * m_AudioBytesPerSample;
  1451. byte *pSilenceBuf = new byte[ bufferSize ];
  1452. V_memset( pSilenceBuf, nullchar, bufferSize );
  1453. #ifdef LOG_ENCODER_AUDIO_OPERATIONS
  1454. LogMsg( "Appending %d Silence samples\n", SilenceToEmit );
  1455. #endif
  1456. AppendAudioSamplesToMedia( pSilenceBuf, bufferSize );
  1457. }
  1458. else
  1459. {
  1460. if ( forceFlush )
  1461. {
  1462. char n = nullchar;
  1463. AppendAudioSamplesToMedia( &n, 0 );
  1464. }
  1465. }
  1466. if ( forcePartialGroupFlush && m_srcAudioBufferCurrentSize >0 )
  1467. {
  1468. int nSamplesThisAdd = m_srcAudioBufferCurrentSize / m_AudioBytesPerSample;
  1469. TimeValue64 insertTime64 = 0;
  1470. OSErr status = AddMediaSample2( m_theAudioMedia, (const UInt8 *) m_srcAudioBuffer, (ByteCount) m_srcAudioBufferCurrentSize,
  1471. (TimeValue64) 1, (TimeValue64) 0, (SampleDescriptionHandle) m_srcSoundDescription,
  1472. (ItemCount) nSamplesThisAdd, 0, &insertTime64 );
  1473. AssertExitF( status == noErr );
  1474. m_srcAudioBufferCurrentSize = 0;
  1475. m_nSamplesAddedToMedia+= nSamplesThisAdd;
  1476. #ifdef LOG_ENCODER_OPERATIONS
  1477. LogMsg( "FORCED FLUSH - Audio Samples added to media. %d added, %d total samples, inserted at time %ld\n", nSamplesThisAdd, m_nSamplesAddedToMedia, insertTime64 );
  1478. #endif
  1479. }
  1480. audio_complete:
  1481. return true;
  1482. }
  1483. bool CQTVideoFileComposer::EndMovieCreation( bool saveMovieData )
  1484. {
  1485. #ifdef LOG_ENCODER_OPERATIONS
  1486. LogMsg( "\nEndMovieCreation Called Composing=%d Completed=%d\n\n", (int) m_bComposingMovie, (int) m_bMovieCompleted );
  1487. #endif
  1488. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  1489. AssertExitF( m_bComposingMovie && !m_bMovieCompleted );
  1490. #ifdef LOG_ENCODER_OPERATIONS
  1491. LogMsg( "\nEndMovieCreation Called\n\n" );
  1492. #endif
  1493. // Stop adding to and (optionally) save the video media into the file
  1494. SetResult( VideoResult::VIDEO_ERROR_OCCURED );
  1495. if ( m_nFramesAdded > 0 )
  1496. {
  1497. TimeValue VideoDuration = GetMediaDuration( m_theVideoMedia );
  1498. AssertExitF( VideoDuration == m_nFramesAdded * m_DurationPerFrame );
  1499. OSErr status = EndMediaEdits( m_theVideoMedia );
  1500. AssertExitF( status == noErr );
  1501. if ( saveMovieData )
  1502. {
  1503. status = InsertMediaIntoTrack( m_theVideoTrack, 0, 0, VideoDuration, fixed1 );
  1504. Assert( status == noErr );
  1505. #ifdef LOG_ENCODER_OPERATIONS
  1506. LogMsg( "\nVideo Media inserted into Track\n" );
  1507. #endif
  1508. }
  1509. }
  1510. // Stop adding to and (optionally) save the audio media into the file
  1511. SetResult( VideoResult::AUDIO_ERROR_OCCURED );
  1512. if ( m_bHasAudioTrack && m_nSamplesAdded > 0 )
  1513. {
  1514. // flush any remaining samples in the buffer to the media
  1515. #ifdef LOG_ENCODER_OPERATIONS
  1516. LogMsg( "Calling SyncAndFlushAudio()\n" );
  1517. #endif
  1518. SyncAndFlushAudio();
  1519. TimeValue AudioDuration = GetMediaDuration( m_theAudioMedia );
  1520. #ifdef LOG_ENCODER_OPERATIONS
  1521. LogMsg( "Audio Duration = %d nSamples Added = %d\n", AudioDuration, m_nSamplesAdded );
  1522. #endif
  1523. // AssertExitF( AudioDuration == m_nSamplesAdded );
  1524. OSErr status = EndMediaEdits( m_theAudioMedia );
  1525. AssertExitF( status == noErr );
  1526. if ( saveMovieData )
  1527. {
  1528. status = InsertMediaIntoTrack( m_theAudioTrack, 0, 0, AudioDuration, fixed1 );
  1529. AssertExitF( status == noErr );
  1530. #ifdef LOG_ENCODER_OPERATIONS
  1531. LogMsg( "\nAudio Media inserted into Track\n" );
  1532. #endif
  1533. }
  1534. }
  1535. if ( saveMovieData )
  1536. {
  1537. #ifdef LOG_ENCODER_OPERATIONS
  1538. LogMsg( "Saving Movie Data...\n" );
  1539. #endif
  1540. SetResult( VideoResult::FILE_ERROR_OCCURED );
  1541. OSErr status = AddMovieToStorage( m_theMovie, m_MovieFileDataHandler );
  1542. AssertExitF( status == noErr );
  1543. if ( status != noErr )
  1544. {
  1545. DataHDeleteFile( m_MovieFileDataHandler );
  1546. }
  1547. #ifdef LOG_ENCODER_OPERATIONS
  1548. LogMsg( "\nMovie Resource added to file. Returned Status = %d\n", (int) status );
  1549. #endif
  1550. }
  1551. // free our resources
  1552. if ( m_MovieFileDataHandler != nullptr )
  1553. {
  1554. OSErr status = CloseMovieStorage( m_MovieFileDataHandler );
  1555. m_MovieFileDataHandler = nullptr;
  1556. AssertExitF( status == noErr );
  1557. }
  1558. SAFE_DISPOSE_HANDLE( m_MovieFileDataRef );
  1559. SAFE_DISPOSE_HANDLE( m_SrcImageCompressedBuffer );
  1560. SAFE_DISPOSE_GWORLD( m_theSrcGWorld );
  1561. SAFE_DISPOSE_HANDLE( m_srcSoundDescription );
  1562. SetResult( VideoResult::SUCCESS );
  1563. m_bComposingMovie = false;
  1564. return true;
  1565. }
  1566. // The movie can be aborted at any time before completion
  1567. bool CQTVideoFileComposer::AbortMovie()
  1568. {
  1569. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  1570. AssertExitF( !m_bMovieCompleted );
  1571. // Shut down the movie if we are recording
  1572. if ( m_bComposingMovie )
  1573. {
  1574. if ( !EndMovieCreation( false ) )
  1575. {
  1576. return false;
  1577. }
  1578. }
  1579. if ( m_bMovieCreated )
  1580. {
  1581. if ( m_MovieFileDataHandler != nullptr )
  1582. {
  1583. SetResult( VideoResult::FILE_ERROR_OCCURED );
  1584. OSErr status = CloseMovieStorage( m_MovieFileDataHandler );
  1585. AssertExitF( status == noErr );
  1586. m_MovieFileDataHandler = nullptr;
  1587. }
  1588. SAFE_DISPOSE_HANDLE( m_MovieFileDataRef );
  1589. SAFE_DISPOSE_MOVIE( m_theMovie );
  1590. }
  1591. if ( m_FileName )
  1592. {
  1593. g_pFullFileSystem->RemoveFile( m_FileName );
  1594. }
  1595. SetResult( VideoResult::SUCCESS );
  1596. m_bMovieCompleted = true;
  1597. return true;
  1598. }
  1599. bool CQTVideoFileComposer::FinishMovie( bool SaveMovieToDisk )
  1600. {
  1601. #ifdef LOG_ENCODER_OPERATIONS
  1602. LogMsg( "\nFinish Movie Called\n" );
  1603. #endif
  1604. SetResult( VideoResult::OPERATION_OUT_OF_SEQUENCE );
  1605. AssertExitF( m_bComposingMovie && !m_bMovieCompleted );
  1606. // Shutdown movie creation
  1607. if ( !EndMovieCreation( SaveMovieToDisk ) )
  1608. {
  1609. #ifdef LOG_ENCODER_OPERATIONS
  1610. LogMsg( "\nEndMovieCreation Aborted\n" );
  1611. #endif
  1612. return false;
  1613. }
  1614. // todo: check on Disposing of theMovie and theMedia
  1615. if ( m_MovieFileDataHandler != nullptr )
  1616. {
  1617. SetResult( VideoResult::FILE_ERROR_OCCURED );
  1618. OSErr status = CloseMovieStorage( m_MovieFileDataHandler );
  1619. AssertExitF( status == noErr );
  1620. m_MovieFileDataHandler = nullptr;
  1621. #ifdef LOG_ENCODER_OPERATIONS
  1622. LogMsg( "Movie File Closed\n" );
  1623. #endif
  1624. }
  1625. SAFE_DISPOSE_HANDLE( m_MovieFileDataRef );
  1626. SAFE_DISPOSE_MOVIE( m_theMovie );
  1627. // if no frames have been added.. delete files
  1628. if ( SaveMovieToDisk == false || ( m_nFramesAdded <= 0 && m_nSamplesAdded <= 0) )
  1629. {
  1630. g_pFullFileSystem->RemoveFile( m_FileName );
  1631. }
  1632. SetResult( VideoResult::SUCCESS );
  1633. m_bMovieCompleted = true;
  1634. #ifdef LOG_ENCODER_OPERATIONS
  1635. g_pFullFileSystem->Close( m_LogFile );
  1636. m_LogFile = FILESYSTEM_INVALID_HANDLE;
  1637. #endif
  1638. return true;
  1639. }
  1640. void CQTVideoFileComposer::SetResult( VideoResult_t status )
  1641. {
  1642. m_LastResult = status;
  1643. }
  1644. VideoResult_t CQTVideoFileComposer::GetResult()
  1645. {
  1646. return m_LastResult;
  1647. }
  1648. bool CQTVideoFileComposer::IsReadyToRecord()
  1649. {
  1650. return ( m_bComposingMovie && !m_bMovieCompleted );
  1651. }
  1652. #ifdef ENABLE_EXTERNAL_ENCODER_LOGGING
  1653. bool CQTVideoFileComposer::LogMessage( const char *msg )
  1654. {
  1655. #ifdef LOG_ENCODER_OPERATIONS
  1656. if ( IS_NOT_EMPTY(msg) && m_LogFile != FILESYSTEM_INVALID_HANDLE )
  1657. {
  1658. g_pFullFileSystem->Write( msg, V_strlen( msg ), m_LogFile );
  1659. }
  1660. #endif
  1661. return true;
  1662. }
  1663. #endif