Team Fortress 2 Source Code as on 22/4/2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1014 lines
33 KiB

  1. //========= Copyright Valve Corporation, All rights reserved. ============//
  2. //
  3. //=======================================================================================//
  4. #include "cbase.h"
  5. #if defined( REPLAY_ENABLED )
  6. #include "replayrenderer.h"
  7. #include "materialsystem/imaterialvar.h"
  8. #include "materialsystem/itexture.h"
  9. #include "materialsystem/imaterialproxy.h"
  10. #include "replay/vgui/replayrenderoverlay.h"
  11. #include "replay/replay.h"
  12. #include "replay/ireplaymoviemanager.h"
  13. #include "replay/ireplayperformancecontroller.h"
  14. #include "replay/ireplaymovie.h"
  15. #include "replay/ireplaymanager.h"
  16. #include "replay/ienginereplay.h"
  17. #include "replay/iclientreplaycontext.h"
  18. #include "view.h"
  19. #include "iviewrender.h"
  20. #include "view_shared.h"
  21. #include "replay/replaycamera.h"
  22. #include "bitmap/tgawriter.h"
  23. #include "filesystem.h"
  24. #define REPLAY_RECORDING_ENABLE
  25. #ifdef REPLAY_RECORDING_ENABLE
  26. #include "video/ivideoservices.h"
  27. #endif
  28. #define TMP_WAVE_FILENAME "tmpaudio"
  29. //#define TRACE_REPLAY_STATE_MACHINE
  30. // memdbgon must be the last include file in a .cpp file!!!
  31. #include <tier0/memdbgon.h>
  32. //-----------------------------------------------------------------------------
// Singletons defined elsewhere in the client DLL.
extern IReplayMovieManager *g_pReplayMovieManager;
extern IReplayPerformanceController *g_pReplayPerformanceController;

// Map quality index to number of samples.
// NOTE(review): only 3 entries are live, so MAX_DOF_QUALITY is presumably 2 -
// confirm against the header that defines it.
static int s_DoFQualityToSamples[MAX_DOF_QUALITY+1] = {8, 16, 32};//, 64, 128 };

// Camera-jitter tables used for antialiased sub-sample accumulation; the
// table matching the sample count is selected in SetupJitterTable().

// 4-entry table of values in 2D -1 to +1 range using Poisson disk distribution
static Vector2D g_vJitterTable4[4] = { Vector2D (0.5318f, -0.6902f ), Vector2D (-0.5123f, 0.8362f ), Vector2D (-0.5193f, -0.2195f ), Vector2D (0.4749f, 0.3478f ) };

// 8-entry table of values in 2D -1 to +1 range using Poisson disk distribution
static Vector2D g_vJitterTable8[8] = { Vector2D (0.3475f, 0.0042f ),Vector2D (0.8806f, 0.3430f ),Vector2D (-0.0041f, -0.6197f ),Vector2D (0.0472f, 0.4964f ),
Vector2D (-0.3730f, 0.0874f ),Vector2D (-0.9217f, -0.3177f ),Vector2D (-0.6289f, 0.7388f ),Vector2D (0.5744f, -0.7741f ) };

// 16-entry table of values in 2D -1 to +1 range using Poisson disk distribution (disk size 0.38f)
static Vector2D g_vJitterTable16[16] = { Vector2D (0.0747f, -0.8341f ),Vector2D (-0.9138f, 0.3251f ),Vector2D (0.8667f, -0.3029f ),Vector2D (-0.4642f, 0.2187f ),
Vector2D (-0.1505f, 0.7320f ),Vector2D (0.7310f, -0.6786f ),Vector2D (0.2859f, -0.3254f ),Vector2D (-0.1311f, -0.2292f ),
Vector2D (0.3518f, 0.6470f ),Vector2D (-0.7485f, -0.6307f ),Vector2D (0.1687f, 0.1873f ),Vector2D (-0.3604f, -0.7483f ),
Vector2D (-0.5658f, -0.1521f ),Vector2D (0.7102f, 0.0536f ),Vector2D (-0.6056f, 0.7747f ),Vector2D (0.7793f, 0.6194f ) };

// 32-entry table of values in 2D -1 to +1 range using Poisson disk distribution (disk size 0.28f)
static Vector2D g_vJitterTable32[32] = { Vector2D (0.0854f, -0.0644f ),Vector2D (0.8744f, 0.1665f ),Vector2D (0.2329f, 0.3995f ),Vector2D (-0.7804f, 0.5482f ),
Vector2D (-0.4577f, 0.7647f ),Vector2D (-0.1936f, 0.5564f ),Vector2D (0.4205f, -0.5768f ),Vector2D (-0.0304f, -0.9050f ),
Vector2D (-0.5215f, 0.1854f ),Vector2D (0.3161f, -0.2954f ),Vector2D (0.0666f, -0.5564f ),Vector2D (-0.2137f, -0.0072f ),
Vector2D (-0.4112f, -0.3311f ),Vector2D (0.6438f, -0.2484f ),Vector2D (-0.9055f, -0.0360f ),Vector2D (0.8323f, 0.5268f ),
Vector2D (0.5592f, 0.3459f ),Vector2D (-0.6797f, -0.5201f ),Vector2D (-0.4325f, -0.8857f ),Vector2D (0.8768f, -0.4197f ),
Vector2D (0.3090f, -0.8646f ),Vector2D (0.5034f, 0.8603f ),Vector2D (0.3752f, 0.0627f ),Vector2D (-0.0161f, 0.2627f ),
Vector2D (0.0969f, 0.7054f ),Vector2D (-0.2291f, -0.6595f ),Vector2D (-0.5887f, -0.1100f ),Vector2D (0.7048f, -0.6528f ),
Vector2D (-0.8438f, 0.2706f ),Vector2D (-0.5061f, 0.4653f ),Vector2D (-0.1245f, -0.3302f ),Vector2D (-0.1801f, 0.8486f )};
  56. //-----------------------------------------------------------------------------
  57. //
  58. // Accumulation material proxy for ping-pong accumulation buffer imp.
  59. //
// Payload handed to CAccumBuffProxy::OnBind() through the proxy bind pointer.
// NOTE: this struct is brace-initialized positionally elsewhere in this file,
// so field order must not change.
struct AccumParams_t
{
    ITexture *m_pTexture0;      // newest rendered sample
    ITexture *m_pTexture1;      // accumulated history (other ping-pong buffer)
    float m_fSampleWeight;      // weight given to texture 0; texture 1 gets (1 - weight)
    bool m_bClear;              // if true, all weights are zeroed (clears to black)
};
//-----------------------------------------------------------------------------
// Material proxy for the "accumbuff4sample" accumulation shader: feeds the
// two input textures and blend weights from an AccumParams_t into the
// material's shader parameters each time the material is bound.
//-----------------------------------------------------------------------------
class CAccumBuffProxy : public IMaterialProxy
{
public:
    CAccumBuffProxy();
    virtual ~CAccumBuffProxy();

    // Resolves the $TEXTURE0/$TEXTURE1/$WEIGHTS material vars;
    // returns false if any of them is missing from the material.
    virtual bool Init( IMaterial *pMaterial, KeyValues *pKeyValues );

    // pC_BaseEntity is actually an AccumParams_t* (the proxy bind payload).
    virtual void OnBind( void *pC_BaseEntity );
    virtual void Release( void ) { delete this; }
    virtual IMaterial *GetMaterial();

private:
    // Cached material vars, resolved in Init()
    IMaterialVar *m_pTexture0;
    IMaterialVar *m_pTexture1;
    IMaterialVar *m_pAccumBuffWeights;
};
  81. //-----------------------------------------------------------------------------
  82. CAccumBuffProxy::CAccumBuffProxy()
  83. {
  84. m_pTexture0 = NULL;
  85. m_pTexture1 = NULL;
  86. m_pAccumBuffWeights = NULL;
  87. }
  88. CAccumBuffProxy::~CAccumBuffProxy()
  89. {
  90. }
  91. bool CAccumBuffProxy::Init( IMaterial *pMaterial, KeyValues *pKeyValues )
  92. {
  93. bool foundVar;
  94. // Grab the Material variables for the accumulation shader
  95. m_pTexture0 = pMaterial->FindVar( "$TEXTURE0", &foundVar, false );
  96. if( !foundVar )
  97. return false;
  98. m_pTexture1 = pMaterial->FindVar( "$TEXTURE1", &foundVar, false );
  99. if( !foundVar )
  100. return false;
  101. m_pAccumBuffWeights = pMaterial->FindVar( "$WEIGHTS", &foundVar, false );
  102. if( !foundVar )
  103. return false;
  104. return true;
  105. }
  106. void CAccumBuffProxy::OnBind( void *pC_BaseEntity )
  107. {
  108. AccumParams_t *pAccumParams = (AccumParams_t *) pC_BaseEntity;
  109. if( !m_pTexture0 || !m_pTexture1 || !m_pAccumBuffWeights )
  110. {
  111. return;
  112. }
  113. m_pTexture0->SetTextureValue( pAccumParams->m_pTexture0 );
  114. m_pTexture1->SetTextureValue( pAccumParams->m_pTexture1 );
  115. // If we're just using this material to do a clear to black...
  116. if ( pAccumParams->m_bClear )
  117. {
  118. m_pAccumBuffWeights->SetVecValue( 0.0f, 0.0f, 0.0f, 0.0f );
  119. }
  120. else
  121. {
  122. m_pAccumBuffWeights->SetVecValue( pAccumParams->m_fSampleWeight, 1.0f - pAccumParams->m_fSampleWeight, 0.0f, 0.0f );
  123. }
  124. }
  125. IMaterial *CAccumBuffProxy::GetMaterial()
  126. {
  127. return m_pAccumBuffWeights ? m_pAccumBuffWeights->GetOwningMaterial() : NULL;
  128. }
  129. //-----------------------------------------------------------------------------
// Register the proxy factory under "accumbuff4sample" + the proxy interface
// version string.  The two string literals are concatenated by the compiler
// (adjacent-literal pasting) - there is no missing comma here.
EXPOSE_INTERFACE( CAccumBuffProxy, IMaterialProxy, "accumbuff4sample" IMATERIAL_PROXY_INTERFACE_VERSION );
  131. //-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// All state starts zeroed/NULL; real setup happens in SetupRenderer().
// NOTE: initializer order should match member declaration order.
//-----------------------------------------------------------------------------
CReplayRenderer::CReplayRenderer( CReplayRenderOverlay *pOverlay )
:   m_bIsAudioSyncFrame( false ),
    m_pRenderOverlay( pOverlay ),
    m_nCurrentPingPong( 0 ),
    m_nCurSample( 0 ),
    m_nTimeStep( 0 ),
    m_curSampleTime( 0 ),
    m_nFrame( 0 ),
    m_nNumJitterSamples( 0 ),
    m_iTgaFrame( 0 ),
    m_pLayoffBuf( NULL ),
    m_pMovie( NULL ),
    m_pMovieMaker( NULL ),
    m_pJitterTable( NULL ),
    m_pViewmodelFov( NULL ),
    m_pDefaultFov( NULL ),
    m_bCacheFullSceneState( false ),
    m_bShutterClosed( false ),
    m_bForceCheapDoF( false )
{
}

CReplayRenderer::~CReplayRenderer()
{
    // NOTE: buffers/recorders are released in ShutdownRenderer(), not here.
}
  156. const CReplayPerformance *CReplayRenderer::GetPerformance() const
  157. {
  158. CReplay *pReplay = g_pReplayManager->GetPlayingReplay();
  159. if ( !pReplay )
  160. return NULL;
  161. return m_RenderParams.m_iPerformance >= 0 ? pReplay->GetPerformance( m_RenderParams.m_iPerformance ) : NULL;
  162. }
  163. const char *CReplayRenderer::GetMovieFilename() const
  164. {
  165. if ( !m_pMovie )
  166. return NULL;
  167. return m_pMovie->GetMovieFilename();
  168. }
// -------------------------------------------------------------------
// Functions used by audio engine to distinguish between sub-frames
// rendered for motion blur, and the actual frames being recorded
// -------------------------------------------------------------------

// Marks (or clears) the frame currently being rendered as an audio sync frame.
void CReplayRenderer::SetAudioSyncFrame( bool isSync )
{
    m_bIsAudioSyncFrame = isSync;
}

// True if the current frame is one the audio engine should sync on.
bool CReplayRenderer::IsAudioSyncFrame()
{
    return m_bIsAudioSyncFrame;
}
  181. float CReplayRenderer::GetRecordingFrameDuration()
  182. {
  183. double actualFPS = m_RenderParams.m_Settings.m_FPS.GetFPS();
  184. if ( actualFPS <= 0.0 )
  185. {
  186. Assert( false );
  187. return 30.0f;
  188. }
  189. double interval = 1.0 / actualFPS;
  190. return (float) interval;
  191. }
//-----------------------------------------------------------------------------
// Prepares a movie render job: caches parameters, resets frame counters,
// allocates render targets, and sets up either raw TGA/WAV export or a
// video-services recorder.  Returns false if the recorder cannot be created.
//-----------------------------------------------------------------------------
bool CReplayRenderer::SetupRenderer( RenderMovieParams_t &params, IReplayMovie *pMovie )
{
    // Cache render parameters
    // NOTE(review): raw memcpy assumes RenderMovieParams_t is trivially
    // copyable - confirm if that struct ever gains owning members.
    V_memcpy( &m_RenderParams, &params, sizeof( params ) );

    // Cache movie
    m_pMovie = pMovie;

    // Reset current frame
    m_nFrame = 0;
    m_nTimeStep = 0;
    m_nCurSample = 0;
    m_iTgaFrame = 0;
    m_curSampleTime = DmeTime_t(0);

    // Cache FOV cvars (either may be NULL if the cvar is missing)
    m_pViewmodelFov = ( ConVar * )cvar->FindVar( "viewmodel_fov" );
    m_pDefaultFov = ( ConVar * )cvar->FindVar( "default_fov" );

    InitBuffers( params );

#ifdef REPLAY_RECORDING_ENABLE
    // Record directly to a .wav file if desired via 'startmovie' and write out TGA's
    if ( params.m_bExportRaw )
    {
        // Create the temporary wave file
        g_pEngineClientReplay->Wave_CreateTmpFile( TMP_WAVE_FILENAME );

        // Create the path for the movie
        m_fmtTgaRenderDirName = g_pClientReplayContext->GetMovieManager()->GetRawExportDir();
        g_pFullFileSystem->CreateDirHierarchy( m_fmtTgaRenderDirName.Access() );
    }
    else
    {
        // Record to a movie using video services.
        if ( !g_pVideo )
            return false;

#ifdef USE_WEBM_FOR_REPLAY
        m_pMovieMaker = g_pVideo->CreateVideoRecorder( VideoSystem::WEBM );
#else
        m_pMovieMaker = g_pVideo->CreateVideoRecorder( VideoSystem::QUICKTIME );
#endif
        if ( !m_pMovieMaker )
            return false;

        CFmtStr fmtMovieFullFilename( "%s%s", g_pReplayMovieManager->GetRenderDir(), pMovie->GetMovieFilename() );

        // Configure the recorder in stages: movie file, video codec/size/fps,
        // source image format, then audio.  Any failure aborts the whole setup.
        bool bSuccess = false;
        if ( m_pMovieMaker->CreateNewMovieFile( fmtMovieFullFilename.Access(), true ) )
        {
            const ReplayRenderSettings_t &Settings = params.m_Settings;
#ifndef USE_WEBM_FOR_REPLAY
            // QuickTime path allows overriding encode gamma via cvar
            ConVarRef QTEncodeGamma( "video_quicktime_encode_gamma" );
            VideoEncodeGamma_t encodeGamma = ( QTEncodeGamma.IsValid() ) ? (VideoEncodeGamma_t) QTEncodeGamma.GetInt() : VideoEncodeGamma::GAMMA_2_2;
#else
            VideoEncodeGamma_t encodeGamma = VideoEncodeGamma::GAMMA_2_2;
#endif
            if ( m_pMovieMaker->SetMovieVideoParameters( Settings.m_Codec, Settings.m_nEncodingQuality, (int)Settings.m_nWidth, (int)Settings.m_nHeight, Settings.m_FPS, encodeGamma ) )
            {
                if ( m_pMovieMaker->SetMovieSourceImageParameters( VideoEncodeSourceFormat::BGRA_32BIT, (int)Settings.m_nWidth, (int)Settings.m_nHeight ) )
                {
                    AudioEncodeOptions_t audioOptions = AudioEncodeOptions::USE_AUDIO_ENCODE_GROUP_SIZE | AudioEncodeOptions::GROUP_SIZE_IS_VIDEO_FRAME |
                        AudioEncodeOptions::LIMIT_AUDIO_TRACK_TO_VIDEO_DURATION | AudioEncodeOptions::PAD_AUDIO_WITH_SILENCE ;
                    if ( m_pMovieMaker->SetMovieSourceAudioParameters( AudioEncodeSourceFormat::AUDIO_16BIT_PCMStereo, 44100, audioOptions ) )
                    {
                        bSuccess = true;
                    }
                }
            }
        }

        if ( !bSuccess )
        {
            g_pVideo->DestroyVideoRecorder( m_pMovieMaker );
            m_pMovieMaker = NULL;
            return false;
        }
    }

    // NOTE: return value intentionally ignored - an unsupported sample count
    // simply leaves the jitter table unset.
    SetupJitterTable();
#endif

    m_pRenderOverlay->Show();

    return true;
}
  265. bool CReplayRenderer::SetupJitterTable()
  266. {
  267. const int nNumSamples = NumMotionBlurTimeSteps();
  268. switch ( nNumSamples )
  269. {
  270. case 4: m_pJitterTable = g_vJitterTable4; break;
  271. case 8: m_pJitterTable = g_vJitterTable8; break;
  272. case 16: m_pJitterTable = g_vJitterTable16; break;
  273. case 32: m_pJitterTable = g_vJitterTable32; break;
  274. // case 64: m_pJitterTable = g_vJitterTable64; break;
  275. // case 128: m_pJitterTable = g_vJitterTable128; break;
  276. default: return false;
  277. }
  278. m_nNumJitterSamples = nNumSamples;
  279. return true;
  280. }
//-----------------------------------------------------------------------------
// Allocates the CPU readback buffer, the render targets used for sample
// accumulation, and the accumulation/resolve material.  Called once from
// SetupRenderer().
//-----------------------------------------------------------------------------
void CReplayRenderer::InitBuffers( const RenderMovieParams_t &params )
{
    const ReplayRenderSettings_t &Settings = params.m_Settings;
    Assert( m_pLayoffBuf == NULL );

    // CPU-side buffer the finished frame is read back into before encoding
    m_pLayoffBuf = new BGRA8888_t[ Settings.m_nWidth * Settings.m_nHeight ];

    // Lock the engine to the requested framerate while rendering
    CFmtStr fmtHostFramerateCmd( "host_framerate %f\n", params.m_flEngineFps );
    engine->ClientCmd_Unrestricted( fmtHostFramerateCmd.Access() );

    g_pMaterialSystem->BeginRenderTargetAllocation(); // Begin allocating RTs which IFM can scribble into

    // Offscreen surface for rendering individual samples.
    // Float-HDR configs get a 16F target; otherwise match the back buffer.
    ImageFormat AccumSampleFormat = (g_pMaterialSystemHardwareConfig->GetHDRType() == HDR_TYPE_FLOAT) ? IMAGE_FORMAT_RGBA16161616F : g_pMaterialSystem->GetBackBufferFormat();
    m_AccumBuffSample.Init(
        g_pMaterialSystem->CreateNamedRenderTargetTextureEx2(
            "_rt_Replay_Accum_Sample", Settings.m_nWidth, Settings.m_nHeight, RT_SIZE_OFFSCREEN,
            AccumSampleFormat, MATERIAL_RT_DEPTH_SHARED, TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT | TEXTUREFLAGS_POINTSAMPLE
        )
    );

    // Ping-Pong textures for accumulating result prior to final tone map
    ImageFormat PingPongFormat = IMAGE_FORMAT_BGR888;
    m_AccumBuffPingPong[0].Init(g_pMaterialSystem->CreateNamedRenderTargetTextureEx2(
        "_rt_Replay_Ping", Settings.m_nWidth, Settings.m_nHeight, RT_SIZE_OFFSCREEN,
        PingPongFormat, MATERIAL_RT_DEPTH_NONE, TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT | TEXTUREFLAGS_POINTSAMPLE ));
    m_AccumBuffPingPong[1].Init(g_pMaterialSystem->CreateNamedRenderTargetTextureEx2(
        "_rt_Replay_Pong", Settings.m_nWidth, Settings.m_nHeight, RT_SIZE_OFFSCREEN,
        PingPongFormat, MATERIAL_RT_DEPTH_NONE, TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT | TEXTUREFLAGS_POINTSAMPLE ));

    // LDR final result of either HDR or LDR rendering
    m_LayoffResult.Init(g_pMaterialSystem->CreateNamedRenderTargetTextureEx2(
        "_rt_LayoffResult", Settings.m_nWidth, Settings.m_nHeight, RT_SIZE_OFFSCREEN,
        g_pMaterialSystem->GetBackBufferFormat(), MATERIAL_RT_DEPTH_SHARED, TEXTUREFLAGS_BORDER | TEXTUREFLAGS_POINTSAMPLE ));

    g_pMaterialSystem->EndRenderTargetAllocation(); // Done allocating render targets

    // Build the accumulation material; CAccumBuffProxy overwrites the
    // texture/weight values on every bind, so these are placeholders.
    KeyValues *pVMTKeyValues = new KeyValues( "accumbuff4sample" );
    pVMTKeyValues->SetString( "$TEXTURE0", m_AccumBuffSample->GetName() ); // Dummy
    pVMTKeyValues->SetString( "$TEXTURE1", m_AccumBuffSample->GetName() ); // Dummy
    pVMTKeyValues->SetString( "$TEXTURE2", m_AccumBuffSample->GetName() ); // Dummy (not read by the proxy)
    pVMTKeyValues->SetString( "$TEXTURE3", m_AccumBuffSample->GetName() ); // Dummy (not read by the proxy)
    pVMTKeyValues->SetString( "$WEIGHTS", "[0.25 0.75 0.0 0.0]" );
    pVMTKeyValues->SetInt( "$nocull", 1 );
    KeyValues *pProxiesKV = pVMTKeyValues->FindKey( "proxies", true ); // create a subkey
    pProxiesKV->FindKey( "accumbuff4sample", true ); // create
    m_FourSampleResolveMatRef.Init( "accumbuff4sample", pVMTKeyValues );
    m_FourSampleResolveMatRef->Refresh();
}
//-----------------------------------------------------------------------------
// Tears down everything SetupRenderer()/InitBuffers() created: render
// targets, the readback buffer, the movie recorder (or the raw-export audio
// file), and restores the engine framerate.
//-----------------------------------------------------------------------------
void CReplayRenderer::ShutdownRenderer()
{
    // Release render targets (true => release the underlying texture)
    if ( m_LayoffResult.IsValid() )
    {
        m_LayoffResult.Shutdown( true );
    }
    if ( m_AccumBuffSample.IsValid() )
    {
        m_AccumBuffSample.Shutdown( true );
    }
    for ( int i = 0; i < 2; ++i )
    {
        if ( m_AccumBuffPingPong[i].IsValid() )
        {
            m_AccumBuffPingPong[i].Shutdown( true );
        }
    }

    delete [] m_pLayoffBuf;
    m_pLayoffBuf = NULL;

    // NOTE: with REPLAY_RECORDING_ENABLE defined, the raw-export 'if' below
    // is the 'else' branch of the movie-maker check - mind the #endif placement.
#ifdef REPLAY_RECORDING_ENABLE
    if ( m_pMovieMaker )
    {
        // Finalize and destroy the video recorder
        m_pMovieMaker->FinishMovie( true );
        if ( g_pVideo )
        {
            g_pVideo->DestroyVideoRecorder( m_pMovieMaker );
        }
        m_pMovieMaker = NULL;
        m_pRenderOverlay->Hide();
    }
    else
#endif
    if ( m_RenderParams.m_bExportRaw )
    {
        // Mimicking what "startmovie" does here.
        g_pEngineClientReplay->Wave_FixupTmpFile( TMP_WAVE_FILENAME );

        // Move the temp wave file to the destination dir
        CFmtStr fmtTmpFilename( "%s%c%s.wav", engine->GetGameDirectory(), CORRECT_PATH_SEPARATOR, TMP_WAVE_FILENAME );
        CFmtStr fmtDstFilename( "%s%s", m_fmtTgaRenderDirName.Access(), "audio.wav" );
        g_pFullFileSystem->RenameFile( fmtTmpFilename.Access(), fmtDstFilename.Access() );
    }

    // Reset framerate
    engine->ClientCmd_Unrestricted( "host_framerate 0" );

    // Notify of performance end
    g_pReplayPerformanceController->Stop();
}
  368. void CReplayRenderer::DrawResolvingQuad( int nWidth, int nHeight )
  369. {
  370. CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
  371. IMesh *pMesh = pRenderContext->GetDynamicMesh();
  372. CMeshBuilder meshBuilder;
  373. // Epsilons for 1:1 texel to pixel mapping
  374. float fWidthEpsilon = IsOSX() ? 0.0f : 0.5f / ((float) nWidth);
  375. float fHeightEpsilon = IsOSX() ? 0.0f : 0.5f / ((float) nHeight);
  376. meshBuilder.Begin( pMesh, MATERIAL_QUADS, 1 );
  377. meshBuilder.Position3f( -1.0f, 1.0f, 0.5f ); // Upper left
  378. meshBuilder.TexCoord2f( 0, 0.0f + fWidthEpsilon, 0.0f + fHeightEpsilon );
  379. meshBuilder.AdvanceVertex();
  380. meshBuilder.Position3f( -1.0f, -1.0f, 0.5f ); // Lower left
  381. meshBuilder.TexCoord2f( 0, 0.0f + fWidthEpsilon, 1.0f + fHeightEpsilon );
  382. meshBuilder.AdvanceVertex();
  383. meshBuilder.Position3f( 1.0f, -1.0f, 0.5f ); // Lower right
  384. meshBuilder.TexCoord2f( 0, 1.0f + fWidthEpsilon, 1.0f + fHeightEpsilon );
  385. meshBuilder.AdvanceVertex();
  386. meshBuilder.Position3f( 1.0f, 1.0f, 0.5f ); // Upper right
  387. meshBuilder.TexCoord2f( 0, 1.0f + fWidthEpsilon, 0.0f + fHeightEpsilon );
  388. meshBuilder.AdvanceVertex();
  389. meshBuilder.End();
  390. pMesh->Draw();
  391. }
//-----------------------------------------------------------------------------
// Begins rendering one sub-sample: resets the ping-pong index on the first
// sample of a frame and routes drawing into the per-sample offscreen target.
// Matched by the PopRenderTargetAndViewport() in the caller's resolve path.
//-----------------------------------------------------------------------------
void CReplayRenderer::BeginRenderingSample( int nSample, int x, int y, int nWidth, int nHeight, float fTonemapScale )
{
    // Always start on ping-pong buffer zero
    if ( nSample == 0 )
    {
        m_nCurrentPingPong = 0;
    }

    CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
    pRenderContext->PushRenderTargetAndViewport( m_AccumBuffSample, x, y, nWidth, nHeight );
}
//-----------------------------------------------------------------------------
// Blends the newest sample (m_AccumBuffSample) into the ping-pong
// accumulation chain with weight 1/(nSample+1) - a running average over all
// samples so far - and optionally copies the result into m_LayoffResult.
// NOTE(review): frametime and flBloomScale are currently unused.
//-----------------------------------------------------------------------------
void CReplayRenderer::ResolveSamples( int nSample, DmeTime_t frametime, int x, int y, int nWidth, int nHeight, bool bLayoffResult, float flBloomScale )
{
    CMatRenderContextPtr pRenderContext( g_pMaterialSystem );

    // Render resolving quad to current ping-pong buffer
    AccumParams_t accParms = {
        m_AccumBuffSample,
        m_AccumBuffPingPong[ ( m_nCurrentPingPong + 1 ) % 2 ],  // previous accumulation
        1.0f / (float)( nSample + 1 ),
        false
    };
    pRenderContext->Bind( m_FourSampleResolveMatRef, &accParms );
    pRenderContext->PushRenderTargetAndViewport( m_AccumBuffPingPong[m_nCurrentPingPong], x, y, nWidth, nHeight );
    DrawResolvingQuad( nWidth, nHeight );
    pRenderContext->PopRenderTargetAndViewport();

    // If we want to show accumulated result to user...
    if ( bLayoffResult )
    {
        // Weight of 1.0 copies the accumulated buffer straight through.
        accParms.m_pTexture0 = m_AccumBuffPingPong[m_nCurrentPingPong];
        accParms.m_pTexture1 = m_AccumBuffPingPong[m_nCurrentPingPong];
        accParms.m_fSampleWeight = 1.0f;
        accParms.m_bClear = false;
        pRenderContext->Bind( m_FourSampleResolveMatRef, &accParms );
        pRenderContext->PushRenderTargetAndViewport( m_LayoffResult, x, y, nWidth, nHeight );
        DrawResolvingQuad( nWidth, nHeight );
        pRenderContext->PopRenderTargetAndViewport();
    }

    m_nCurrentPingPong = (m_nCurrentPingPong + 1) % 2; // Flip the ping-pong buffers
}
  430. bool CReplayRenderer::IsHDR() const
  431. {
  432. return g_pMaterialSystemHardwareConfig->GetHDRType() == HDR_TYPE_FLOAT;
  433. }
  434. float CReplayRenderer::GetViewModelFOVOffset()
  435. {
  436. // float flVMDefaultFov = m_pViewmodelFov ? m_pViewmodelFov->GetFloat() : 54.0f;
  437. float flVMDefaultFov = 54.0f;
  438. float flDefaultFov = m_pDefaultFov ? m_pDefaultFov->GetFloat() : 75.0f;
  439. return flVMDefaultFov - flDefaultFov;
  440. }
//-----------------------------------------------------------------------------
// Fills out viewSetup for one sub-sample render of the current frame.
//-----------------------------------------------------------------------------
void CReplayRenderer::SetupSampleView( int x, int y, int w, int h, int nSample, CViewSetup& viewSetup )
{
    // Frustum stuff
    // FIXME: This currently matches the client DLL for HL2
    // but we probably need a way of getting this state from the client DLL
    viewSetup.zNear = 3;
    viewSetup.zFar = 16384.0f * 1.73205080757f;     // 16384 * sqrt(3)

    viewSetup.x = x;
    viewSetup.y = y;
    viewSetup.width = w;
    viewSetup.height = h;
    viewSetup.m_flAspectRatio = (float)viewSetup.width / (float)viewSetup.height;

    // Rescale the FOV from its 4:3 baseline to the actual render aspect
    // ratio: take the half-angle tangent, scale it by aspect/(4/3), and
    // convert back to an angle.
    const float fov = viewSetup.fov;
    float fHalfAngleRadians = DEG2RAD( 0.5f * fov );
    float t = tan( fHalfAngleRadians ) * (viewSetup.m_flAspectRatio / ( 4.0f / 3.0f ));
    viewSetup.fov = RAD2DEG( 2.0f * atan( t ) );

    viewSetup.fovViewmodel = viewSetup.fov + GetViewModelFOVOffset();
    viewSetup.zNearViewmodel = 1;
    viewSetup.zFarViewmodel = viewSetup.zFar;
    viewSetup.m_bOrtho = false;
    viewSetup.m_bRenderToSubrectOfLargerScreen = true;

    SetupDOFMatrixSkewView( viewSetup.origin, viewSetup.angles, nSample, viewSetup ); // Sheared matrix method more comparable to image-space DoF approximation

    // Only have the engine do bloom and tone mapping if not HDR
    viewSetup.m_bDoBloomAndToneMapping = !IsHDR();
    viewSetup.m_bCacheFullSceneState = m_bCacheFullSceneState;
}
//-----------------------------------------------------------------------------
// Sets up an off-center (skewed) projection for the given sub-sample.  With
// antialiasing enabled, the projection window is jittered per-sample by a
// sub-pixel offset so the accumulated sub-frames average into an antialiased
// image.  The depth-of-field path is currently compiled out (#if 0 below).
//-----------------------------------------------------------------------------
void CReplayRenderer::SetupDOFMatrixSkewView( const Vector &pos, const QAngle &angles, int nSample, CViewSetup& viewSetup )
{
    Vector vPosition = pos;

    matrix3x4_t matViewMatrix; // Get transform
    AngleMatrix( angles, matViewMatrix );

    // Extract the camera basis vectors from the orientation matrix
    Vector vViewDirection, vViewLeft, vViewUp;
    MatrixGetColumn( matViewMatrix, 0, vViewDirection );
    MatrixGetColumn( matViewMatrix, 1, vViewLeft );
    MatrixGetColumn( matViewMatrix, 2, vViewUp );

    // Be sure these are normalized
    vViewDirection.NormalizeInPlace();
    vViewLeft.NormalizeInPlace();
    vViewUp.NormalizeInPlace();

    // Set up a non-skewed off-center projection matrix to start with... (Posters already have this set up)
    viewSetup.m_bOffCenter = true;
    viewSetup.m_flOffCenterBottom = 0.0f;
    viewSetup.m_flOffCenterTop = 1.0f;
    viewSetup.m_flOffCenterLeft = 0.0f;
    viewSetup.m_flOffCenterRight = 1.0f;

    if ( IsAntialiasingEnabled() && !IsDepthOfFieldEnabled() && !m_bForceCheapDoF ) // AA jitter but no DoF
    {
        // Shift the projection window by a fraction of a pixel using the
        // Poisson-disk table chosen in SetupJitterTable().
        Vector2D vAAJitter = m_pJitterTable[nSample % m_nNumJitterSamples];
        const float fHalfPixelRadius = 0.65;
        viewSetup.m_flOffCenterBottom += (vAAJitter.y / (float) viewSetup.height) * fHalfPixelRadius;
        viewSetup.m_flOffCenterTop += (vAAJitter.y / (float) viewSetup.height) * fHalfPixelRadius;
        viewSetup.m_flOffCenterLeft += (vAAJitter.x / (float) viewSetup.width) * fHalfPixelRadius;
        viewSetup.m_flOffCenterRight += (vAAJitter.x / (float) viewSetup.width) * fHalfPixelRadius;
        viewSetup.origin = vPosition;
    }

    // NOTE(review): the disabled block below references 'pCamera', which is
    // not declared in this function - it would not compile if re-enabled as-is.
#if 0
    if ( IsDepthOfFieldEnabled() || m_bForceCheapDoF ) // DoF (independent of AA jitter)
    {
        // Try to match the amount of blurriness from legacy fulcrum method
        const float flDoFHack = 0.0008f;
        Vector2D vDoFJitter = DepthOfFieldJitter( nSample ) * pCamera->GetAperture() * flDoFHack;
        float fov43 = pCamera->GetFOVx();
        float fHalfAngleRadians43 = DEG2RAD( 0.5f * fov43 );
        float t = tan( fHalfAngleRadians43 ) * (viewSetup.m_flAspectRatio / ( 4.0f / 3.0f ));
        float flZFocalWidth = t * pCamera->GetFocalDistance() * 2.0f; // Width of Viewport at Focal plane
        Vector2D vFocalZJitter = vDoFJitter * flZFocalWidth;
        viewSetup.m_flOffCenterBottom += vDoFJitter.y;
        viewSetup.m_flOffCenterTop += vDoFJitter.y;
        viewSetup.m_flOffCenterLeft += vDoFJitter.x;
        viewSetup.m_flOffCenterRight += vDoFJitter.x;
        viewSetup.origin = vPosition + vViewLeft * vFocalZJitter.x - vViewUp * vFocalZJitter.y * (1.0f / viewSetup.m_flAspectRatio);

        if ( !m_bForceCheapDoF )
        {
            Vector2D vAAJitter = g_vJitterTable32[nSample % 32]; // Jitter in addition to DoF offset
            const float fHalfPixelRadius = 0.6f;
            viewSetup.m_flOffCenterBottom += (vAAJitter.y / (float) viewSetup.height) * fHalfPixelRadius;
            viewSetup.m_flOffCenterTop += (vAAJitter.y / (float) viewSetup.height) * fHalfPixelRadius;
            viewSetup.m_flOffCenterLeft += (vAAJitter.x / (float) viewSetup.width) * fHalfPixelRadius;
            viewSetup.m_flOffCenterRight += (vAAJitter.x / (float) viewSetup.width) * fHalfPixelRadius;
        }
    }
#endif

    MatrixAngles( matViewMatrix, viewSetup.angles );
}
// Motion-blur quality index straight from the cached render settings.
int CReplayRenderer::GetMotionBlurQuality() const
{
    return m_RenderParams.m_Settings.m_nMotionBlurQuality;
}
  529. int CReplayRenderer::GetDepthOfFieldQuality() const
  530. {
  531. if ( !IsDepthOfFieldEnabled() )
  532. return 0;
  533. return MAX_DOF_QUALITY;
  534. }
  535. /*static*/ int CReplayRenderer::GetNumMotionBlurTimeSteps( int nQuality )
  536. {
  537. Assert( nQuality >= 0 && nQuality <= MAX_MOTION_BLUR_QUALITY );
  538. // Map {0, 1, 2, 3, 4} to {8, 16, 32, 64, 128 }
  539. return (int) pow(2.0f, nQuality+2 );
  540. }
  541. int CReplayRenderer::NumMotionBlurTimeSteps() const
  542. {
  543. return ( IsMotionBlurEnabled() ) ? GetNumMotionBlurTimeSteps( GetMotionBlurQuality() ) : 1;
  544. }
bool CReplayRenderer::IsMotionBlurEnabled() const
{
    return m_RenderParams.m_Settings.m_bMotionBlurEnabled;
}

// Depth of field is hard-disabled: its render path in
// SetupDOFMatrixSkewView() is compiled out with #if 0.
bool CReplayRenderer::IsDepthOfFieldEnabled() const
{
    return false;
}

bool CReplayRenderer::IsAntialiasingEnabled() const
{
    return m_RenderParams.m_Settings.m_bAAEnabled;
}
  557. void CReplayRenderer::ComputeSampleCounts( int *pNSamplesPerTimeStep, int *pNTotalSamples ) const
  558. {
  559. *pNSamplesPerTimeStep = *pNTotalSamples = 1;
  560. if ( IsMotionBlurEnabled() )
  561. {
  562. *pNTotalSamples *= NumMotionBlurTimeSteps();
  563. }
  564. if ( IsDepthOfFieldEnabled() )
  565. {
  566. *pNTotalSamples *= s_DoFQualityToSamples[GetDepthOfFieldQuality()];
  567. *pNSamplesPerTimeStep *= s_DoFQualityToSamples[GetDepthOfFieldQuality()];
  568. }
  569. }
// Target output-movie framerate from the cached render settings.
float CReplayRenderer::GetFramerate() const
{
    return m_RenderParams.m_Settings.m_FPS.GetFPS();
}

// Shutter-open time in seconds: half the frame interval (NOTE(review):
// looks like a 180-degree shutter model - confirm against the blur code).
double CReplayRenderer::GetShutterSpeed() const
{
    return 0.5 / m_RenderParams.m_Settings.m_FPS.GetFPS();
}
#ifdef TRACE_REPLAY_STATE_MACHINE
static int nFramesSent = 0;     // debug-only count of frames appended to the movie
#endif

//-----------------------------------------------------------------------------
// Renders one engine frame's worth of work for the output movie.  With
// motion blur this is a small state machine: the "shutter" stays open for
// nMotionBlurTimeSteps engine sub-frames (each accumulated via
// RenderLayoffFrame), then closed for the same number of sub-frames; a movie
// frame is appended at the moment the shutter closes.  Without motion blur,
// every call renders and appends exactly one frame.
//-----------------------------------------------------------------------------
void CReplayRenderer::CompositeAndLayoffFrame( int nFrame )
{
#ifdef TRACE_REPLAY_STATE_MACHINE
    Msg("CompositeAndLayoffFrame( %3d ) TStep=%d ...... ", nFrame, m_nTimeStep );
#endif
    const int nMotionBlurTimeSteps = NumMotionBlurTimeSteps();
    bool bAppendToMovie = false;

    // Determine if this is a frame we handle audio on
    bool AudioTrigger = (m_nTimeStep == 0) && !m_bShutterClosed;
    SetAudioSyncFrame( AudioTrigger );

    // If we aren't doing motion blur, just render the frame and add it to the video
    if ( !IsMotionBlurEnabled() )
    {
        m_curSampleTime = DmeTime_t( nFrame, GetFramerate() );
#ifdef TRACE_REPLAY_STATE_MACHINE
        Msg( "Rendering Frame at T=%.4f ", m_curSampleTime.GetSeconds() );
#endif
        RenderLayoffFrame( m_curSampleTime, 0, 1 ); // Just get one frame
        bAppendToMovie = true;
        goto render_to_video;
    }

    // Shutter closed?  Just count sub-frames until it reopens.
    if ( m_bShutterClosed )
    {
        m_nTimeStep++;
#ifdef TRACE_REPLAY_STATE_MACHINE
        Msg("Shutter Closed... TStep now %d", m_nTimeStep );
#endif
        // If nMotionBlurTimeSteps subframes have passed, open the shutter for the next frame.
        if ( m_nTimeStep >= nMotionBlurTimeSteps )
        {
            Assert( m_nTimeStep == nMotionBlurTimeSteps );
            m_nTimeStep = 0;
            m_bShutterClosed = false;
#ifdef TRACE_REPLAY_STATE_MACHINE
            Msg( ", Shutter OPENED, TStep=0");
#endif
        }
#ifdef TRACE_REPLAY_STATE_MACHINE
        ConVarRef HF( "host_framerate" );
        float frameRate = HF.GetFloat();
        Msg( ", DONE, ENgine FPS = %f\n", frameRate );
#endif
        return;
    }

    // scope to avoid compiler warnings
    {
        // Shutter is open, accumulate sub-frames
        int nSamplesPerTimeStep = 1;
        int nNumTotalSamples = 1;
        ComputeSampleCounts( &nSamplesPerTimeStep, &nNumTotalSamples );

        double frameTime = DmeTime_t( nFrame, GetFramerate() ).GetSeconds();
        DmeTime_t timeStepSize( GetShutterSpeed() );
        // NOTE(review): remainderStepSize is computed but never used.
        DmeTime_t remainderStepSize( DmeTime_t( 1, GetFramerate() ) - timeStepSize );
        Assert( timeStepSize.GetSeconds() > 0.0 );
        DmeTime_t curSampleTime( frameTime );
#ifdef TRACE_REPLAY_STATE_MACHINE
        Msg("FrameT=%.4lf ", frameTime );
#endif
        // Divide the shutter-open interval into sub-steps, centered on the
        // nominal frame time.
        timeStepSize /= nMotionBlurTimeSteps;
        curSampleTime -= timeStepSize * ( nMotionBlurTimeSteps - 1 ) / 2.0f;

        // Loop through all samples for the current timestep, jittering the camera if antialiasing is enabled.
#ifdef TRACE_REPLAY_STATE_MACHINE
        Msg(" Shutter's Open, Rendering %d Sub-Frames ", nSamplesPerTimeStep );
        Msg( "Frame %i: Laying off sub frame at time step %i \n", nFrame, m_nTimeStep );
#endif
        RenderLayoffFrame( m_curSampleTime, m_nCurSample++, nNumTotalSamples );
        ++m_nTimeStep;
        m_curSampleTime += timeStepSize;

        // Catch the very last motionblur timestep and append to movie
        if ( m_nTimeStep == nMotionBlurTimeSteps )
        {
#ifdef TRACE_REPLAY_STATE_MACHINE
            Msg( " TStep=Max, Append=TRUE ... ");
#endif
            m_nTimeStep = 0;
            m_nCurSample = 0;
            m_curSampleTime = curSampleTime;
            m_bShutterClosed = true; // Close or open the shutter for nMotionBlurTimeSteps subframes
            bAppendToMovie = true; // Add a frame to the movie we've just closed the shutter
        }
    }

render_to_video:
    // Append the frame to the movie?
    if ( bAppendToMovie )
    {
#ifdef TRACE_REPLAY_STATE_MACHINE
        Msg(" -- Appending Frame %d to Movie\n", nFramesSent ); nFramesSent++;
#endif
        CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
        pRenderContext->PushRenderTargetAndViewport( m_LayoffResult );

        // Add this frame to the movie
        LayoffFrame( nFrame );

        pRenderContext->PopRenderTargetAndViewport();
    }
#ifdef TRACE_REPLAY_STATE_MACHINE
    Msg("\n");
#endif
}
  680. void CReplayRenderer::LayoffFrame( int nFrame )
  681. {
  682. VPROF_BUDGET( "CReplayRenderer::LayoffFrame", VPROF_BUDGETGROUP_REPLAY );
  683. // FIXME: This is somewhat of a hack to get layoff working again
  684. // We're rendering into the full preview size, but stretching down to the actual size
  685. Rect_t srcRect;
  686. srcRect.x = 0;
  687. srcRect.y = 0;
  688. srcRect.width = m_RenderParams.m_Settings.m_nWidth;
  689. srcRect.height = m_RenderParams.m_Settings.m_nHeight;
  690. Rect_t dstRect;
  691. dstRect.x = 0;
  692. dstRect.y = 0;
  693. dstRect.width = m_RenderParams.m_Settings.m_nWidth;
  694. dstRect.height = m_RenderParams.m_Settings.m_nHeight;
  695. #ifdef TRACE_REPLAY_STATE_MACHINE
  696. Msg( "laying off movie frame %i\n", nFrame );
  697. #endif
  698. CMatRenderContextPtr pRenderContext( materials );
  699. // pRenderContext->ReadPixelsAndStretch( &srcRect, &dstRect, (unsigned char*)m_pLayoffBuf,
  700. // IMAGE_FORMAT_BGRA8888, dstRect.width * ImageLoader::SizeInBytes( IMAGE_FORMAT_BGRA8888 ) );
  701. pRenderContext->ReadPixels( 0, 0, (int) m_RenderParams.m_Settings.m_nWidth, (int) m_RenderParams.m_Settings.m_nHeight, (unsigned char*)m_pLayoffBuf, IMAGE_FORMAT_BGRA8888 );
  702. static ConVarRef mat_queue_mode( "mat_queue_mode" );
  703. // Encode the frame
  704. #ifdef REPLAY_RECORDING_ENABLE
  705. if ( m_RenderParams.m_bExportRaw )
  706. {
  707. CUtlBuffer bufOut;
  708. if ( TGAWriter::WriteToBuffer( (unsigned char *)m_pLayoffBuf, bufOut, m_RenderParams.m_Settings.m_nWidth,
  709. m_RenderParams.m_Settings.m_nHeight, IMAGE_FORMAT_BGRA8888, IMAGE_FORMAT_RGB888 ) )
  710. {
  711. // Format filename and write the TGA
  712. CFmtStr fmtFilename(
  713. "%sFrame_%04i.tga",
  714. m_fmtTgaRenderDirName.Access(),
  715. m_iTgaFrame++
  716. );
  717. if ( !g_pFullFileSystem->WriteFile( fmtFilename.Access(), NULL, bufOut ) )
  718. {
  719. Warning( "Couldn't write bitmap data snapshot to file %s.\n", fmtFilename.Access() );
  720. }
  721. }
  722. }
  723. else if ( m_pMovieMaker )
  724. {
  725. // can't run in any other mode
  726. Assert( mat_queue_mode.GetInt() == 0 );
  727. VPROF_BUDGET( "CReplayRenderer::LayoffFrame - AppendVideoFrame", VPROF_BUDGETGROUP_REPLAY );
  728. m_pMovieMaker->AppendVideoFrame( m_pLayoffBuf );
  729. }
  730. #endif
  731. }
  732. void CReplayRenderer::GetViewSetup( CViewSetup &viewsetup )
  733. {
  734. extern ConVar v_viewmodel_fov;
  735. viewsetup = *view->GetPlayerViewSetup();
  736. // HACK: Override the view - this will keep the view from popping if the user toggles the render preview checkbox.
  737. ReplayCamera()->CalcView( viewsetup.origin, viewsetup.angles, viewsetup.fov );
  738. viewsetup .fovViewmodel = ScaleFOVByWidthRatio( v_viewmodel_fov.GetFloat(), viewsetup.m_flAspectRatio / ( 4.0f / 3.0f ) );
  739. }
  740. void CReplayRenderer::RenderLayoffFrame( DmeTime_t time, int nCurSample, int nNumTotalSamples )
  741. {
  742. CViewSetup viewSetup;
  743. GetViewSetup( viewSetup );
  744. int x=0, y=0, w=m_RenderParams.m_Settings.m_nWidth, h=m_RenderParams.m_Settings.m_nHeight;
  745. // FIXME: Using the preview size here is something of a hack
  746. // to get layoff working again. We're actually going to stretch down from the preview size to layoff size
  747. // during frame capture
  748. float fTonemapScale = 0.28f;
  749. BeginRenderingSample( nCurSample, x, y, w, h, fTonemapScale);
  750. // Initialize view setup for this sample
  751. SetupSampleView( 0, 0, w, h, nCurSample, viewSetup );
  752. const int flags = RENDERVIEW_DRAWVIEWMODEL;
  753. // Tell the engine to tell the client to render the view (sans viewmodel)
  754. view->RenderView( viewSetup, VIEW_CLEAR_COLOR | VIEW_CLEAR_DEPTH, flags );
  755. // Resolve the accumulation buffer samples for display this frame
  756. float fBloomScale = 0.28f;
  757. bool bRenderFinalFrame = nCurSample == ( nNumTotalSamples - 1 );
  758. ResolveSamples( nCurSample, time, 0, 0, w, h, bRenderFinalFrame, fBloomScale );
  759. // Pop the target
  760. CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
  761. pRenderContext->PopRenderTargetAndViewport();
  762. }
  763. void CReplayRenderer::EndRendering()
  764. {
  765. CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
  766. pRenderContext->PopRenderTargetAndViewport();
  767. }
  768. void CReplayRenderer::ClearToBlack( CTextureReference &buf, int x, int y, int nWidth, int nHeight )
  769. {
  770. CMatRenderContextPtr pRenderContext( g_pMaterialSystem );
  771. // Bind the resolving material
  772. AccumParams_t accParms = { m_AccumBuffSample, m_AccumBuffSample, 0.0f, true }; // true to clear to black
  773. pRenderContext->Bind( m_FourSampleResolveMatRef, &accParms );
  774. // Render black quad to the layoff result
  775. pRenderContext->PushRenderTargetAndViewport( buf, x, y, nWidth, nHeight );
  776. DrawResolvingQuad( nWidth, nHeight );
  777. pRenderContext->PopRenderTargetAndViewport();
  778. }
  779. void CReplayRenderer::RenderVideo()
  780. {
  781. #if _DEBUG
  782. static ConVarRef replay_fake_render( "replay_fake_render" );
  783. if ( replay_fake_render.IsValid() && replay_fake_render.GetBool() )
  784. return;
  785. #endif
  786. if ( !engine->IsInGame() )
  787. return;
  788. if ( !m_LayoffResult.IsValid() )
  789. return;
  790. CompositeAndLayoffFrame( m_nFrame++ );
  791. }
  792. void CReplayRenderer::RenderAudio( unsigned char *pBuffer, int nSize, int nNumSamples )
  793. {
  794. #ifdef REPLAY_RECORDING_ENABLE
  795. if ( m_RenderParams.m_bExportRaw )
  796. {
  797. g_pEngineClientReplay->Wave_AppendTmpFile( TMP_WAVE_FILENAME, pBuffer, nNumSamples );
  798. }
  799. else if ( m_pMovieMaker )
  800. {
  801. m_pMovieMaker->AppendAudioSamples( pBuffer, (size_t)nSize );
  802. }
  803. #endif
  804. }
  805. //-----------------------------------------------------------------------------
  806. #endif