Team Fortress 2 Source Code as of 22/4/2020


//========= Copyright Valve Corporation, All rights reserved. ============//
//
// Purpose:
//
// $NoKeywords: $
//===========================================================================//
#include "cbase.h"
#include "sourcevirtualreality.h"
#include "icommandline.h"
#include "filesystem.h"
#include "materialsystem/imaterial.h"
#include "materialsystem/imesh.h"
#include "materialsystem/imaterialvar.h"
#include "renderparm.h"
#include "openvr/openvr.h"

using namespace vr;

CSourceVirtualReality g_SourceVirtualReality;
EXPOSE_SINGLE_INTERFACE_GLOBALVAR( CSourceVirtualReality, ISourceVirtualReality,
	SOURCE_VIRTUAL_REALITY_INTERFACE_VERSION, g_SourceVirtualReality );

static VMatrix VMatrixFrom44(const float v[4][4]);
static VMatrix VMatrixFrom34(const float v[3][4]);
static VMatrix OpenVRToSourceCoordinateSystem(const VMatrix& vortex);

// --------------------------------------------------------------------
// Purpose: Set the current HMD pose as the zero pose
// --------------------------------------------------------------------
void CC_VR_Reset_Home_Pos( const CCommand& args )
{
	g_SourceVirtualReality.AcquireNewZeroPose();
}
static ConCommand vr_reset_home_pos("vr_reset_home_pos", CC_VR_Reset_Home_Pos, "Sets the current HMD position as the zero point" );

// --------------------------------------------------------------------
// Purpose: Reinitialize the IHeadtrack object
// --------------------------------------------------------------------
void CC_VR_Track_Reinit( const CCommand& args )
{
	if( !g_SourceVirtualReality.ResetTracking() )
	{
		// Tracker can't be restarted: show a message, but don't quit.
		Warning("Can't reset HMD tracker\n");
	}
}
static ConCommand vr_track_reinit("vr_track_reinit", CC_VR_Track_Reinit, "Reinitializes HMD tracking" );

// Disable distortion processing altogether.
ConVar vr_distortion_enable ( "vr_distortion_enable", "1" );

// Disable distortion by changing the distortion texture itself, so that the rendering path is otherwise identical.
// This won't take effect until the texture is refreshed.
ConVar vr_debug_nodistortion ( "vr_debug_nodistortion", "0" );

// Disable just the chromatic aberration correction in the distortion texture, to make undistort quality/artifacts
// easier to see and debug. As above, won't take effect until the texture is refreshed.
ConVar vr_debug_nochromatic ( "vr_debug_nochromatic", "0" );

// Resolution of the undistort map.
static const int distortionTextureSize = 128;

void CC_vr_refresh_distortion_texture( const CCommand& args )
{
	g_SourceVirtualReality.RefreshDistortionTexture();
}
ConCommand vr_refresh_distortion_texture( "vr_refresh_distortion_texture", CC_vr_refresh_distortion_texture );

ConVar vr_use_offscreen_render_target( "vr_use_offscreen_render_target", "0", 0, "Experimental: Use larger offscreen render target for pre-distorted scene in VR" );

// --------------------------------------------------------------------
// construction/destruction
// --------------------------------------------------------------------
CSourceVirtualReality::CSourceVirtualReality()
	: m_textureGeneratorLeft( vr::Eye_Left ),
	m_textureGeneratorRight( vr::Eye_Right )
{
	m_bActive = false;
	m_bUsingOffscreenRenderTarget = false;
	m_pHmd = NULL;
}

CSourceVirtualReality::~CSourceVirtualReality()
{
}

// --------------------------------------------------------------------
// Purpose:
// --------------------------------------------------------------------
bool CSourceVirtualReality::Connect( CreateInterfaceFn factory )
{
	if ( !factory )
		return false;

	if ( !BaseClass::Connect( factory ) )
		return false;

	if ( !g_pFullFileSystem )
	{
		Warning( "The head tracker requires the filesystem to run!\n" );
		return false;
	}

	return true;
}

// --------------------------------------------------------------------
// Purpose:
// --------------------------------------------------------------------
void CSourceVirtualReality::Disconnect()
{
	BaseClass::Disconnect();
}

// --------------------------------------------------------------------
// Purpose:
// --------------------------------------------------------------------
void * CSourceVirtualReality::QueryInterface( const char *pInterfaceName )
{
	CreateInterfaceFn factory = Sys_GetFactoryThis();	// This silly construction is necessary
	return factory( pInterfaceName, NULL );				// to prevent the LTCG compiler from crashing.
}

// --------------------------------------------------------------------
// Purpose:
// --------------------------------------------------------------------
InitReturnVal_t CSourceVirtualReality::Init()
{
	InitReturnVal_t nRetVal = BaseClass::Init();
	if ( nRetVal != INIT_OK )
		return nRetVal;

	MathLib_Init( 2.2f, 2.2f, 0.0f, 2.0f );

	// if our tracker expects to use the texture-based distortion shader,
	// make the procedural textures for that shader now
	m_pDistortionTextureLeft.Init( materials->CreateProceduralTexture( "vr_distort_map_left", TEXTURE_GROUP_PIXEL_SHADERS,
		distortionTextureSize, distortionTextureSize, IMAGE_FORMAT_RGBA16161616,
		TEXTUREFLAGS_NOMIP | TEXTUREFLAGS_NOLOD | TEXTUREFLAGS_NODEBUGOVERRIDE |
		TEXTUREFLAGS_SINGLECOPY | TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT ) );
	m_pDistortionTextureRight.Init( materials->CreateProceduralTexture( "vr_distort_map_right", TEXTURE_GROUP_PIXEL_SHADERS,
		distortionTextureSize, distortionTextureSize, IMAGE_FORMAT_RGBA16161616,
		TEXTUREFLAGS_NOMIP | TEXTUREFLAGS_NOLOD | TEXTUREFLAGS_NODEBUGOVERRIDE |
		TEXTUREFLAGS_SINGLECOPY | TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT ) );
	m_pDistortionTextureLeft->SetTextureRegenerator( &m_textureGeneratorLeft );
	m_pDistortionTextureRight->SetTextureRegenerator( &m_textureGeneratorRight );

	return INIT_OK;
}
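
// --------------------------------------------------------------------
// Purpose: Forces both distortion map textures to re-download, which
// re-runs the texture regenerators below with the current HMD and
// convar state.
// --------------------------------------------------------------------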
void CSourceVirtualReality::RefreshDistortionTexture()
{
	m_pDistortionTextureLeft->Download();
	m_pDistortionTextureRight->Download();
}
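
// --------------------------------------------------------------------
// Purpose: Fills the procedural distortion map for one eye. Each
// RGBA16161616 texel stores the distorted source UVs returned by
// ComputeDistortion: the red channel's UV goes in components 0/1 and
// the blue channel's UV in components 2/3 (green is not stored, so the
// undistort shader presumably reconstructs it). When rendering to the
// shared framebuffer rather than an offscreen target, U is rescaled
// into that eye's half of the texture.
// --------------------------------------------------------------------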
void CDistortionTextureRegen::RegenerateTextureBits( ITexture *pTexture, IVTFTexture *pVTFTexture, Rect_t *pSubRect )
{
	// only do this if we have an HMD
	if( !g_SourceVirtualReality.GetHmd() )
		return;

	unsigned short *imageData = (unsigned short*) pVTFTexture->ImageData( 0, 0, 0 );
	enum ImageFormat imageFormat = pVTFTexture->Format();
	if( imageFormat != IMAGE_FORMAT_RGBA16161616 )
	{
		return;
	}

	// we use different UVs for the full FB source texture
	float fUScale;
	float fUOffset;
	if( g_SourceVirtualReality.UsingOffscreenRenderTarget() )
	{
		fUScale = 1.f;
		fUOffset = 0.f;
	}
	else
	{
		fUScale = 0.5f;
		fUOffset = m_eEye == Eye_Left ? 0.f : 0.5f;
	}

	// optimize
	int width = pVTFTexture->Width();
	int height = pVTFTexture->Height();
	float fHeight = height;
	float fWidth = width;
	int x, y;
	for( y = 0; y < height; y++ )
	{
		for( x = 0; x < width; x++ )
		{
			int offset = 4 * ( x + y * width );
			assert( offset < width * height * 4 );

			float u = ( (float)x + 0.5f) / fWidth;
			float v = ( (float)y + 0.5f) / fHeight;
			DistortionCoordinates_t coords = g_SourceVirtualReality.GetHmd()->ComputeDistortion( m_eEye, u, v );

			coords.rfRed[0] = Clamp( coords.rfRed[0], 0.f, 1.f ) * fUScale + fUOffset;
			coords.rfGreen[0] = Clamp( coords.rfGreen[0], 0.f, 1.f ) * fUScale + fUOffset;
			coords.rfBlue[0] = Clamp( coords.rfBlue[0], 0.f, 1.f ) * fUScale + fUOffset;

			if ( vr_debug_nodistortion.GetBool() )
			{
				coords.rfRed[0] = coords.rfGreen[0] = coords.rfBlue[0] = u * fUScale + fUOffset;
				coords.rfRed[1] = coords.rfGreen[1] = coords.rfBlue[1] = v;
			}

			if ( vr_debug_nochromatic.GetBool() )
			{
				coords.rfRed[0] = coords.rfBlue[0] = coords.rfGreen[0];
				coords.rfRed[1] = coords.rfBlue[1] = coords.rfGreen[1];
			}

			imageData[offset + 0] = (unsigned short)(Clamp( coords.rfRed[0], 0.f, 1.f ) * 65535.f );
			imageData[offset + 1] = (unsigned short)(Clamp( coords.rfRed[1], 0.f, 1.f ) * 65535.f );
			imageData[offset + 2] = (unsigned short)(Clamp( coords.rfBlue[0], 0.f, 1.f ) * 65535.f );
			imageData[offset + 3] = (unsigned short)(Clamp( coords.rfBlue[1], 0.f, 1.f ) * 65535.f );
		}
	}
}

// --------------------------------------------------------------------
// Purpose:
// --------------------------------------------------------------------
void CSourceVirtualReality::Shutdown()
{
	BaseClass::Shutdown();

	if( m_pHmd )
		VR_Shutdown();

	m_pDistortionTextureLeft.Shutdown();
	m_pDistortionTextureRight.Shutdown();
}

// --------------------------------------------------------------------
// Purpose: Let the caller know if we're in VR mode
// --------------------------------------------------------------------
bool CSourceVirtualReality::ShouldRunInVR()
{
	return m_bActive && m_pHmd;
}

// --------------------------------------------------------------------
// Purpose: Returns true if there's an Hmd connected and everything
// started up.
// --------------------------------------------------------------------
bool CSourceVirtualReality::IsHmdConnected()
{
	// we really just care if OpenVR init was successful
	return EnsureOpenVRInited();
}

// --------------------------------------------------------------------
// Purpose: Let the caller know how big to make the window and where
// to put it.
// --------------------------------------------------------------------
bool CSourceVirtualReality::GetDisplayBounds( VRRect_t *pRect )
{
	if( m_pHmd )
	{
		int32_t x, y;
		uint32_t width, height;
		m_pHmd->GetWindowBounds( &x, &y, &width, &height );
		pRect->nX = x;
		pRect->nY = y;
		pRect->nWidth = width;
		pRect->nHeight = height;
		return true;
	}
	else
	{
		return false;
	}
}

// --------------------------------------------------------------------
// Purpose: Allocates the pre-distortion render targets.
// --------------------------------------------------------------------
void CSourceVirtualReality::CreateRenderTargets( IMaterialSystem *pMaterialSystem )
{
	if( !m_pHmd || !m_bActive )
		return;

	g_StereoGuiTexture.Init( materials->CreateNamedRenderTargetTextureEx2(
		"_rt_gui",
		640, 480, RT_SIZE_OFFSCREEN,
		materials->GetBackBufferFormat(),
		MATERIAL_RT_DEPTH_SHARED,
		TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT,
		CREATERENDERTARGETFLAGS_HDR )
		);

	if( UsingOffscreenRenderTarget() )
	{
		uint32_t nWidth, nHeight;
		m_pHmd->GetRecommendedRenderTargetSize( &nWidth, &nHeight );
		m_pPredistortRT.Init( pMaterialSystem->CreateNamedRenderTargetTextureEx2(
			"_rt_vr_predistort",
			nWidth, nHeight, RT_SIZE_LITERAL,
			IMAGE_FORMAT_RGBA8888,
			MATERIAL_RT_DEPTH_SEPARATE,
			TEXTUREFLAGS_RENDERTARGET | TEXTUREFLAGS_NOMIP /* TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT */,
			0 ) );

		// TODO: Figure out what I really want for the depth texture format
		m_pPredistortRTDepth.Init( pMaterialSystem->CreateNamedRenderTargetTextureEx2( "_rt_vr_predistort_depth", nWidth, nHeight,
			RT_SIZE_LITERAL, IMAGE_FORMAT_NV_DST24, MATERIAL_RT_DEPTH_NONE,
			TEXTUREFLAGS_CLAMPS | TEXTUREFLAGS_CLAMPT | TEXTUREFLAGS_NOMIP,
			0 ) );
	}
}
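
// --------------------------------------------------------------------
// Purpose: Releases the stereo GUI and pre-distortion render target
// references created above.
// --------------------------------------------------------------------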
void CSourceVirtualReality::ShutdownRenderTargets()
{
	g_StereoGuiTexture.Shutdown();
	m_pPredistortRT.Shutdown();
	m_pPredistortRTDepth.Shutdown();
}

// Returns the (possibly overridden) framebuffer size for render target sizing.
void CSourceVirtualReality::GetRenderTargetFrameBufferDimensions( int & nWidth, int & nHeight )
{
	if( m_pHmd && UsingOffscreenRenderTarget() )
	{
		uint32_t w, h;
		m_pHmd->GetRecommendedRenderTargetSize( &w, &h );
		nWidth = w;
		nHeight = h;
	}
	else
	{
		// this will cause material system to fall back to the
		// actual size of the frame buffer
		nWidth = nHeight = 0;
	}
}

// --------------------------------------------------------------------
// Purpose: fetches the render target for the specified eye
// --------------------------------------------------------------------
ITexture *CSourceVirtualReality::GetRenderTarget( ISourceVirtualReality::VREye eEye, ISourceVirtualReality::EWhichRenderTarget eWhich )
{
	// we don't use any render targets if distortion is disabled.
	// Just let the game render to the frame buffer.
	if( !vr_distortion_enable.GetBool() )
		return NULL;

	if( !m_bActive || !m_pHmd )
		return NULL;

	if( !UsingOffscreenRenderTarget() )
		return NULL;

	switch( eWhich )
	{
	case ISourceVirtualReality::RT_Color:
		return m_pPredistortRT;

	case ISourceVirtualReality::RT_Depth:
		return m_pPredistortRTDepth;
	}

	return NULL;
}
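
// Maps the engine's ISourceVirtualReality eye enum onto OpenVR's Hmd_Eye enum.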
vr::Hmd_Eye SourceEyeToHmdEye( ISourceVirtualReality::VREye eEye )
{
	if( eEye == ISourceVirtualReality::VREye_Left )
		return vr::Eye_Left;
	else
		return vr::Eye_Right;
}

// --------------------------------------------------------------------
// Purpose: Returns the viewport (offset and size) to render into for
// the specified eye
// --------------------------------------------------------------------
void CSourceVirtualReality::GetViewportBounds( VREye eEye, int *pnX, int *pnY, int *pnWidth, int *pnHeight )
{
	if( !m_pHmd || !m_bActive )
	{
		*pnWidth = 0;
		*pnHeight = 0;
		return;
	}

	// if there are textures, use those
	if( m_pPredistortRT && vr_distortion_enable.GetBool() )
	{
		if( pnX && pnY )
		{
			*pnX = 0;
			*pnY = 0;
		}
		*pnWidth = m_pPredistortRT->GetActualWidth();
		*pnHeight = m_pPredistortRT->GetActualHeight();
	}
	else
	{
		uint32_t x, y, w, h;
		m_pHmd->GetEyeOutputViewport( SourceEyeToHmdEye( eEye ), &x, &y, &w, &h );
		if( pnX && pnY )
		{
			*pnX = x;
			*pnY = y;
		}
		*pnWidth = w;
		*pnHeight = h;
	}
}

// --------------------------------------------------------------------
// Purpose: Returns the current pose
// --------------------------------------------------------------------
VMatrix CSourceVirtualReality::GetMideyePose()
{
	return m_ZeroFromHeadPose;
}

// ----------------------------------------------------------------------
// Purpose: Create a 4x4 projection transform from eye projection and distortion parameters
// ----------------------------------------------------------------------
inline static void ComposeProjectionTransform(float fLeft, float fRight, float fTop, float fBottom, float zNear, float zFar, float fovScale, VMatrix *pmProj )
{
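	// fLeft/fRight/fTop/fBottom are tangents of the frustum half-angles as returned by
	// GetProjectionRaw. The adjustment below divides the vertical half-angle by fovScale
	// and applies the resulting factor to all four edges, presumably to zoom the view
	// while preserving the frustum's aspect.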
	if( fovScale != 1.0f && fovScale > 0.f )
	{
		float fFovScaleAdjusted = tan( atan( fTop ) / fovScale ) / fTop;
		fRight *= fFovScaleAdjusted;
		fLeft *= fFovScaleAdjusted;
		fTop *= fFovScaleAdjusted;
		fBottom *= fFovScaleAdjusted;
	}

	float idx = 1.0f / (fRight - fLeft);
	float idy = 1.0f / (fBottom - fTop);
	float idz = 1.0f / (zFar - zNear);
	float sx = fRight + fLeft;
	float sy = fBottom + fTop;

	float (*p)[4] = pmProj->m;
	p[0][0] = 2*idx;	p[0][1] = 0;		p[0][2] = sx*idx;		p[0][3] = 0;
	p[1][0] = 0;		p[1][1] = 2*idy;	p[1][2] = sy*idy;		p[1][3] = 0;
	p[2][0] = 0;		p[2][1] = 0;		p[2][2] = -zFar*idz;	p[2][3] = -zFar*zNear*idz;
	p[3][0] = 0;		p[3][1] = 0;		p[3][2] = -1.0f;		p[3][3] = 0;
}

// ----------------------------------------------------------------------
// Purpose: Computes and returns the projection matrix for the eye
// ----------------------------------------------------------------------
bool CSourceVirtualReality::GetEyeProjectionMatrix ( VMatrix *pResult, VREye eEye, float zNear, float zFar, float fovScale )
{
	Assert ( pResult != NULL );
	if( !pResult || !m_pHmd || !m_bActive )
		return false;

	float fLeft, fRight, fTop, fBottom;
	m_pHmd->GetProjectionRaw( SourceEyeToHmdEye( eEye ), &fLeft, &fRight, &fTop, &fBottom );
	ComposeProjectionTransform( fLeft, fRight, fTop, fBottom, zNear, zFar, fovScale, pResult );
	return true;
}

// ----------------------------------------------------------------------
// Purpose: Returns the mid eye from left/right eye part of the view
// matrix transform chain.
// ----------------------------------------------------------------------
VMatrix CSourceVirtualReality::GetMidEyeFromEye( VREye eEye )
{
	if( m_pHmd )
	{
		vr::HmdMatrix34_t matMidEyeFromEye = m_pHmd->GetEyeToHeadTransform( SourceEyeToHmdEye( eEye ) );
		return OpenVRToSourceCoordinateSystem( VMatrixFrom34( matMidEyeFromEye.m ) );
	}
	else
	{
		VMatrix mat;
		mat.Identity();
		return mat;
	}
}

// returns the adapter index to use for VR mode
int CSourceVirtualReality::GetVRModeAdapter()
{
	if( EnsureOpenVRInited() )
	{
		Assert( m_pHmd );
		return m_pHmd->GetD3D9AdapterIndex();
	}
	else
	{
		return -1;
	}
}

bool CSourceVirtualReality::WillDriftInYaw()
{
	if( m_pHmd )
		return m_pHmd->GetBoolTrackedDeviceProperty( vr::k_unTrackedDeviceIndex_Hmd, Prop_WillDriftInYaw_Bool );
	else
		return false;
}

void CSourceVirtualReality::AcquireNewZeroPose()
{
	// just let the next tracker update re-zero us
	if( m_pHmd )
		m_pHmd->ResetSeatedZeroPose();
}
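
// ----------------------------------------------------------------------
// Purpose: Samples the predicted seated HMD pose and stores it, in
// Source coordinates, as the zero-from-head transform. The prediction
// interval is the time remaining until the next vsync plus the display's
// vsync-to-photons latency. (Note: the PlayerGameFov and
// fPredictionSeconds arguments are not used by this implementation.)
// ----------------------------------------------------------------------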
bool CSourceVirtualReality::SampleTrackingState ( float PlayerGameFov, float fPredictionSeconds )
{
	if( !m_pHmd || !m_bActive )
		return false;

	// If tracker can't return a pose (it's possibly recalibrating itself)
	// then we will freeze tracking at its current state, rather than
	// snapping it back to the zero position
	vr::TrackedDevicePose_t pose;
	if ( m_pHmd->IsTrackedDeviceConnected( k_unTrackedDeviceIndex_Hmd ) )
	{
		float fSecondsSinceLastVsync;
		m_pHmd->GetTimeSinceLastVsync( &fSecondsSinceLastVsync, NULL );

		float fFrameDuration = 1.f / m_pHmd->GetFloatTrackedDeviceProperty( vr::k_unTrackedDeviceIndex_Hmd,
			vr::Prop_DisplayFrequency_Float );

		float fPredictedSecondsFromNow = fFrameDuration - fSecondsSinceLastVsync
			+ m_pHmd->GetFloatTrackedDeviceProperty( vr::k_unTrackedDeviceIndex_Hmd,
				vr::Prop_SecondsFromVsyncToPhotons_Float );

		// Use Seated here because everything using this interface or older is expecting a seated experience
		m_pHmd->GetDeviceToAbsoluteTrackingPose( vr::TrackingUniverseSeated, fPredictedSecondsFromNow, &pose, 1 );
		m_bHaveValidPose = pose.bPoseIsValid;
	}
	else
	{
		m_bHaveValidPose = false;
	}

	if( !m_bHaveValidPose )
		return false;

	m_ZeroFromHeadPose = OpenVRToSourceCoordinateSystem( VMatrixFrom34( pose.mDeviceToAbsoluteTracking.m ) );
	return true;
}

// ----------------------------------------------------------------------
// Purpose: Performs the distortion required for the HMD display
// ----------------------------------------------------------------------
bool CSourceVirtualReality::DoDistortionProcessing ( VREye eEye )
{
	if( !ShouldRunInVR() )
		return false;

	if ( !vr_distortion_enable.GetBool() )
	{
		return false;
	}

	CMatRenderContextPtr pRenderContext( materials );

	IMaterial *pDistortMaterial;
	if( eEye == VREye_Left )
		pDistortMaterial = m_DistortLeftMaterial;
	else
		pDistortMaterial = m_DistortRightMaterial;

	if( !UsingOffscreenRenderTarget() )
	{
		// copy the frame buffer to the source texture
		ITexture *pFullFrameFB1 = materials->FindTexture( "_rt_FullFrameFB1", TEXTURE_GROUP_RENDER_TARGET );
		if( !pFullFrameFB1 )
			return false;

		Rect_t r;
		this->GetViewportBounds( eEye, &r.x, &r.y, &r.width, &r.height );
		pRenderContext->CopyRenderTargetToTextureEx( pFullFrameFB1, 0, &r, &r );
	}

	// This is where we are rendering to
	uint32_t x, y, w, h;
	m_pHmd->GetEyeOutputViewport( SourceEyeToHmdEye( eEye ), &x, &y, &w, &h );
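
	// Draw a screen-space quad with the per-eye distortion material into the eye's output
	// viewport; the source rect covers a full distortionTextureSize x distortionTextureSize
	// texture, i.e. the distortion map generated above.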
	pRenderContext->DrawScreenSpaceRectangle ( pDistortMaterial,
		x, y, w, h,
		0, 0, distortionTextureSize-1, distortionTextureSize-1, distortionTextureSize, distortionTextureSize );

	return true;
}

// --------------------------------------------------------------------
// Pastes the HUD directly onto the backbuffer / render target, including
// applying the undistort.
// --------------------------------------------------------------------
bool CSourceVirtualReality::CompositeHud ( VREye eEye, float ndcHudBounds[4], bool bDoUndistort, bool bBlackout, bool bTranslucent )
{
	// run away if we're not doing VR at all
	if ( !ShouldRunInVR() )
		return false;

	bDoUndistort = bDoUndistort && vr_distortion_enable.GetBool();

	IMaterial *pDistortHUDMaterial = ( eEye == VREye_Left ) ? m_DistortHUDLeftMaterial : m_DistortHUDRightMaterial;

	// The translucency flag will enable/disable both blending and alpha test. The only case where we don't want them enabled
	// is when we're blacking out the entire screen (we use blending to smooth the edges of the HUD, and we use alpha test to kill
	// the pixels outside the HUD). Note that right now I'm not expecting to see a mode with bTranslucent and bBlackout
	// both true (maybe happens in sniper mode?).
	pDistortHUDMaterial->SetMaterialVarFlag( MATERIAL_VAR_TRANSLUCENT, !bBlackout );

	// The ndcHudBounds are the min x, min y, max x, max y of where we want to paste the HUD texture in NDC coordinates
	// of the main 3D view. We convert to UV (0->1) space here for the shader.
	float huduvs[4];
	huduvs[0] = ndcHudBounds[0] * 0.5 + 0.5;
	huduvs[1] = ndcHudBounds[1] * 0.5 + 0.5;
	huduvs[2] = ndcHudBounds[2] * 0.5 + 0.5;
	huduvs[3] = ndcHudBounds[3] * 0.5 + 0.5;

	// Fix up coordinates depending on whether we're rendering to a buffer sized for one eye or two.
	// (note that disabling distortion also disables use of the offscreen render target)
	if ( vr_distortion_enable.GetBool() && !UsingOffscreenRenderTarget() )
	{
		huduvs[0] *= 0.5;
		huduvs[2] *= 0.5;
		if ( eEye == VREye_Right )
		{
			huduvs[0] += 0.5;
			huduvs[2] += 0.5;
		}
	}

	IMaterialVar *pVar;
	pVar = pDistortHUDMaterial->FindVar( "$distortbounds", NULL );
	if ( pVar )
	{
		pVar->SetVecValue( huduvs, 4 );
	}

	pVar = pDistortHUDMaterial->FindVar( "$hudtranslucent", NULL );
	if ( pVar )
	{
		pVar->SetIntValue( bTranslucent );
	}

	pVar = pDistortHUDMaterial->FindVar( "$hudundistort", NULL );
	if ( pVar )
	{
		pVar->SetIntValue( bDoUndistort );
	}

	CMatRenderContextPtr pRenderContext( materials );
	uint32_t x, y, w, h;
	m_pHmd->GetEyeOutputViewport( SourceEyeToHmdEye( eEye ), &x, &y, &w, &h );
	pRenderContext->DrawScreenSpaceRectangle ( pDistortHUDMaterial,
		x, y, w, h,
		0, 0, distortionTextureSize-1, distortionTextureSize-1, distortionTextureSize, distortionTextureSize );

	return true;
}
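
// Lazily initializes OpenVR: returns true immediately if an HMD interface already
// exists, otherwise attempts to start the tracker.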
bool CSourceVirtualReality::EnsureOpenVRInited()
{
	if( m_pHmd )
		return true;

	return StartTracker();
}

bool CSourceVirtualReality::StartTracker()
{
	Assert( m_pHmd == NULL );

	// Initialize SteamVR
	vr::HmdError err;
	m_pHmd = vr::VR_Init( &err );
	if( err != HmdError_None )
	{
		Msg( "Unable to initialize HMD tracker. Error code %d\n", err );
		return false;
	}

	m_pHmd->ResetSeatedZeroPose();
	m_bHaveValidPose = false;
	m_ZeroFromHeadPose.Identity();
	return true;
}

void CSourceVirtualReality::StopTracker()
{
	if ( m_pHmd )
	{
		VR_Shutdown();
		m_pHmd = NULL;
	}
}

bool CSourceVirtualReality::ResetTracking()
{
	StopTracker();
	return StartTracker();
}
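
// --------------------------------------------------------------------
// Purpose: Enters VR mode: (re)starts tracking, latches the offscreen
// render target setting, loads the distortion and UI materials, and
// regenerates the distortion textures.
// --------------------------------------------------------------------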
bool CSourceVirtualReality::Activate()
{
	// init the HMD itself
	if( !ResetTracking() )
		return false;

	m_bActive = true;
	m_bUsingOffscreenRenderTarget = vr_use_offscreen_render_target.GetBool();

	m_warpMaterial.Init( "dev/warp", "Other" );
	if( UsingOffscreenRenderTarget() )
	{
		m_DistortLeftMaterial.Init( "vr/vr_distort_texture_left", "Other" );
		m_DistortRightMaterial.Init( "vr/vr_distort_texture_right", "Other" );
	}
	else
	{
		m_DistortLeftMaterial.Init( "vr/vr_distort_texture_left_nort", "Other" );
		m_DistortRightMaterial.Init( "vr/vr_distort_texture_right_nort", "Other" );
	}
	m_InWorldUIMaterial.Init( "vgui/inworldui", "Other" );
	m_InWorldUIOpaqueMaterial.Init( "vgui/inworldui_opaque", "Other" );
	m_blackMaterial.Init( "vgui/black", "Other" );
	m_DistortHUDLeftMaterial.Init( "vr/vr_distort_hud_left", "Other" );
	m_DistortHUDRightMaterial.Init( "vr/vr_distort_hud_right", "Other" );

	RefreshDistortionTexture();

	return true;
}

void CSourceVirtualReality::Deactivate()
{
	m_bActive = false;
	m_bShouldForceVRMode = false;

	m_warpMaterial.Shutdown();
	m_DistortLeftMaterial.Shutdown();
	m_DistortRightMaterial.Shutdown();
	m_DistortHUDLeftMaterial.Shutdown();
	m_DistortHUDRightMaterial.Shutdown();
	m_InWorldUIMaterial.Shutdown();
	m_InWorldUIOpaqueMaterial.Shutdown();
	m_blackMaterial.Shutdown();
}

bool CSourceVirtualReality::ShouldForceVRMode()
{
	return m_bShouldForceVRMode;
}

void CSourceVirtualReality::SetShouldForceVRMode()
{
	m_bShouldForceVRMode = true;
}

static VMatrix OpenVRToSourceCoordinateSystem(const VMatrix& vortex)
{
	const float inchesPerMeter = (float)(39.3700787);

	// From Vortex: X=right, Y=up, Z=backwards, scale is meters.
	// To Source:   X=forwards, Y=left, Z=up, scale is inches.
	//
	// s_from_v = [  0  0 -1  0
	//              -1  0  0  0
	//               0  1  0  0
	//               0  0  0  1 ];
	//
	// We want to compute vmatrix = s_from_v * vortex * v_from_s; v_from_s = s_from_v'
	// Given vortex =
	//	[ 00 01 02 03
	//	  10 11 12 13
	//	  20 21 22 23
	//	  30 31 32 33 ]
	//
	// s_from_v * vortex * s_from_v' =
	//	[  22  20 -21 -23
	//	   02  00 -01 -03
	//	  -12 -10  11  13
	//	  -32 -30  31  33 ]
	//
	const vec_t (*v)[4] = vortex.m;
	VMatrix result(
		 v[2][2],  v[2][0], -v[2][1], -v[2][3] * inchesPerMeter,
		 v[0][2],  v[0][0], -v[0][1], -v[0][3] * inchesPerMeter,
		-v[1][2], -v[1][0],  v[1][1],  v[1][3] * inchesPerMeter,
		-v[3][2], -v[3][0],  v[3][1],  v[3][3] );
	return result;
}
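
// Helpers to build a VMatrix from raw OpenVR matrix arrays; the 3x4 and 3x3
// variants fill in the missing entries to form a full homogeneous 4x4 matrix.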
static VMatrix VMatrixFrom44(const float v[4][4])
{
	return VMatrix(
		v[0][0], v[0][1], v[0][2], v[0][3],
		v[1][0], v[1][1], v[1][2], v[1][3],
		v[2][0], v[2][1], v[2][2], v[2][3],
		v[3][0], v[3][1], v[3][2], v[3][3]);
}

static VMatrix VMatrixFrom34(const float v[3][4])
{
	return VMatrix(
		v[0][0], v[0][1], v[0][2], v[0][3],
		v[1][0], v[1][1], v[1][2], v[1][3],
		v[2][0], v[2][1], v[2][2], v[2][3],
		0, 0, 0, 1 );
}

static VMatrix VMatrixFrom33(const float v[3][3])
{
	return VMatrix(
		v[0][0], v[0][1], v[0][2], 0,
		v[1][0], v[1][1], v[1][2], 0,
		v[2][0], v[2][1], v[2][2], 0,
		0, 0, 0, 1);
}