Source code of Windows XP (NT5)

3354 lines
95 KiB

  1. #include "precomp.h"
  2. #ifndef SIZEOF_VIDEOFORMATEX
  3. #define SIZEOF_VIDEOFORMATEX(pwfx) (sizeof(VIDEOFORMATEX))
  4. #endif
  5. // #define LOGSTATISTICS_ON 1
  6. // Used to translate between frame sizes and the FRAME_* bit flags
  7. #define NON_STANDARD 0x80000000
  8. #define SIZE_TO_FLAG(s) (s == Small ? FRAME_SQCIF : s == Medium ? FRAME_QCIF: s == Large ? FRAME_CIF : NON_STANDARD)
  9. const int VID_AVG_PACKET_SIZE = 450; // avg from NetMon stats
  10. // maps temporal spatial tradeoff to a target frame rate
  11. // assume the MAX frame rate for QCIF and SQCIF is 10 on modem
  12. // let the "best quality" be 2 frames/sec
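// (table entries are target frame rates in fps*100 units: 200 == 2.0 fps, 1000 == 10 fps, matching the PROP_VIDEO_FRAME_RATE convention used later in this file)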
  13. int g_TSTable_Modem_QCIF[] =
  14. {
  15. 200, 225, 250, 275, // best quality
  16. 300, 325, 350, 375,
  17. 400, 425, 450, 475,
  18. 500, 525, 550, 575,
  19. 600, 625, 650, 675,
  20. 700, 725, 750, 775,
  21. 800, 825, 850, 875,
  22. 900, 925, 950, 1000 // fast frames
  23. };
24. // let the max frame rate for CIF be 2.5 frames/sec on modem
25. // best quality will be 0.6 frames/sec
  26. int g_TSTable_Modem_CIF[] =
  27. {
  28. 60, 66, 72, 78,
  29. 84, 90, 96, 102,
  30. 108, 114, 120, 126,
  31. 132, 140, 146, 152,
  32. 158, 164, 170, 174,
  33. 180, 186, 192, 198,
  34. 208, 216, 222, 228,
  35. 232, 238, 244, 250
  36. };
  37. #ifdef USE_NON_LINEAR_FPS_ADJUSTMENT
38. // this table and related code can be used for non-linear adjustment of our frame rate based
  39. // on QOS information in QosNotifyVideoCB
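// (entries appear to be signed percentage adjustments to the frame rate, indexed by two quantized QoS load levels computed in QosNotifyVideoCB)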
  40. int g_QoSMagic[19][19] =
  41. {
  42. {-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90,-90},
  43. {-90,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80,-80},
  44. {-90,-80,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70,-70},
  45. {-90,-80,-70,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60,-60},
  46. {-90,-80,-70,-60,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50,-50},
  47. {-90,-80,-70,-60,-50,-40,-40,-40,-40,-40,-40,-40,-40,-40,-40,-40,-40,-40,-40},
  48. {-90,-80,-70,-60,-50,-40,-30,-30,-30,-30,-30,-30,-30,-30,-30,-30,-30,-30,-30},
  49. {-90,-80,-70,-60,-50,-40,-30,-20,-20,-20,-20,-20,-20,-20,-20,-20,-20,-20,-20},
  50. {-90,-80,-70,-60,-50,-40,-30,-20,-10,-10,-10,-10,-10,-10,-10,-10,-10,-10,-10},
  51. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
  52. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 10, 10, 10, 10, 10, 10, 10, 10},
  53. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 20, 20, 20, 20, 20, 20, 20},
  54. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 30, 30, 30, 30, 30, 30},
  55. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 40, 40, 40, 40, 40},
  56. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 50, 50, 50, 50, 50},
  57. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 50, 60, 60, 60, 60},
  58. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 50, 60, 70, 70, 70},
  59. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 50, 60, 70, 80, 80},
  60. {-90,-80,-70,-60,-50,-40,-30,-20,-10, 0, 10, 20, 30, 40, 50, 60, 70, 80, 90},
  61. };
  62. #endif
  63. BOOL SortOrder(IAppVidCap *pavc, BASIC_VIDCAP_INFO* pvidcaps, DWORD dwcFormats,
  64. DWORD dwFlags, WORD wDesiredSortOrder, int nNumFormats);
  65. UINT ChoosePacketSize(VIDEOFORMATEX *pvf)
  66. {
  67. // set default samples per pkt to 1
  68. UINT spp, sblk;
  69. spp = 1;
  70. // calculate samples per block ( aka frame)
  71. sblk = pvf->nBlockAlign* pvf->nSamplesPerSec/ pvf->nAvgBytesPerSec;
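// (for video formats where nAvgBytesPerSec == nSamplesPerSec * nBlockAlign this works out to one sample, i.e. one frame, per packet)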
  72. if (sblk <= spp) {
  73. spp = (spp/sblk)*sblk;
  74. } else
  75. spp = sblk;
  76. return spp;
  77. }
  78. HRESULT STDMETHODCALLTYPE SendVideoStream::QueryInterface(REFIID iid, void **ppVoid)
  79. {
  80. // resolve duplicate inheritance to the SendMediaStream;
  81. extern IID IID_IProperty;
  82. if (iid == IID_IUnknown)
  83. {
  84. *ppVoid = (IUnknown*)((RecvMediaStream*)this);
  85. }
  86. else if (iid == IID_IMediaChannel)
  87. {
  88. *ppVoid = (IMediaChannel*)((RecvMediaStream *)this);
  89. }
  90. else if (iid == IID_IVideoChannel)
  91. {
  92. *ppVoid = (IVideoChannel*)this;
  93. }
  94. else if (iid == IID_IProperty)
  95. {
  96. *ppVoid = NULL;
  97. ERROR_OUT(("Don't QueryInterface for IID_IProperty, use IMediaChannel"));
  98. return E_NOINTERFACE;
  99. }
  100. else if (iid == IID_IVideoRender)// satisfy symmetric property of QI
  101. {
  102. *ppVoid = (IVideoRender *)this;
  103. }
  104. else
  105. {
  106. *ppVoid = NULL;
  107. return E_NOINTERFACE;
  108. }
  109. AddRef();
  110. return S_OK;
  111. }
  112. ULONG STDMETHODCALLTYPE SendVideoStream::AddRef(void)
  113. {
  114. return InterlockedIncrement(&m_lRefCount);
  115. }
  116. ULONG STDMETHODCALLTYPE SendVideoStream::Release(void)
  117. {
  118. LONG lRet;
  119. lRet = InterlockedDecrement(&m_lRefCount);
  120. if (lRet == 0)
  121. {
  122. delete this;
  123. return 0;
  124. }
  125. else
  126. return lRet;
  127. }
  128. DWORD CALLBACK SendVideoStream::StartCaptureThread(LPVOID pVoid)
  129. {
  130. SendVideoStream *pThisStream = (SendVideoStream*)pVoid;
  131. return pThisStream->CapturingThread();
  132. }
  133. HRESULT
  134. SendVideoStream::Initialize(DataPump *pDP)
  135. {
  136. HRESULT hr = DPR_OUT_OF_MEMORY;
  137. DWORD dwFlags = DP_FLAG_FULL_DUPLEX | DP_FLAG_AUTO_SWITCH ;
  138. MEDIACTRLINIT mcInit;
  139. FX_ENTRY ("DP::InitChannel")
  140. FINDCAPTUREDEVICE fcd;
  141. m_pIUnknown = (IUnknown *)NULL;
  142. InitializeCriticalSection(&m_crsVidQoS);
  143. InitializeCriticalSection(&m_crs);
  144. dwFlags |= DP_FLAG_VCM | DP_FLAG_VIDEO ;
  145. m_maxfps = 2997; // max of 29.97 fps
  146. m_frametime = 1000 / 30; // default of 30 fps (time in ms) QOS will slow us, if
  147. // need be
  148. // Modem connections will use a frame rate control table
  149. // to implement TS-Tradeoff
  150. m_pTSTable = NULL;
  151. m_dwCurrentTSSetting = VCM_DEFAULT_IMAGE_QUALITY;
  152. // store the platform flags
  153. // enable Send and Recv by default
  154. m_DPFlags = (dwFlags & DP_MASK_PLATFORM) | DPFLAG_ENABLE_SEND ;
  155. // store a back pointer to the datapump container
  156. m_pDP = pDP;
  157. m_pRTPSend = NULL;
  158. // m_PrevFormatId = INVALID_MEDIA_FORMAT;
  159. ZeroMemory(&m_fCodecOutput, sizeof(VIDEOFORMATEX));
  160. // Initialize data (should be in constructor)
  161. m_CaptureDevice = (UINT) -1; // use VIDEO_MAPPER
  162. m_PreviousCaptureDevice = (UINT) -1;
  163. DBG_SAVE_FILE_LINE
  164. m_SendStream = new TxStream();
  165. if (!m_SendStream)
  166. {
  167. DEBUGMSG (ZONE_DP, ("%s: TxStream new failed\r\n", _fx_));
  168. goto StreamAllocError;
  169. }
  170. // Create Input and Output video filters
  171. DBG_SAVE_FILE_LINE
  172. m_pVideoFilter = new VcmFilter();
  173. m_dwDstSize = 0;
  174. if (m_pVideoFilter==NULL)
  175. {
  176. DEBUGMSG (ZONE_DP, ("%s: VcmFilter new failed\r\n", _fx_));
  177. goto FilterAllocError;
  178. }
  179. //Create Video MultiMedia device control objects
  180. DBG_SAVE_FILE_LINE
  181. m_InMedia = new VideoInControl();
  182. if (!m_InMedia )
  183. {
  184. DEBUGMSG (ZONE_DP, ("%s: MediaControl new failed\r\n", _fx_));
  185. goto MediaAllocError;
  186. }
  187. // Initialize the send-stream media control object
  188. mcInit.dwFlags = dwFlags | DP_FLAG_SEND;
  189. hr = m_InMedia->Initialize(&mcInit);
  190. if (hr != DPR_SUCCESS)
  191. {
  192. DEBUGMSG (ZONE_DP, ("%s: IMedia->Init failed, hr=0x%lX\r\n", _fx_, hr));
  193. goto MediaAllocError;
  194. }
  195. // determine if the video devices are available
  196. fcd.dwSize = sizeof (FINDCAPTUREDEVICE);
  197. if (FindFirstCaptureDevice(&fcd, NULL)) {
  198. DEBUGMSG (ZONE_DP, ("%s: OMedia->have capture cap\r\n", _fx_));
  199. m_DPFlags |= DP_FLAG_RECORD_CAP ;
  200. }
  201. // set media to half duplex mode by default
  202. m_InMedia->SetProp(MC_PROP_DUPLEX_TYPE, DP_FLAG_HALF_DUPLEX);
  203. m_SavedTickCount = timeGetTime(); //so we start with low timestamps
  204. m_DPFlags |= DPFLAG_INITIALIZED;
  205. return DPR_SUCCESS;
  206. MediaAllocError:
  207. if (m_InMedia) delete m_InMedia;
  208. FilterAllocError:
  209. if (m_pVideoFilter) delete m_pVideoFilter;
  210. StreamAllocError:
  211. if (m_SendStream) delete m_SendStream;
  212. ERRORMESSAGE( ("SendVideoStream::Initialize: exit, hr=0x%lX\r\n", hr));
  213. return hr;
  214. }
  215. // LOOK: identical to SendAudioStream version.
  216. SendVideoStream::~SendVideoStream()
  217. {
  218. if (m_DPFlags & DPFLAG_INITIALIZED) {
  219. m_DPFlags &= ~DPFLAG_INITIALIZED;
  220. // TEMP:Make sure preview stops
  221. m_DPFlags &= ~DPFLAG_ENABLE_PREVIEW;
  222. if (m_DPFlags & DPFLAG_CONFIGURED_SEND )
  223. {
  224. UnConfigure();
  225. }
  226. if (m_pRTPSend)
  227. {
  228. m_pRTPSend->Release();
  229. m_pRTPSend = NULL;
  230. }
231. // Close the transmit stream
232. if (m_SendStream) delete m_SendStream;
233. // Close the capture device
234. if (m_InMedia) { delete m_InMedia;}
  235. // Close the filters
  236. if (m_pVideoFilter)
  237. {
  238. delete m_pVideoFilter;
  239. }
  240. m_pDP->RemoveMediaChannel(MCF_SEND| MCF_VIDEO, (IMediaChannel*)(RecvMediaStream *)this);
  241. }
  242. DeleteCriticalSection(&m_crs);
  243. DeleteCriticalSection(&m_crsVidQoS);
  244. }
  245. HRESULT STDMETHODCALLTYPE SendVideoStream::Configure(
  246. BYTE *pFormat,
  247. UINT cbFormat,
  248. BYTE *pChannelParams,
  249. UINT cbParams,
  250. IUnknown *pUnknown)
  251. {
  252. HRESULT hr;
  253. BOOL fRet;
  254. MEDIAPACKETINIT pcktInit;
  255. MEDIACTRLCONFIG mcConfig;
  256. MediaPacket **ppPckt;
  257. ULONG cPckt, uIndex;
  258. DWORD_PTR dwPropVal;
  259. VIDEOFORMATEX *pfSend = (VIDEOFORMATEX*)pFormat;
  260. DWORD maxBitRate=0;
  261. DWORD i, dwSrcSize, dwMaxFragSize=0;
  262. int iXOffset, iYOffset;
  263. VIDEO_CHANNEL_PARAMETERS vidChannelParams;
  264. struct {
  265. int cResources;
  266. RESOURCE aResources[1];
  267. } m_aLocalRs;
  268. vidChannelParams.RTP_Payload = 0;
  269. int optval = 0 ;
  270. CCaptureChain *pChain;
  271. HCAPDEV hCapDev=NULL;
  272. LPBITMAPINFOHEADER lpcap, lpsend;
  273. BOOL fNewDeviceSettings = TRUE;
  274. BOOL fNewDevice = TRUE;
  275. BOOL fLive = FALSE, fReconfiguring;
  276. MMRESULT mmr;
  277. DWORD dwStreamingMode = STREAMING_PREFER_FRAME_GRAB;
  278. FX_ENTRY ("SendVideoStream::Configure")
  279. if (pfSend)
  280. {
  281. // for now, don't allow SendVideoStream to be re-configured
  282. // if we are already streaming.
  283. if (m_DPFlags & DPFLAG_STARTED_SEND)
  284. {
  285. return DPR_IO_PENDING;
  286. }
  287. }
  288. else
  289. {
  290. ASSERT(!pChannelParams);
  291. }
  292. if(NULL != pChannelParams)
  293. {
  294. // get channel parameters
  295. if (cbParams != sizeof(vidChannelParams))
  296. {
  297. hr = DPR_INVALID_PARAMETER;
  298. goto IMediaInitError;
  299. }
  300. vidChannelParams = *(VIDEO_CHANNEL_PARAMETERS *)pChannelParams;
  301. fLive = TRUE;
  302. }
  303. else
  304. {
  305. //
  306. // else this is configuring for preview or is unconfiguring. There are
  307. // no channel parameters
  308. //
  309. }
  310. if (m_DPFlags & DPFLAG_CONFIGURED_SEND)
  311. {
  312. if (pfSend)
  313. {
  314. if (m_CaptureDevice == m_PreviousCaptureDevice)
  315. fNewDevice = FALSE;
  316. if (IsSimilarVidFormat(&m_fCodecOutput, pfSend))
  317. fNewDeviceSettings = FALSE;
  318. }
319. // When using a different capture device, we systematically configure everything
  320. // although it would probably be possible to optimize the configuration
  321. // of the filters and transmit stream
  322. EndSend();
  323. UnConfigureSendVideo(fNewDeviceSettings, fNewDevice);
  324. }
  325. if (!pfSend)
  326. {
  327. return DPR_SUCCESS;
  328. }
  329. if (fLive)
  330. m_DPFlags |= DPFLAG_REAL_THING;
  331. // m_Net = pNet;
  332. if (! (m_DPFlags & DPFLAG_INITIALIZED))
  333. return DPR_OUT_OF_MEMORY; //BUGBUG: return proper error;
  334. if (fNewDeviceSettings || fNewDevice)
  335. {
  336. m_ThreadFlags |= DPTFLAG_PAUSE_CAPTURE;
  337. mcConfig.uDuration = MC_USING_DEFAULT; // set duration by samples per pkt
  338. // force an unknown device to be profiled by fetching
339. // its streaming capabilities BEFORE opening it
  340. mmr = vcmGetDevCapsStreamingMode(m_CaptureDevice, &dwStreamingMode);
  341. if (mmr != MMSYSERR_NOERROR)
  342. {
  343. dwStreamingMode = STREAMING_PREFER_FRAME_GRAB;
  344. }
  345. m_InMedia->GetProp (MC_PROP_MEDIA_DEV_HANDLE, &dwPropVal);
  346. if (!dwPropVal) {
  347. // if capture device isn't already open, then open it
  348. m_InMedia->SetProp(MC_PROP_MEDIA_DEV_ID, (DWORD)m_CaptureDevice);
  349. if (fNewDevice)
  350. {
  351. hr = m_InMedia->Open();
  352. if (hr != DPR_SUCCESS) {
  353. DEBUGMSG (ZONE_DP, ("%s: m_InMedia->Open failed to open capture, hr=0x%lX\r\n", _fx_, hr));
  354. goto IMediaInitError;
  355. }
  356. }
  357. m_InMedia->GetProp (MC_PROP_MEDIA_DEV_HANDLE, &dwPropVal);
  358. if (!dwPropVal) {
359. DEBUGMSG (ZONE_DP, ("%s: capture device not open\r\n", _fx_));
  360. goto IMediaInitError;
  361. }
  362. }
  363. hCapDev = (HCAPDEV)dwPropVal;
  364. if (m_pCaptureChain) {
  365. delete m_pCaptureChain;
  366. m_pCaptureChain = NULL;
  367. }
  368. i = 0; // assume no colortable
  369. // m_fDevSend is the uncompressed format
  370. // pfSend is the compressed format
  371. mmr = VcmFilter::SuggestEncodeFormat(m_CaptureDevice, &m_fDevSend, pfSend);
  372. if (mmr == MMSYSERR_NOERROR) {
  373. i = m_fDevSend.bih.biClrUsed; // non-zero, if vcmstrm gave us a colortable
  374. SetCaptureDeviceFormat(hCapDev, &m_fDevSend.bih, 0, 0);
  375. }
  376. dwPropVal = GetCaptureDeviceFormatHeaderSize(hCapDev);
  377. while (1) {
  378. if (lpcap = (LPBITMAPINFOHEADER)MemAlloc((UINT)dwPropVal)) {
  379. lpcap->biSize = (DWORD)dwPropVal;
  380. if (!GetCaptureDeviceFormat(hCapDev, lpcap)) {
  381. MemFree(lpcap);
  382. DEBUGMSG (ZONE_DP, ("%s: failed to set/get capture format\r\n", _fx_));
  383. goto IMediaInitError;
  384. }
  385. UPDATE_REPORT_ENTRY(g_prptSystemSettings, (lpcap->biWidth << 22) | (lpcap->biHeight << 12) | ((lpcap->biCompression == VIDEO_FORMAT_UYVY) ? VIDEO_FORMAT_NUM_COLORS_UYVY : (lpcap->biCompression == VIDEO_FORMAT_YUY2) ? VIDEO_FORMAT_NUM_COLORS_YUY2 : (lpcap->biCompression == VIDEO_FORMAT_IYUV) ? VIDEO_FORMAT_NUM_COLORS_IYUV : (lpcap->biCompression == VIDEO_FORMAT_I420) ? VIDEO_FORMAT_NUM_COLORS_I420 : (lpcap->biCompression == VIDEO_FORMAT_YVU9) ? VIDEO_FORMAT_NUM_COLORS_YVU9 : (lpcap->biCompression == 0) ? ((lpcap->biBitCount == 24) ? VIDEO_FORMAT_NUM_COLORS_16777216 : (lpcap->biBitCount == 16) ? VIDEO_FORMAT_NUM_COLORS_65536 : (lpcap->biBitCount == 8) ? VIDEO_FORMAT_NUM_COLORS_256 : (lpcap->biBitCount == 4) ? VIDEO_FORMAT_NUM_COLORS_16 : 0x00000800) : 0x00000800), REP_DEVICE_IMAGE_SIZE);
  386. if (lpcap->biBitCount > 8)
  387. break;
  388. else if (dwPropVal > 256 * sizeof(RGBQUAD)) {
  389. if (i) {
  390. // vcmstrm gave us a colortable in m_fDevSend, so use it
  391. CopyMemory(((BYTE*)lpcap) + lpcap->biSize, (BYTE*)&m_fDevSend.bih + m_fDevSend.bih.biSize,
  392. 256 * sizeof(RGBQUAD));
  393. }
  394. else {
  395. CAPTUREPALETTE pal;
  396. LPRGBQUAD lprgb;
  397. GetCaptureDevicePalette(hCapDev, &pal);
  398. lprgb = (LPRGBQUAD)(((BYTE*)lpcap) + lpcap->biSize);
  399. for (i = 0; i < 256; i++) {
  400. lprgb->rgbRed = pal.pe[i].peRed;
  401. lprgb->rgbGreen = pal.pe[i].peGreen;
  402. lprgb->rgbBlue = pal.pe[i].peBlue;
  403. lprgb++;
  404. }
  405. }
  406. break;
  407. }
  408. dwPropVal += 256 * sizeof(RGBQUAD);
  409. MemFree(lpcap); // free this lpcap, and alloc a new with room for palette
  410. }
  411. else {
  412. DEBUGMSG (ZONE_DP, ("%s: failed to set/get capture format\r\n", _fx_));
  413. goto IMediaInitError;
  414. }
  415. }
  416. DBG_SAVE_FILE_LINE
  417. if (pChain = new CCaptureChain) {
  418. VIDEOFORMATEX *capfmt;
  419. // if pfSend is 128x96, but capture is greater, then InitCaptureChain with a larger size so
  420. // that the codec will just crop to 128x96
  421. iXOffset = pfSend->bih.biWidth;
  422. iYOffset = pfSend->bih.biHeight;
  423. if ((iXOffset == 128) && (iYOffset == 96)) {
  424. if (lpcap->biWidth == 160) {
  425. iXOffset = lpcap->biWidth;
  426. iYOffset = lpcap->biHeight;
  427. }
  428. else if (lpcap->biWidth == 320) {
  429. iXOffset = lpcap->biWidth / 2;
  430. iYOffset = lpcap->biHeight / 2;
  431. }
  432. }
  433. if ((hr = pChain->InitCaptureChain(hCapDev,
  434. (dwStreamingMode==STREAMING_PREFER_STREAMING),
  435. lpcap, iXOffset, iYOffset, 0, &lpsend)) != NO_ERROR) {
  436. DEBUGMSG (ZONE_DP, ("%s: failed to init capture chain\r\n", _fx_));
  437. MemFree(lpcap);
  438. delete pChain;
  439. goto IMediaInitError;
  440. }
  441. }
  442. else {
443. DEBUGMSG (ZONE_DP, ("%s: failed to allocate capture chain\r\n", _fx_));
  444. MemFree((HANDLE)lpcap);
  445. hr = DPR_OUT_OF_MEMORY;
  446. goto IMediaInitError;
  447. }
  448. MemFree((HANDLE)lpcap);
  449. m_pCaptureChain = pChain;
450. // build m_fDevSend as the format that will be input to the codec
  451. CopyMemory(&m_fDevSend, pfSend, sizeof(VIDEOFORMATEX)-sizeof(BITMAPINFOHEADER)-BMIH_SLOP_BYTES);
  452. // m_fDevSend.bih is the output format of the CaptureChain
  453. CopyMemory(&m_fDevSend.bih, lpsend, lpsend->biSize);
  454. //LOOKLOOK RP - need to get colortable too?
  455. m_fDevSend.dwFormatSize = sizeof(VIDEOFORMATEX);
  456. m_fDevSend.dwFormatTag = lpsend->biCompression;
  457. m_fDevSend.nAvgBytesPerSec = m_fDevSend.nMinBytesPerSec =
  458. m_fDevSend.nMaxBytesPerSec = m_fDevSend.nSamplesPerSec * lpsend->biSizeImage;
  459. m_fDevSend.nBlockAlign = lpsend->biSizeImage;
  460. m_fDevSend.wBitsPerSample = lpsend->biBitCount;
  461. LocalFree((HANDLE)lpsend);
  462. mcConfig.pDevFmt = &m_fDevSend;
  463. UPDATE_REPORT_ENTRY(g_prptCallParameters, pfSend->dwFormatTag, REP_SEND_VIDEO_FORMAT);
  464. RETAILMSG(("NAC: Video Send Format: %.4s", (LPSTR)&pfSend->dwFormatTag));
  465. // Initialize the send-stream media control object
  466. mcConfig.hStrm = (DPHANDLE) m_SendStream;
  467. m_InMedia->GetProp(MC_PROP_MEDIA_DEV_ID, &dwPropVal);
  468. mcConfig.uDevId = (DWORD)dwPropVal;
  469. mcConfig.cbSamplesPerPkt = ChoosePacketSize(pfSend);
  470. hr = m_InMedia->Configure(&mcConfig);
  471. if (hr != DPR_SUCCESS)
  472. {
  473. DEBUGMSG (ZONE_DP, ("%s: IVMedia->Config failed, hr=0x%lX\r\n", _fx_, hr));
  474. goto IMediaInitError;
  475. }
  476. // initialize m_cliprect
  477. iXOffset = 0; iYOffset = 0;
  478. if (m_fDevSend.bih.biWidth > pfSend->bih.biWidth)
  479. iXOffset = (m_fDevSend.bih.biWidth - pfSend->bih.biWidth) >> 1;
  480. if (m_fDevSend.bih.biHeight > pfSend->bih.biHeight)
  481. iYOffset = (m_fDevSend.bih.biHeight - pfSend->bih.biHeight) >> 1;
  482. SetRect(&m_cliprect, iXOffset, iYOffset, pfSend->bih.biWidth + iXOffset, pfSend->bih.biHeight + iYOffset);
  483. dwMaxFragSize = 512; // default video packet size
  484. CopyMemory (&m_fCodecOutput, pfSend, sizeof(VIDEOFORMATEX));
  485. m_InMedia->GetProp (MC_PROP_SIZE, &dwPropVal);
  486. dwSrcSize = (DWORD)dwPropVal;
  487. mmr = m_pVideoFilter->Open(&m_fDevSend, &m_fCodecOutput, dwMaxFragSize);
  488. if (mmr != MMSYSERR_NOERROR)
  489. {
  490. DEBUGMSG (ZONE_DP, ("%s: VcmFilter->Open failed, mmr=%d\r\n", _fx_, mmr));
  491. hr = DPR_CANT_OPEN_CODEC;
  492. goto SendFilterInitError;
  493. }
  494. // Initialize the send queue
  495. ZeroMemory (&pcktInit, sizeof (pcktInit));
  496. pcktInit.dwFlags = DP_FLAG_SEND | DP_FLAG_VCM | DP_FLAG_VIDEO;
  497. pcktInit.pStrmConvSrcFmt = &m_fDevSend;
  498. pcktInit.pStrmConvDstFmt = &m_fCodecOutput;
  499. pcktInit.cbSizeRawData = dwSrcSize;
  500. pcktInit.cbOffsetRawData = 0;
  501. m_InMedia->FillMediaPacketInit (&pcktInit);
  502. m_InMedia->GetProp (MC_PROP_SIZE, &dwPropVal);
  503. m_pVideoFilter->SuggestDstSize(dwSrcSize, &m_dwDstSize);
  504. pcktInit.cbSizeNetData = m_dwDstSize;
  505. m_pVideoFilter->GetProperty(FM_PROP_PAYLOAD_HEADER_SIZE,
  506. &pcktInit.cbPayloadHeaderSize);
  507. pcktInit.cbOffsetNetData = sizeof (RTP_HDR);
  508. pcktInit.payload = vidChannelParams.RTP_Payload;
  509. fRet = m_SendStream->Initialize (DP_FLAG_VIDEO, MAX_TXVRING_SIZE, m_pDP, &pcktInit);
  510. if (!fRet)
  511. {
  512. DEBUGMSG (ZONE_DP, ("%s: TxvStream->Init failed, fRet=0%u\r\n", _fx_, fRet));
  513. hr = DPR_CANT_INIT_TXV_STREAM;
  514. goto TxStreamInitError;
  515. }
  516. // Prepare headers for TxvStream
  517. m_SendStream->GetRing (&ppPckt, &cPckt);
  518. m_InMedia->RegisterData (ppPckt, cPckt);
  519. m_InMedia->PrepareHeaders ();
  520. }
  521. else
  522. {
  523. // The following fields may change with the capabilities of the other end point
  524. dwMaxFragSize = 512; // default video packet size
  525. if (pChannelParams)
  526. {
  527. m_pVideoFilter->GetProperty(FM_PROP_PAYLOAD_HEADER_SIZE,
  528. &pcktInit.cbPayloadHeaderSize);
  529. pcktInit.cbOffsetNetData = sizeof (RTP_HDR);
  530. }
  531. }
  532. if(pChannelParams)
  533. {
  534. // Update the bitrate
  535. maxBitRate = vidChannelParams.ns_params.maxBitRate*100;
  536. if (maxBitRate < BW_144KBS_BITS)
  537. maxBitRate = BW_144KBS_BITS;
  538. // set the max. fragment size
  539. DEBUGMSG(ZONE_DP,("%s: Video Send: maxBitRate=%d, maxBPP=%d, MPI=%d\r\n",
  540. _fx_,maxBitRate,
  541. vidChannelParams.ns_params.maxBPP*1024, vidChannelParams.ns_params.MPI*33));
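// (ns_params.MPI is the H.245 minimum picture interval, in units of 1/29.97 s, hence the max frame rate of 2997/MPI, in fps*100, computed below)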
542. // Initialize the max frame rate with the negotiated max
  543. if ((vidChannelParams.ns_params.MPI > 0UL) && (vidChannelParams.ns_params.MPI < 33UL))
  544. {
  545. dwPropVal = 2997UL / vidChannelParams.ns_params.MPI;
  546. m_maxfps = (DWORD)dwPropVal;
  547. INIT_COUNTER_MAX(g_pctrVideoSend, (m_maxfps + 50) / 100);
  548. UPDATE_REPORT_ENTRY(g_prptCallParameters, (m_maxfps + 50) / 100, REP_SEND_VIDEO_MAXFPS);
  549. RETAILMSG(("NAC: Video Send Max Frame Rate (negotiated - fps): %ld", (m_maxfps + 50) / 100));
550. DEBUGMSG(1,("%s: Video Send: Negotiated max fps = %d.%d\r\n", _fx_, m_maxfps/100, m_maxfps - m_maxfps / 100 * 100));
  551. }
  552. UPDATE_REPORT_ENTRY(g_prptCallParameters, maxBitRate, REP_SEND_VIDEO_BITRATE);
  553. RETAILMSG(("NAC: Video Send Max Bitrate (negotiated - bps): %ld", maxBitRate));
  554. INIT_COUNTER_MAX(g_pctrVideoSendBytes, maxBitRate * 75 / 100);
555. // At this point we actually know the minimum bitrate chosen by
556. // the sender and the receiver. Let's reset the resources reserved
557. // by the QoS with those more meaningful values.
  558. if (m_pDP->m_pIQoS)
  559. {
  560. // Fill in the resource list
  561. m_aLocalRs.cResources = 1;
  562. m_aLocalRs.aResources[0].resourceID = RESOURCE_OUTGOING_BANDWIDTH;
  563. // Do a sanity check on the minimal bit rate
  564. m_aLocalRs.aResources[0].nUnits = maxBitRate;
  565. m_aLocalRs.aResources[0].ulResourceFlags = m_aLocalRs.aResources[0].reserved = 0;
566. DEBUGMSG(1,("%s: Video Send: Negotiated max bps = %d\r\n", _fx_, maxBitRate));
  567. // Set the resources on the QoS object
  568. hr = m_pDP->m_pIQoS->SetResources((LPRESOURCELIST)&m_aLocalRs);
  569. }
  570. // if we're sending on the LAN, fragment video frames into Ethernet packet sized chunks
  571. // On slower links use smaller packets for better bandwidth sharing
  572. // NOTE: codec packetizer can occasionally exceed the fragment size limit
  573. if (maxBitRate > BW_ISDN_BITS)
  574. dwMaxFragSize = 1350;
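// (1350 bytes leaves headroom for IP/UDP/RTP headers within a 1500-byte Ethernet MTU)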
  575. m_pVideoFilter->SetProperty(FM_PROP_VIDEO_MAX_PACKET_SIZE, dwMaxFragSize);
  576. // To correctly initialize the flow spec structure we need to get the values that
  577. // our QoS module will be effectively using. Typically, we only use 70% of the max
578. // advertised. On top of that, some system administrator may have significantly
  579. // reduced the maximum bitrate on this machine.
  580. if (m_pDP->m_pIQoS)
  581. {
  582. LPRESOURCELIST pResourceList = NULL;
  583. // Get a list of all resources from QoS
  584. hr = m_pDP->m_pIQoS->GetResources(&pResourceList);
  585. if (SUCCEEDED(hr) && pResourceList)
  586. {
  587. // Find the BW resource
  588. for (i=0; i < pResourceList->cResources; i++)
  589. {
  590. if (pResourceList->aResources[i].resourceID == RESOURCE_OUTGOING_BANDWIDTH)
  591. {
  592. maxBitRate = min(maxBitRate, (DWORD)pResourceList->aResources[i].nUnits);
  593. break;
  594. }
  595. }
  596. // Release memory
  597. m_pDP->m_pIQoS->FreeBuffer(pResourceList);
  598. }
  599. }
  600. // WS2Qos will be called in Start to communicate stream information to the
  601. // remote endpoint using a PATH message
  602. //
  603. // We use a peak-rate allocation approach based on our target bitrates
  604. // Note that for the token bucket size and the maximum SDU size, we now
  605. // account for IP header overhead, and use the max frame fragment size
  606. // instead of the maximum compressed image size returned by the codec
  607. ASSERT(maxBitRate > 0);
  608. InitVideoFlowspec(&m_flowspec, maxBitRate, dwMaxFragSize, VID_AVG_PACKET_SIZE);
  609. // Update RTCP send address and payload type. It should be known now
  610. // We have to explicitly set the payload again because the preview
  611. // channel configuration has already set it to zero.
  612. m_RTPPayload = vidChannelParams.RTP_Payload;
  613. m_SendStream->GetRing (&ppPckt, &cPckt);
  614. for (uIndex = 0; uIndex < cPckt; uIndex++)
  615. {
  616. ppPckt[uIndex]->SetPayload(m_RTPPayload);
  617. }
  618. // Keep a weak reference to the IUnknown interface
  619. // We will use it to query a Stream Signal interface pointer in Start()
  620. m_pIUnknown = pUnknown;
  621. }
  622. if (m_DPFlags & DPFLAG_REAL_THING)
  623. {
  624. if (m_pDP->m_pIQoS)
  625. {
  626. // Initialize our requests. One for CPU usage, one for bandwidth usage.
  627. m_aRRq.cResourceRequests = 2;
  628. m_aRRq.aResourceRequest[0].resourceID = RESOURCE_OUTGOING_BANDWIDTH;
  629. m_aRRq.aResourceRequest[0].nUnitsMin = 0;
  630. m_aRRq.aResourceRequest[1].resourceID = RESOURCE_CPU_CYCLES;
  631. m_aRRq.aResourceRequest[1].nUnitsMin = 0;
  632. // Initialize QoS structure
  633. ZeroMemory(&m_Stats, sizeof(m_Stats));
  634. // Start collecting CPU performance data from the registry
  635. StartCPUUsageCollection();
  636. // Register with the QoS module. This call should NEVER fail. If it does, we'll do without the QoS
  637. m_pDP->m_pIQoS->RequestResources((GUID *)&MEDIA_TYPE_H323VIDEO, (LPRESOURCEREQUESTLIST)&m_aRRq, QosNotifyVideoCB, (DWORD_PTR)this);
  638. }
  639. }
  640. // reset the temporal spatial tradeoff to best quality
  641. // it's expected that the UI will re-specify the TS setting
  642. // sometime after the stream is started
  643. m_pVideoFilter->SetProperty(FM_PROP_VIDEO_RESET_IMAGE_QUALITY ,VCM_RESET_IMAGE_QUALITY);
  644. m_pTSTable = NULL;
  645. m_dwCurrentTSSetting = VCM_MAX_IMAGE_QUALITY;
646. //Before we start, reset the frame rate to the channel max.
  647. //If the previous call had been slower than possible, resume
  648. //previewing at the desired FPS.
  649. if (pChannelParams && (m_DPFlags & DPFLAG_REAL_THING))
  650. {
  651. int iSlowStartFrameRate;
652. // use a frame-rate table for temporal spatial tradeoff settings
  653. // if the bandwidth is a modem setting
  654. if (maxBitRate <= BW_288KBS_BITS)
  655. {
  656. if (pfSend->bih.biWidth >= CIF_WIDTH)
  657. {
  658. m_pTSTable = g_TSTable_Modem_CIF;
  659. }
  660. else
  661. {
  662. m_pTSTable = g_TSTable_Modem_QCIF;
  663. }
  664. }
665. // Let's do a slow start and then catch up with the negotiated max
  666. if (m_pTSTable == NULL)
  667. {
  668. iSlowStartFrameRate = m_maxfps >> 1;
  669. }
  670. else
  671. {
  672. iSlowStartFrameRate = m_pTSTable[VCM_MAX_IMAGE_QUALITY];
  673. }
  674. SetProperty(PROP_VIDEO_FRAME_RATE, &iSlowStartFrameRate, sizeof(int));
  675. // Initialize the codec with the new target bitrates and frame rates
  676. // PhilF-: This assumes that we start with a silent audio channel...
  677. SetTargetRates(iSlowStartFrameRate, maxBitRate);
  678. }
  679. else
  680. {
  681. INIT_COUNTER_MAX(g_pctrVideoSend, 30);
  682. SetProperty(PROP_VIDEO_FRAME_RATE, &m_maxfps, sizeof(int));
  683. }
  684. m_ThreadFlags &= ~DPTFLAG_PAUSE_CAPTURE;
  685. m_DPFlags |= DPFLAG_CONFIGURED_SEND;
  686. m_PreviousCaptureDevice = m_CaptureDevice;
  687. // m_PrevFormatId = SendVidFmt;
  688. return DPR_SUCCESS;
  689. TxStreamInitError:
  690. m_pVideoFilter->Close();
  691. SendFilterInitError:
  692. IMediaInitError:
  693. if (m_pCaptureChain) {
  694. delete m_pCaptureChain;
  695. m_pCaptureChain = NULL;
  696. }
  697. // We need to close the video controller object on failure to open the capture device,
  698. // otherwise we get a pure virtual function call on NM shutdown!
  699. if (m_InMedia)
  700. m_InMedia->Close();
  701. ERRORMESSAGE(("%s: failed, hr=0%u\r\n", _fx_, hr));
  702. return hr;
  703. }
  704. void SendVideoStream::UnConfigure()
  705. {
  706. // By default, unconfigure all resources
  707. UnConfigureSendVideo(TRUE, TRUE);
  708. }
  709. void SendVideoStream::UnConfigureSendVideo(BOOL fNewDeviceSettings, BOOL fNewDevice)
  710. {
  711. #ifdef TEST
  712. DWORD dwTicks;
  713. dwTicks = GetTickCount();
  714. #endif
  715. if (m_DPFlags & DPFLAG_CONFIGURED_SEND)
  716. {
  717. if (m_hCapturingThread)
  718. Stop();
  719. if (fNewDeviceSettings || fNewDevice)
  720. {
  721. // m_PrevFormatId = INVALID_MEDIA_FORMAT;
  722. ZeroMemory(&m_fCodecOutput, sizeof(VIDEOFORMATEX));
  723. m_Net = NULL;
  724. if (m_pCaptureChain)
  725. {
  726. delete m_pCaptureChain;
  727. m_pCaptureChain = NULL;
  728. }
  729. // Close the devices
  730. m_InMedia->Reset();
  731. m_InMedia->UnprepareHeaders();
  732. if (fNewDevice)
  733. {
  734. m_PreviousCaptureDevice = -1L; // VIDEO_MAPPER
  735. m_InMedia->Close();
  736. }
  737. // Close the filters
  738. m_pVideoFilter->Close();
  739. // Close the transmit streams
  740. m_SendStream->Destroy();
  741. }
  742. m_DPFlags &= ~DPFLAG_CONFIGURED_SEND;
  743. // Release the QoS Resources
744. // If the associated RequestResources had failed, ReleaseResources can still
745. // be called... it will just come back without having freed anything.
  746. if (m_pDP->m_pIQoS)
  747. {
  748. if (m_DPFlags & DPFLAG_REAL_THING)
  749. {
  750. m_pDP->m_pIQoS->ReleaseResources((GUID *)&MEDIA_TYPE_H323VIDEO, (LPRESOURCEREQUESTLIST)&m_aRRq);
  751. // Terminate CPU usage data collection
  752. StopCPUUsageCollection();
  753. }
  754. m_DPFlags &= ~DPFLAG_REAL_THING;
  755. }
  756. }
  757. #ifdef TEST
  758. LOG((LOGMSG_TIME_SEND_VIDEO_UNCONFIGURE,GetTickCount() - dwTicks));
  759. #endif
  760. }
  761. HRESULT
  762. SendVideoStream::Start()
  763. {
  764. int nRet= IFRAMES_CAPS_UNKNOWN;
  765. FX_ENTRY ("SendVideoStream::Start")
  766. if (m_DPFlags & DPFLAG_STARTED_SEND)
  767. return DPR_SUCCESS;
  768. if (!(m_DPFlags & DPFLAG_CONFIGURED_SEND))
  769. return DPR_NOT_CONFIGURED;
  770. // to fix: if we optimize SetNetworkInterface to allow
  771. // us to transition from preview->sending without having
  772. // to call stop/start, we need to make sure the flowspec/QOS
773. // stuff gets called there.
  774. SetFlowSpec();
  775. ASSERT(!m_hCapturingThread);
  776. m_ThreadFlags &= ~(DPTFLAG_STOP_RECORD|DPTFLAG_STOP_SEND);
  777. // Start recording thread
  778. if (!(m_ThreadFlags & DPTFLAG_STOP_RECORD))
  779. m_hCapturingThread = CreateThread(NULL,0, SendVideoStream::StartCaptureThread,this,0,&m_CaptureThId);
  780. // ------------------------------------------------------------------------
  781. // Decide whether or not we need to send periodic I-Frames during this call
  782. // Who are we talking to?
  783. if ((m_pIUnknown) && (m_DPFlags & DPFLAG_REAL_THING))
  784. {
  785. HRESULT hr;
  786. IStreamSignal *pIStreamSignal=NULL;
  787. hr = m_pIUnknown->QueryInterface(IID_IStreamSignal, (void **)&pIStreamSignal);
  788. if (HR_SUCCEEDED(hr))
  789. {
  790. nRet = GetIFrameCaps(pIStreamSignal);
  791. pIStreamSignal->Release();
  792. }
  793. }
794. // only disable sending of I-Frames if we know the remote party
795. // can handle it. In this case, NetMeeting 3.0 or TAPI 3.1
  796. if (nRet == IFRAMES_CAPS_NM3)
  797. {
  798. m_pVideoFilter->SetProperty(FM_PROP_PERIODIC_IFRAMES, FALSE);
  799. }
  800. else
  801. {
  802. m_pVideoFilter->SetProperty(FM_PROP_PERIODIC_IFRAMES, TRUE);
  803. }
  804. // ------------------------------------------------------------------------
  805. m_DPFlags |= DPFLAG_STARTED_SEND;
  806. DEBUGMSG (ZONE_DP, ("%s: Record threadid=%x,\r\n", _fx_, m_CaptureThId));
  807. return DPR_SUCCESS;
  808. }
  809. // LOOK: identical to SendAudioStream version.
  810. HRESULT
  811. SendVideoStream::Stop()
  812. {
  813. DWORD dwWait;
  814. if(!(m_DPFlags & DPFLAG_STARTED_SEND))
  815. {
  816. return DPR_SUCCESS;
  817. }
  818. m_ThreadFlags = m_ThreadFlags | DPTFLAG_STOP_SEND | DPTFLAG_STOP_RECORD;
  819. if(m_SendStream) {
  820. m_SendStream->Stop();
  821. m_SendStream->Reset();
  822. }
  823. /*
  824. * we want to wait for all the threads to exit, but we need to handle windows
  825. * messages (mostly from winsock) while waiting.
  826. */
  827. if(m_hCapturingThread) {
  828. dwWait = WaitForSingleObject (m_hCapturingThread, INFINITE);
  829. DEBUGMSG (ZONE_VERBOSE, ("STOP2: dwWait =%d\r\n", dwWait));
  830. ASSERT(dwWait != WAIT_FAILED);
  831. CloseHandle(m_hCapturingThread);
  832. m_hCapturingThread = NULL;
  833. }
  834. m_DPFlags &= ~DPFLAG_STARTED_SEND;
  835. return DPR_SUCCESS;
  836. }
  837. HRESULT STDMETHODCALLTYPE SendVideoStream::SetMaxBitrate(UINT uMaxBitrate)
  838. {
  839. DWORD dwFrameRate=0;
  840. UINT uSize=sizeof(DWORD);
  841. BOOL bRet;
  842. HRESULT hr;
  843. hr = GetProperty(PROP_VIDEO_FRAME_RATE, &dwFrameRate, &uSize);
  844. if (SUCCEEDED(hr))
  845. {
  846. bRet = SetTargetRates(dwFrameRate, (DWORD)uMaxBitrate);
  847. if (bRet)
  848. hr = S_OK;
  849. else
  850. hr = E_FAIL;
  851. }
  852. return hr;
  853. }
  854. // IProperty::GetProperty / SetProperty
  855. // (DataPump::MediaChannel::GetProperty)
  856. // Properties of the MediaStream.
  857. STDMETHODIMP
  858. SendVideoStream::GetProperty(
  859. DWORD prop,
  860. PVOID pBuf,
  861. LPUINT pcbBuf
  862. )
  863. {
  864. HRESULT hr = DPR_SUCCESS;
  865. DWORD dwValue;
  866. DWORD_PTR dwPropVal;
  867. UINT len = sizeof(DWORD); // most props are DWORDs
  868. if (!pBuf || *pcbBuf < len)
  869. {
  870. *pcbBuf = len;
  871. return DPR_INVALID_PARAMETER;
  872. }
  873. switch (prop)
  874. {
  875. #ifdef OLDSTUFF
  876. case PROP_NET_SEND_STATS:
  877. if (m_Net && *pcbBuf >= sizeof(RTP_STATS))
  878. {
  879. m_Net->GetSendStats((RTP_STATS *)pBuf);
  880. *pcbBuf = sizeof(RTP_STATS);
  881. } else
  882. hr = DPR_INVALID_PROP_VAL;
  883. break;
  884. #endif
  885. case PROP_DURATION:
  886. hr = m_InMedia->GetProp(MC_PROP_DURATION, &dwPropVal);
  887. *(DWORD *)pBuf = (DWORD)dwPropVal;
  888. break;
  889. case PROP_RECORD_ON:
  890. *(DWORD *)pBuf = ((m_DPFlags & DPFLAG_ENABLE_SEND) !=0);
  891. break;
  892. case PROP_CAPTURE_DEVICE:
  893. *(UINT *)pBuf = m_CaptureDevice;
  894. break;
  895. case PROP_VIDEO_FRAME_RATE:
  896. *((DWORD *)pBuf) = 100000 / m_frametime;
  897. break;
  898. case PROP_VIDEO_IMAGE_QUALITY:
  899. hr = GetTemporalSpatialTradeOff((DWORD *)pBuf);
  900. break;
  901. case PROP_VIDEO_CAPTURE_AVAILABLE:
  902. *(DWORD *)pBuf = (m_DPFlags & DP_FLAG_RECORD_CAP) != 0;
  903. break;
  904. case PROP_VIDEO_CAPTURE_DIALOGS_AVAILABLE:
  905. hr = m_InMedia->GetProp(MC_PROP_VFW_DIALOGS, &dwPropVal);
  906. *(DWORD *)pBuf = (DWORD)dwPropVal;
  907. break;
  908. case PROP_VIDEO_PREVIEW_ON:
  909. *(DWORD *)pBuf = ((m_DPFlags & DPFLAG_ENABLE_PREVIEW) != 0);
  910. break;
  911. case PROP_PAUSE_SEND:
  912. *(DWORD *)pBuf = ((m_ThreadFlags & DPTFLAG_PAUSE_SEND) != 0);
  913. break;
  914. default:
  915. hr = DPR_INVALID_PROP_ID;
  916. break;
  917. }
  918. return hr;
  919. }
  920. STDMETHODIMP
  921. SendVideoStream::SetProperty(
  922. DWORD prop,
  923. PVOID pBuf,
  924. UINT cbBuf
  925. )
  926. {
  927. DWORD dw;
  928. HRESULT hr = S_OK;
  929. if (cbBuf < sizeof (DWORD))
  930. return DPR_INVALID_PARAMETER;
  931. switch (prop)
  932. {
  933. case PROP_CAPTURE_DEVICE:
  934. if (m_DPFlags & DPFLAG_ENABLE_PREVIEW)
  935. {
  936. return DPR_INVALID_PARAMETER;
  937. }
  938. else
  939. {
  940. m_CaptureDevice = *(UINT*)pBuf;
  941. m_InMedia->SetProp(MC_PROP_MEDIA_DEV_ID, (DWORD)m_CaptureDevice);
  942. }
  943. break;
  944. case PROP_VIDEO_FRAME_RATE:
  945. if (*(DWORD*)pBuf <= m_maxfps) {
  946. DEBUGMSG(ZONE_VERBOSE, ("DP: setting fps = %d \n", *(DWORD*)pBuf));
  947. // set frame rate here
  948. m_frametime = 100000 / *(DWORD*)pBuf;
  949. }
  950. break;
  951. case PROP_VIDEO_IMAGE_QUALITY:
  952. hr = SetTemporalSpatialTradeOff(*(DWORD*)pBuf);
  953. break;
  954. case PROP_VIDEO_RESET_IMAGE_QUALITY:
  955. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_IMAGE_QUALITY, VCM_DEFAULT_IMAGE_QUALITY);
  956. break;
  957. case PROP_VIDEO_CAPTURE_DIALOG:
  958. hr = ((VideoInControl *)m_InMedia)->DisplayDriverDialog(GetActiveWindow(), *(DWORD *)pBuf);
  959. break;
  960. case PROP_VIDEO_SIZE:
  961. ASSERT(0);
  962. break;
  963. case PROP_VIDEO_PREVIEW_ON:
  964. ASSERT(0);
  965. break;
  966. case PROP_VIDEO_AUDIO_SYNC:
  967. if (*(DWORD *)pBuf)
  968. m_DPFlags |= DPFLAG_AV_SYNC;
  969. else
  970. m_DPFlags &= ~DPFLAG_AV_SYNC;
  971. break;
  972. case PROP_PAUSE_SEND:
  973. if (*(DWORD *)pBuf)
  974. m_ThreadFlags |= DPTFLAG_PAUSE_SEND;
  975. else
  976. m_ThreadFlags &= ~DPTFLAG_PAUSE_SEND;
  977. break;
  978. default:
  979. return DPR_INVALID_PROP_ID;
  980. break;
  981. }
  982. return hr;
  983. }
  984. //---------------------------------------------------------------------
  985. // IVideoRender implementation and support functions
  986. // IVideoRender::Init
  987. // (DataPump::Init)
  988. STDMETHODIMP
  989. SendVideoStream::Init(
  990. DWORD_PTR dwUser,
  991. LPFNFRAMEREADY pfCallback
  992. )
  993. {
  994. // Save the event away. Note that we DO allow both send and receive to
  995. // share an event
  996. m_hRenderEvent = (HANDLE) dwUser;
  997. // if pfCallback is NULL then dwUser is an event handle
  998. m_pfFrameReadyCallback = pfCallback;
  999. return DPR_SUCCESS;
  1000. }
  1001. // IVideoRender::Done
  1002. // (DataPump::Done)
  1003. STDMETHODIMP
  1004. SendVideoStream::Done( )
  1005. {
  1006. m_hRenderEvent = NULL;
  1007. m_pfFrameReadyCallback = NULL;
  1008. return DPR_SUCCESS;
  1009. }
  1010. // IVideoRender::GetFrame
  1011. // (DataPump::GetFrame)
  1012. STDMETHODIMP
  1013. SendVideoStream::GetFrame(
  1014. FRAMECONTEXT* pfc
  1015. )
  1016. {
  1017. HRESULT hr;
  1018. PVOID pData = NULL;
  1019. UINT cbData = 0;
  1020. // Validate parameters
  1021. if (!pfc )
  1022. return DPR_INVALID_PARAMETER;
  1023. // Don't arbitrarily call out while holding this crs or you may deadlock...
  1024. EnterCriticalSection(&m_crs);
  1025. if ((m_DPFlags & DPFLAG_CONFIGURED_SEND) && m_pNextPacketToRender && !m_pNextPacketToRender->m_fRendering)
  1026. {
  1027. m_pNextPacketToRender->m_fRendering = TRUE;
  1028. m_pNextPacketToRender->GetDevData(&pData,&cbData);
  1029. pfc->lpData = (PUCHAR) pData;
  1030. pfc->dwReserved = (DWORD_PTR) m_pNextPacketToRender;
  1031. // set bmi length?
  1032. pfc->lpbmi = (PBITMAPINFO)&m_fDevSend.bih;
  1033. pfc->lpClipRect = &m_cliprect;
  1034. m_cRendering++;
  1035. hr = S_OK;
  1036. LOG((LOGMSG_GET_SEND_FRAME,m_pNextPacketToRender->GetIndex()));
  1037. } else
  1038. hr = S_FALSE; // nothing ready to render
  1039. LeaveCriticalSection(&m_crs);
  1040. return hr;
  1041. }
  1042. // IVideoRender::ReleaseFrame
  1043. // (DataPump::ReleaseFrame)
  1044. STDMETHODIMP
  1045. SendVideoStream::ReleaseFrame(
  1046. FRAMECONTEXT* pfc
  1047. )
  1048. {
  1049. HRESULT hr;
  1050. MediaPacket *pPacket;
  1051. // Validate parameters
  1052. if (!pfc)
  1053. return DPR_INVALID_PARAMETER;
  1054. // Handle a send frame
  1055. {
  1056. EnterCriticalSection(&m_crs);
  1057. // Don't arbitrarily call out while holding this crs or you may deadlock...
  1058. if ((m_DPFlags & DPFLAG_CONFIGURED_SEND) && (pPacket = (MediaPacket *)pfc->dwReserved) && pPacket->m_fRendering)
  1059. {
  1060. LOG((LOGMSG_RELEASE_SEND_FRAME,pPacket->GetIndex()));
  1061. pPacket->m_fRendering = FALSE;
  1062. pfc->dwReserved = 0;
1063. // if it's not the current frame
  1064. if (m_pNextPacketToRender != pPacket) {
  1065. pPacket->Recycle();
  1066. m_SendStream->Release(pPacket);
  1067. }
  1068. m_cRendering--;
  1069. hr = S_OK;
  1070. }
  1071. else
  1072. hr = DPR_INVALID_PARAMETER;
  1073. LeaveCriticalSection(&m_crs);
  1074. }
  1075. return hr;
  1076. }
  1077. HRESULT __stdcall SendVideoStream::SendKeyFrame(void)
  1078. {
  1079. MMRESULT mmr;
  1080. HVCMSTREAM hvs;
  1081. ASSERT(m_pVideoFilter);
  1082. if ((mmr = m_pVideoFilter->RequestIFrame()) != MMSYSERR_NOERROR)
  1083. {
  1084. return S_FALSE;
  1085. }
  1086. return S_OK;
  1087. }
  1088. // IVideoChannel
  1089. HRESULT __stdcall SendVideoStream::SetTemporalSpatialTradeOff(DWORD dwVal)
  1090. {
  1091. HRESULT hr=DPR_NOT_CONFIGURED;
  1092. ASSERT(m_pVideoFilter);
  1093. if (m_pVideoFilter)
  1094. {
  1095. if (m_pTSTable == NULL)
  1096. {
  1097. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_IMAGE_QUALITY, dwVal);
  1098. }
  1099. m_dwCurrentTSSetting = dwVal;
  1100. return S_OK;
  1101. }
  1102. return hr;
  1103. }
  1104. HRESULT __stdcall SendVideoStream::GetTemporalSpatialTradeOff(DWORD *pdwVal)
  1105. {
  1106. HRESULT hr=DPR_NOT_CONFIGURED;
  1107. ASSERT(m_pVideoFilter);
  1108. if (m_pVideoFilter)
  1109. {
  1110. if (m_pTSTable == NULL)
  1111. {
  1112. *pdwVal = m_dwCurrentTSSetting;
  1113. hr = S_OK;
  1114. }
  1115. else
  1116. {
  1117. hr = m_pVideoFilter->GetProperty(FM_PROP_VIDEO_IMAGE_QUALITY, pdwVal);
  1118. }
  1119. }
  1120. return hr;
  1121. }
  1122. HRESULT STDMETHODCALLTYPE RecvVideoStream::QueryInterface(REFIID iid, void **ppVoid)
  1123. {
  1124. // resolve duplicate inheritance to the SendMediaStream;
  1125. extern IID IID_IProperty;
  1126. if (iid == IID_IUnknown)
  1127. {
  1128. *ppVoid = (IUnknown*)((RecvMediaStream*)this);
  1129. }
  1130. else if (iid == IID_IMediaChannel)
  1131. {
  1132. *ppVoid = (IMediaChannel*)((RecvMediaStream *)this);
  1133. }
  1134. // else if (iid == IID_IVideoChannel)
  1135. // {
  1136. // *ppVoid = (IVideoChannel*)this;
  1137. // }
  1138. else if (iid == IID_IProperty)
  1139. {
  1140. *ppVoid = NULL;
  1141. ERROR_OUT(("Don't QueryInterface for IID_IProperty, use IMediaChannel"));
  1142. return E_NOINTERFACE;
  1143. }
  1144. else if (iid == IID_IVideoRender)// satisfy symmetric property of QI
  1145. {
  1146. *ppVoid = (IVideoRender *)this;
  1147. }
  1148. else
  1149. {
  1150. *ppVoid = NULL;
  1151. return E_NOINTERFACE;
  1152. }
  1153. AddRef();
  1154. return S_OK;
  1155. }
  1156. ULONG STDMETHODCALLTYPE RecvVideoStream::AddRef(void)
  1157. {
  1158. return InterlockedIncrement(&m_lRefCount);
  1159. }
  1160. ULONG STDMETHODCALLTYPE RecvVideoStream::Release(void)
  1161. {
  1162. LONG lRet;
  1163. lRet = InterlockedDecrement(&m_lRefCount);
  1164. if (lRet == 0)
  1165. {
  1166. delete this;
  1167. return 0;
  1168. }
  1169. else
  1170. return lRet;
  1171. }
  1172. DWORD CALLBACK RecvVideoStream::StartRenderingThread(PVOID pVoid)
  1173. {
  1174. RecvVideoStream *pThisStream = (RecvVideoStream*)pVoid;
  1175. return pThisStream->RenderingThread();
  1176. }
  1177. HRESULT
  1178. RecvVideoStream::Initialize(DataPump *pDP)
  1179. {
  1180. HRESULT hr = DPR_OUT_OF_MEMORY;
  1181. DWORD dwFlags = DP_FLAG_FULL_DUPLEX | DP_FLAG_AUTO_SWITCH ;
  1182. MEDIACTRLINIT mcInit;
  1183. FX_ENTRY ("DP::RecvVideoStream")
  1184. m_pIUnknown = (IUnknown *)NULL;
  1185. InitializeCriticalSection(&m_crs);
  1186. InitializeCriticalSection(&m_crsVidQoS);
  1187. InitializeCriticalSection(&m_crsIStreamSignal);
  1188. dwFlags |= DP_FLAG_VCM | DP_FLAG_VIDEO ;
  1189. // store the platform flags
  1190. // enable Send and Recv by default
  1191. m_DPFlags = (dwFlags & DP_MASK_PLATFORM) | DPFLAG_ENABLE_RECV;
  1192. // store a back pointer to the datapump container
  1193. m_pDP = pDP;
  1194. m_Net = NULL;
  1195. m_pIRTPRecv = NULL;
  1196. // Initialize data (should be in constructor)
  1197. m_RenderingDevice = (UINT) -1; // use VIDEO_MAPPER
  1198. // Create Receive and Transmit video streams
  1199. DBG_SAVE_FILE_LINE
  1200. m_RecvStream = new RVStream(MAX_RXVRING_SIZE);
  1201. if (!m_RecvStream )
  1202. {
  1203. DEBUGMSG (ZONE_DP, ("%s: RxStream new failed\r\n", _fx_));
  1204. goto StreamAllocError;
  1205. }
  1206. // Create Input and Output video filters
  1207. DBG_SAVE_FILE_LINE
  1208. m_pVideoFilter = new VcmFilter();
  1209. m_dwSrcSize = 0;
  1210. if (m_pVideoFilter == NULL)
  1211. {
  1212. DEBUGMSG (ZONE_DP, ("%s: VcmFilter new failed\r\n", _fx_));
  1213. goto FilterAllocError;
  1214. }
  1215. //Create Video MultiMedia device control objects
  1216. DBG_SAVE_FILE_LINE
  1217. m_OutMedia = new VideoOutControl();
  1218. if ( !m_OutMedia)
  1219. {
  1220. DEBUGMSG (ZONE_DP, ("%s: MediaControl new failed\r\n", _fx_));
  1221. goto MediaAllocError;
  1222. }
  1223. // Initialize the recv-stream media control object
  1224. mcInit.dwFlags = dwFlags | DP_FLAG_RECV;
  1225. hr = m_OutMedia->Initialize(&mcInit);
  1226. if (hr != DPR_SUCCESS)
  1227. {
  1228. DEBUGMSG (ZONE_DP, ("%s: OMedia->Init failed, hr=0x%lX\r\n", _fx_, hr));
  1229. goto MediaAllocError;
  1230. }
  1231. m_DPFlags |= DP_FLAG_RECORD_CAP ;
  1232. // set media to half duplex mode by default
  1233. m_OutMedia->SetProp(MC_PROP_DUPLEX_TYPE, DP_FLAG_HALF_DUPLEX);
  1234. m_DPFlags |= DPFLAG_INITIALIZED;
  1235. return DPR_SUCCESS;
  1236. MediaAllocError:
  1237. if (m_OutMedia) delete m_OutMedia;
  1238. FilterAllocError:
  1239. if (m_pVideoFilter) delete m_pVideoFilter;
  1240. StreamAllocError:
  1241. if (m_RecvStream) delete m_RecvStream;
  1242. ERRORMESSAGE( ("%s: exit, hr=0x%lX\r\n", _fx_, hr));
  1243. return hr;
  1244. }
  1245. // LOOK: identical to RecvAudioStream version.
  1246. RecvVideoStream::~RecvVideoStream()
  1247. {
  1248. if (m_DPFlags & DPFLAG_INITIALIZED) {
  1249. m_DPFlags &= ~DPFLAG_INITIALIZED;
  1250. if (m_DPFlags & DPFLAG_CONFIGURED_RECV)
  1251. UnConfigure();
1252. // Close the receive stream
1253. if (m_RecvStream) delete m_RecvStream;
1254. // Close the rendering device
1255. if (m_OutMedia) { delete m_OutMedia;}
  1256. // Close the filters
  1257. if (m_pVideoFilter)
  1258. delete m_pVideoFilter;
  1259. m_pDP->RemoveMediaChannel(MCF_RECV| MCF_VIDEO, this);
  1260. }
  1261. DeleteCriticalSection(&m_crs);
  1262. DeleteCriticalSection(&m_crsVidQoS);
  1263. DeleteCriticalSection(&m_crsIStreamSignal);
  1264. }
  1265. HRESULT
  1266. RecvVideoStream::Configure(
  1267. BYTE __RPC_FAR *pFormat,
  1268. UINT cbFormat,
  1269. BYTE __RPC_FAR *pChannelParams,
  1270. UINT cbParams,
  1271. IUnknown *pUnknown)
  1272. {
  1273. MMRESULT mmr;
  1274. DWORD dwSrcSize;
  1275. HRESULT hr;
  1276. BOOL fRet;
  1277. MEDIAPACKETINIT pcktInit;
  1278. MEDIACTRLCONFIG mcConfig;
  1279. MediaPacket **ppPckt;
  1280. ULONG cPckt;
  1281. DWORD_PTR dwPropVal;
  1282. UINT ringSize = MAX_RXVRING_SIZE;
  1283. DWORD dwFlags, dwSizeDst, dwMaxFrag, dwMaxBitRate = 0;
  1284. VIDEOFORMATEX *pfRecv = (VIDEOFORMATEX*)pFormat;
  1285. VIDEO_CHANNEL_PARAMETERS vidChannelParams;
  1286. int optval=8192*4; // Use max SQCIF, QCIF I frame size
  1287. #ifdef TEST
  1288. DWORD dwTicks;
  1289. #endif
  1290. FX_ENTRY ("RecvVideoStream::Configure")
  1291. #ifdef TEST
  1292. dwTicks = GetTickCount();
  1293. #endif
  1294. // m_Net = pNet;
  1295. // get format details
  1296. if ((NULL == pFormat) || (NULL == pChannelParams)
  1297. || (cbParams != sizeof(vidChannelParams)))
  1298. {
  1299. return DPR_INVALID_PARAMETER;
  1300. }
  1301. vidChannelParams = *(VIDEO_CHANNEL_PARAMETERS *)pChannelParams;
  1302. if (! (m_DPFlags & DPFLAG_INITIALIZED))
  1303. return DPR_OUT_OF_MEMORY; //BUGBUG: return proper error;
  1304. // if (m_Net)
  1305. // {
  1306. // hr = m_Net->QueryInterface(IID_IRTPRecv, (void **)&m_pIRTPRecv);
  1307. // if (!SUCCEEDED(hr))
  1308. // return hr;
  1309. // }
  1310. mmr = VcmFilter::SuggestDecodeFormat(pfRecv, &m_fDevRecv);
  1311. // initialize m_cliprect
  1312. SetRect(&m_cliprect, 0, 0, m_fDevRecv.bih.biWidth, m_fDevRecv.bih.biHeight);
  1313. // Initialize the recv-stream media control object
  1314. mcConfig.uDuration = MC_USING_DEFAULT; // set duration by samples per pkt
  1315. mcConfig.pDevFmt = &m_fDevRecv;
  1316. UPDATE_REPORT_ENTRY(g_prptCallParameters, pfRecv->dwFormatTag, REP_RECV_VIDEO_FORMAT);
  1317. RETAILMSG(("NAC: Video Recv Format: %.4s", (LPSTR)&pfRecv->dwFormatTag));
  1318. mcConfig.hStrm = (DPHANDLE) m_RecvStream;
  1319. mcConfig.uDevId = m_RenderingDevice;
  1320. mcConfig.cbSamplesPerPkt = ChoosePacketSize(pfRecv);
  1321. hr = m_OutMedia->Configure(&mcConfig);
  1322. m_OutMedia->GetProp (MC_PROP_SIZE, &dwPropVal);
  1323. dwSizeDst = (DWORD)dwPropVal;
  1324. // BUGBUG - HARDCODED platform flags. The right way to do this is to
  1325. // have a smart filter object create() that creates a platform-aware
  1326. // instance of the object
  1327. dwFlags = DP_FLAG_RECV | DP_FLAG_VCM | DP_FLAG_VIDEO;
  1328. mmr = m_pVideoFilter->Open(pfRecv, &m_fDevRecv, 0); // maxfragsize == 0
1329. if (mmr != MMSYSERR_NOERROR)
1330. {
1331. DEBUGMSG (ZONE_DP, ("%s: RecvVideoFilter->Open failed, mmr=%d\r\n", _fx_, mmr));
  1332. hr = DPR_CANT_OPEN_CODEC;
  1333. goto RecvFilterInitError;
  1334. }
  1335. // set the max. fragment size
  1336. DEBUGMSG(ZONE_DP,("%s: Video Recv: maxBitRate=%d, maxBPP=%d, MPI=%d\r\n", _fx_ ,vidChannelParams.ns_params.maxBitRate*100, vidChannelParams.ns_params.maxBPP*1024, vidChannelParams.ns_params.MPI ? 30 / vidChannelParams.ns_params.MPI : 30));
  1337. UPDATE_REPORT_ENTRY(g_prptCallParameters, vidChannelParams.ns_params.MPI ? 30 / vidChannelParams.ns_params.MPI : 30, REP_RECV_VIDEO_MAXFPS);
  1338. UPDATE_REPORT_ENTRY(g_prptCallParameters, vidChannelParams.ns_params.maxBitRate*100, REP_RECV_VIDEO_BITRATE);
  1339. RETAILMSG(("NAC: Video Recv Max Frame Rate (negotiated - fps): %ld", vidChannelParams.ns_params.MPI ? 30 / vidChannelParams.ns_params.MPI : 30));
  1340. RETAILMSG(("NAC: Video Recv Max Bitrate (negotiated - bps): %ld", vidChannelParams.ns_params.maxBitRate*100));
  1341. INIT_COUNTER_MAX(g_pctrVideoReceive, vidChannelParams.ns_params.MPI ? 30 / vidChannelParams.ns_params.MPI : 30);
  1342. INIT_COUNTER_MAX(g_pctrVideoReceiveBytes, vidChannelParams.ns_params.maxBitRate*100);
  1343. // Initialize the recv stream
  1344. ZeroMemory (&pcktInit, sizeof (pcktInit));
  1345. pcktInit.pStrmConvSrcFmt = pfRecv;
  1346. pcktInit.pStrmConvDstFmt = &m_fDevRecv;
  1347. pcktInit.dwFlags = dwFlags;
  1348. pcktInit.cbOffsetRawData = 0;
  1349. pcktInit.cbSizeRawData = dwSizeDst;
  1350. m_OutMedia->FillMediaPacketInit (&pcktInit);
  1351. m_pVideoFilter->SuggestSrcSize(dwSizeDst, &m_dwSrcSize);
  1352. pcktInit.cbSizeNetData = m_dwSrcSize;
  1353. pcktInit.cbOffsetNetData = sizeof (RTP_HDR);
  1354. m_OutMedia->GetProp (MC_PROP_SPP, &dwPropVal);
  1355. ringSize = 8; // reserve space for 8 video frames
  1356. // may need to increase the number if a/v sync is enabled.
  1357. fRet = ((RVStream*)m_RecvStream)->Initialize (DP_FLAG_VIDEO, ringSize, NULL, &pcktInit, (DWORD)dwPropVal, pfRecv->nSamplesPerSec, m_pVideoFilter);
  1358. if (! fRet)
  1359. {
  1360. DEBUGMSG (ZONE_DP, ("%s: RxvStream->Init failed, fRet=0%u\r\n", _fx_, fRet));
  1361. hr = DPR_CANT_INIT_RXV_STREAM;
  1362. goto RxStreamInitError;
  1363. }
  1364. // WS2Qos will be called in Start to communicate stream reservations to the
  1365. // remote endpoint using a RESV message
  1366. //
  1367. // We use a peak-rate allocation approach based on our target bitrates
  1368. // Note that for the token bucket size and the maximum SDU size, we now
  1369. // account for IP header overhead, and use the max frame fragment size
  1370. // instead of the maximum compressed image size returned by the codec
  1371. //
  1372. // Some of the parameters are left unspecified because they are set
  1373. // in the sender Tspec.
1374. // Compute 70% of the actual bandwidth (ns_params.maxBitRate is already expressed in units of 100 bps)
  1375. dwMaxBitRate = vidChannelParams.ns_params.maxBitRate*70;
  1376. if (dwMaxBitRate > BW_ISDN_BITS)
  1377. {
  1378. dwMaxFrag = 1350;
  1379. }
  1380. else
  1381. {
  1382. dwMaxFrag = 512;
  1383. }
  1384. InitVideoFlowspec(&m_flowspec, dwMaxBitRate, dwMaxFrag, VID_AVG_PACKET_SIZE);
  1385. /*
  1386. // assume no more than 32 fragments for CIF and
  1387. // 20 fragments for SQCIF, QCIF
  1388. //BLOAT WARNING: this could be quite a bit of memory
  1389. // need to fix this to use a heap instead of fixed size buffers.
  1390. */
  1391. // prepare headers for RxvStream
  1392. m_RecvStream->GetRing (&ppPckt, &cPckt);
  1393. m_OutMedia->RegisterData (ppPckt, cPckt);
  1394. m_OutMedia->PrepareHeaders ();
  1395. // Keep a weak reference to the IUnknown interface
  1396. // We will use it to query a Stream Signal interface pointer in Start()
  1397. m_pIUnknown = pUnknown;
  1398. m_DPFlags |= DPFLAG_CONFIGURED_RECV;
  1399. #ifdef TEST
  1400. LOG((LOGMSG_TIME_RECV_VIDEO_CONFIGURE,GetTickCount() - dwTicks));
  1401. #endif
  1402. return DPR_SUCCESS;
  1403. RxStreamInitError:
  1404. m_pVideoFilter->Close();
  1405. RecvFilterInitError:
  1406. m_OutMedia->Close();
  1407. if (m_pIRTPRecv)
  1408. {
  1409. m_pIRTPRecv->Release();
  1410. m_pIRTPRecv = NULL;
  1411. }
  1412. DEBUGMSG (1, ("%s: failed, hr=0%u\r\n", _fx_, hr));
  1413. return hr;
  1414. }
  1415. void RecvVideoStream::UnConfigure()
  1416. {
  1417. #ifdef TEST
  1418. DWORD dwTicks;
  1419. dwTicks = GetTickCount();
  1420. #endif
  1421. if ( (m_DPFlags & DPFLAG_CONFIGURED_RECV)) {
  1422. Stop();
  1423. // Close the RTP state if its open
  1424. //m_Net->Close(); We should be able to do this in Disconnect()
  1425. m_Net = NULL;
  1426. if (m_pIRTPRecv)
  1427. {
  1428. m_pIRTPRecv->Release();
  1429. m_pIRTPRecv = NULL;
  1430. }
  1431. m_OutMedia->Reset();
  1432. m_OutMedia->UnprepareHeaders();
  1433. m_OutMedia->Close();
  1434. // Close the filter
  1435. m_pVideoFilter->Close();
  1436. // Close the receive stream
  1437. m_RecvStream->Destroy();
  1438. m_DPFlags &= ~(DPFLAG_CONFIGURED_RECV);
  1439. }
  1440. #ifdef TEST
  1441. LOG((LOGMSG_TIME_RECV_VIDEO_UNCONFIGURE,GetTickCount() - dwTicks));
  1442. #endif
  1443. }
  1444. // NOTE: Identical to RecvAudioStream. Move up?
  1445. HRESULT
  1446. RecvVideoStream::Start()
  1447. {
  1448. int nRet=IFRAMES_CAPS_UNKNOWN;
  1449. FX_ENTRY ("RecvVideoStream::Start");
  1450. if (m_DPFlags & DPFLAG_STARTED_RECV)
  1451. return DPR_SUCCESS;
  1452. if ((!(m_DPFlags & DPFLAG_CONFIGURED_RECV)) || (m_pIRTPRecv==NULL))
  1453. return DPR_NOT_CONFIGURED;
  1454. ASSERT(!m_hRenderingThread);
  1455. m_ThreadFlags &= ~(DPTFLAG_STOP_PLAY|DPTFLAG_STOP_RECV);
  1456. m_RecvStream->SetRTP(m_pIRTPRecv);
  1457. SetFlowSpec();
  1458. // --------------------------------------------------------------------------
  1459. // Decide whether or not we will be making I-Frame requests for lost packets
  1460. // This should be done for all scenarios except when we are calling
  1461. // NetMeeting 2.x. NM 2.x will send us periodic I-Frames.
  1462. m_fDiscontinuity = FALSE;
  1463. m_dwLastIFrameRequest = 0UL;
  1464. m_ulLastSeq = UINT_MAX;
  1465. if (m_pIUnknown)
  1466. {
  1467. HRESULT hr;
  1468. if (!m_pIStreamSignal)
  1469. {
  1470. hr = m_pIUnknown->QueryInterface(IID_IStreamSignal, (void **)&m_pIStreamSignal);
  1471. if (!HR_SUCCEEDED(hr))
  1472. {
  1473. m_pIStreamSignal = (IStreamSignal *)NULL;
  1474. m_pIUnknown = (IUnknown *)NULL;
  1475. }
  1476. }
  1477. if (m_pIStreamSignal)
  1478. {
  1479. nRet = GetIFrameCaps(m_pIStreamSignal);
  1480. if (nRet == IFRAMES_CAPS_NM2)
  1481. {
  1482. m_pIStreamSignal->Release();
  1483. m_pIStreamSignal = NULL;
  1484. m_pIUnknown = NULL;
  1485. }
  1486. }
  1487. }
  1488. // --------------------------------------------------------------------------
  1489. // Start playback thread
  1490. if (!(m_ThreadFlags & DPTFLAG_STOP_PLAY))
  1491. m_hRenderingThread = CreateThread(NULL,0,RecvVideoStream::StartRenderingThread,this,0,&m_RenderingThId);
  1492. // Start receive thread
  1493. #if 0
  1494. if (!m_pDP->m_hRecvThread) {
  1495. m_pDP->m_hRecvThread = CreateThread(NULL,0,(LPTHREAD_START_ROUTINE)&StartDPRecvThread,m_pDP,0,&m_pDP->m_RecvThId);
  1496. //Tell the recv Thread we've turned on
  1497. if (m_pDP->m_hRecvThreadChangeEvent)
  1498. SetEvent (m_pDP->m_hRecvThreadChangeEvent);
  1499. }
  1500. m_pDP->m_nReceivers++;
  1501. #else
  1502. m_pDP->StartReceiving(this);
  1503. #endif
  1504. m_DPFlags |= DPFLAG_STARTED_RECV;
  1505. DEBUGMSG (ZONE_DP, ("%s: Rendering ThId =%x\r\n",_fx_, m_RenderingThId));
  1506. return DPR_SUCCESS;
  1507. }
  1508. // LOOK: Identical to RecvAudioStream version.
  1509. HRESULT
  1510. RecvVideoStream::Stop()
  1511. {
  1512. DWORD dwWait;
  1513. FX_ENTRY ("RecvVideoStream::Stop");
  1514. if(!(m_DPFlags & DPFLAG_STARTED_RECV))
  1515. {
  1516. return DPR_SUCCESS;
  1517. }
  1518. m_ThreadFlags = m_ThreadFlags |
  1519. DPTFLAG_STOP_RECV | DPTFLAG_STOP_PLAY ;
  1520. m_pDP->StopReceiving(this);
  1521. DEBUGMSG (ZONE_VERBOSE, ("%s: m_hRenderingThread =%x\r\n",_fx_, m_hRenderingThread));
  1522. /*
  1523. * we want to wait for all the threads to exit, but we need to handle windows
  1524. * messages (mostly from winsock) while waiting.
  1525. * we made several attempts at that. When we wait for messages in addition
  1526. * to the thread exit events, we crash in rrcm.dll, possibly because we
  1527. * process a winsock message to a thread that is terminating.
  1528. *
  1529. * needs more investigation before putting in code that handles messages
  1530. */
  1531. if(m_hRenderingThread)
  1532. {
  1533. dwWait = WaitForSingleObject (m_hRenderingThread, INFINITE);
  1534. DEBUGMSG (ZONE_VERBOSE, ("%s: dwWait =%d\r\n", _fx_, dwWait));
  1535. ASSERT(dwWait != WAIT_FAILED);
  1536. CloseHandle(m_hRenderingThread);
  1537. m_hRenderingThread = NULL;
  1538. }
  1539. // Access to the stream signal interface needs to be serialized. We could crash
  1540. // if we release the interface here and we are still using that interface in the
  1541. // RTP callback.
  1542. if (m_pIStreamSignal)
  1543. {
  1544. EnterCriticalSection(&m_crsIStreamSignal);
  1545. m_pIStreamSignal->Release();
  1546. m_pIStreamSignal = (IStreamSignal *)NULL;
  1547. LeaveCriticalSection(&m_crsIStreamSignal);
  1548. }
  1549. //This is per channel, but the variable is "DPFlags"
  1550. m_DPFlags &= ~DPFLAG_STARTED_RECV;
  1551. return DPR_SUCCESS;
  1552. }
  1553. // IProperty::GetProperty / SetProperty
  1554. // Properties of the MediaChannel.
  1555. STDMETHODIMP
  1556. RecvVideoStream::GetProperty(
  1557. DWORD prop,
  1558. PVOID pBuf,
  1559. LPUINT pcbBuf
  1560. )
  1561. {
  1562. HRESULT hr = DPR_SUCCESS;
  1563. RTP_STATS RTPStats;
  1564. DWORD dwValue;
  1565. DWORD_PTR dwPropVal;
  1566. UINT len = sizeof(DWORD); // most props are DWORDs
  1567. if (!pBuf || *pcbBuf < len)
  1568. {
  1569. *pcbBuf = len;
  1570. return DPR_INVALID_PARAMETER;
  1571. }
  1572. switch (prop)
  1573. {
  1574. #ifdef OLDSTUFF
  1575. case PROP_NET_RECV_STATS:
  1576. if (m_Net && *pcbBuf >= sizeof(RTP_STATS))
  1577. {
  1578. m_Net->GetRecvStats((RTP_STATS *)pBuf);
  1579. *pcbBuf = sizeof(RTP_STATS);
  1580. } else
  1581. hr = DPR_INVALID_PROP_VAL;
  1582. break;
  1583. #endif
  1584. case PROP_DURATION:
  1585. hr = m_OutMedia->GetProp(MC_PROP_DURATION, &dwPropVal);
  1586. *(DWORD *)pBuf = (DWORD)dwPropVal;
  1587. break;
  1588. case PROP_PLAY_ON:
1589. *(DWORD *)pBuf = ((m_DPFlags & DPFLAG_ENABLE_RECV) != 0);
  1590. break;
  1591. case PROP_PLAYBACK_DEVICE:
  1592. *(DWORD *)pBuf = m_RenderingDevice;
  1593. break;
  1594. case PROP_VIDEO_BRIGHTNESS:
  1595. hr = m_pVideoFilter->GetProperty(FM_PROP_VIDEO_BRIGHTNESS, (DWORD *)pBuf);
  1596. break;
  1597. case PROP_VIDEO_CONTRAST:
  1598. hr = m_pVideoFilter->GetProperty(FM_PROP_VIDEO_CONTRAST, (DWORD *)pBuf);
  1599. break;
  1600. case PROP_VIDEO_SATURATION:
  1601. hr = m_pVideoFilter->GetProperty(FM_PROP_VIDEO_SATURATION, (DWORD *)pBuf);
  1602. break;
  1603. case PROP_VIDEO_AUDIO_SYNC:
  1604. *(DWORD *)pBuf = ((m_DPFlags & DPFLAG_AV_SYNC) != 0);
  1605. break;
  1606. case PROP_PAUSE_RECV:
  1607. *(DWORD *)pBuf = ((m_ThreadFlags & DPTFLAG_PAUSE_RECV) != 0);
  1608. break;
  1609. default:
  1610. hr = DPR_INVALID_PROP_ID;
  1611. break;
  1612. }
  1613. return hr;
  1614. }
  1615. STDMETHODIMP
  1616. RecvVideoStream::SetProperty(
  1617. DWORD prop,
  1618. PVOID pBuf,
  1619. UINT cbBuf
  1620. )
  1621. {
  1622. DWORD dw;
  1623. HRESULT hr = S_OK;
  1624. if (cbBuf < sizeof (DWORD))
  1625. return DPR_INVALID_PARAMETER;
  1626. switch (prop)
  1627. {
  1628. #if 0
  1629. case PROP_PLAY_ON:
  1630. {
  1631. DWORD flag = (DPFLAG_ENABLE_RECV);
  1632. if (*(DWORD *)pBuf) {
  1633. m_DPFlags |= flag; // set the flag
  1634. Start();
  1635. }
  1636. else
  1637. {
  1638. m_DPFlags &= ~flag; // clear the flag
  1639. Stop();
  1640. }
  1641. RETAILMSG(("NAC: %s", *(DWORD*)pBuf ? "Enabling":"Disabling"));
  1642. //hr = EnableStream( *(DWORD*)pBuf);
  1643. break;
  1644. }
  1645. #endif
  1646. case PROP_PLAYBACK_DEVICE:
  1647. m_RenderingDevice = *(DWORD*)pBuf;
  1648. // RETAILMSG(("NAC: Setting default playback device to %d", m_RenderingDevice));
  1649. break;
  1650. case PROP_VIDEO_BRIGHTNESS:
  1651. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_BRIGHTNESS, *(DWORD*)pBuf);
  1652. break;
  1653. case PROP_VIDEO_CONTRAST:
  1654. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_CONTRAST, *(DWORD*)pBuf);
  1655. break;
  1656. case PROP_VIDEO_SATURATION:
  1657. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_SATURATION, *(DWORD*)pBuf);
  1658. break;
  1659. case PROP_VIDEO_RESET_BRIGHTNESS:
  1660. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_BRIGHTNESS, VCM_DEFAULT_BRIGHTNESS);
  1661. break;
  1662. case PROP_VIDEO_RESET_CONTRAST:
  1663. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_CONTRAST, VCM_DEFAULT_CONTRAST);
  1664. break;
  1665. case PROP_VIDEO_RESET_SATURATION:
  1666. hr = m_pVideoFilter->SetProperty(FM_PROP_VIDEO_SATURATION, VCM_DEFAULT_SATURATION);
  1667. break;
  1668. case PROP_VIDEO_SIZE:
  1669. // For now, do not change anything if we already are connected
  1670. ASSERT(0);
1671. //return SetVideoSize(m_pDP->m_pNac, *(DWORD*)pBuf);
break; // don't fall through into PROP_VIDEO_AUDIO_SYNC
  1672. case PROP_VIDEO_AUDIO_SYNC:
  1673. if (*(DWORD *)pBuf)
  1674. m_DPFlags |= DPFLAG_AV_SYNC;
  1675. else
  1676. m_DPFlags &= ~DPFLAG_AV_SYNC;
  1677. break;
  1678. case PROP_PAUSE_RECV:
  1679. if (*(DWORD *)pBuf)
  1680. m_ThreadFlags |= DPTFLAG_PAUSE_RECV;
  1681. else
  1682. m_ThreadFlags &= ~DPTFLAG_PAUSE_RECV;
  1683. break;
  1684. default:
  1685. return DPR_INVALID_PROP_ID;
  1686. break;
  1687. }
  1688. return hr;
  1689. }
  1690. //---------------------------------------------------------------------
  1691. // IVideoRender implementation and support functions
  1692. // IVideoRender::Init
  1693. // (DataPump::Init)
  1694. // identical to SendVideoStream::Init
  1695. STDMETHODIMP
  1696. RecvVideoStream::Init(
  1697. DWORD_PTR dwUser,
  1698. LPFNFRAMEREADY pfCallback
  1699. )
  1700. {
  1701. // Save the event away. Note that we DO allow both send and receive to
  1702. // share an event
  1703. m_hRenderEvent = (HANDLE)dwUser;
  1704. // if pfCallback is NULL then dwUser is an event handle
  1705. m_pfFrameReadyCallback = pfCallback;
  1706. return DPR_SUCCESS;
  1707. }
  1708. // IVideoRender::Done
1709. // identical to SendVideoStream::Done
  1710. STDMETHODIMP
  1711. RecvVideoStream::Done( )
  1712. {
  1713. m_hRenderEvent = NULL;
  1714. m_pfFrameReadyCallback = NULL;
  1715. return DPR_SUCCESS;
  1716. }
  1717. // IVideoRender::GetFrame
  1718. // (RecvVideoStream::GetFrame)
  1719. // NOTE: subtly different from SendVideoStream implementation!
  1720. STDMETHODIMP
  1721. RecvVideoStream::GetFrame(
  1722. FRAMECONTEXT* pfc
  1723. )
  1724. {
  1725. HRESULT hr;
  1726. PVOID pData = NULL;
  1727. UINT cbData = 0;
  1728. // Validate parameters
  1729. if (!pfc )
  1730. return DPR_INVALID_PARAMETER;
  1731. // Don't arbitrarily call out while holding this crs or you may deadlock...
  1732. EnterCriticalSection(&m_crs);
  1733. if ((m_DPFlags & DPFLAG_CONFIGURED_RECV) && m_pNextPacketToRender && !m_pNextPacketToRender->m_fRendering)
  1734. {
  1735. m_pNextPacketToRender->m_fRendering = TRUE;
  1736. m_pNextPacketToRender->GetDevData(&pData,&cbData);
  1737. pfc->lpData = (PUCHAR) pData;
  1738. pfc->dwReserved = (DWORD_PTR) m_pNextPacketToRender;
  1739. // set bmi length?
  1740. pfc->lpbmi = (PBITMAPINFO)&m_fDevRecv.bih;
  1741. pfc->lpClipRect = &m_cliprect;
  1742. m_cRendering++;
  1743. hr = S_OK;
  1744. LOG((LOGMSG_GET_RECV_FRAME,m_pNextPacketToRender->GetIndex()));
  1745. } else
  1746. hr = S_FALSE; // nothing ready to render
  1747. LeaveCriticalSection(&m_crs);
  1748. return hr;
  1749. }
  1750. // IVideoRender::ReleaseFrame
  1751. // NOTE: subtly different from SendVideoStream implementation!
  1752. STDMETHODIMP
  1753. RecvVideoStream::ReleaseFrame(
  1754. FRAMECONTEXT* pfc
  1755. )
  1756. {
  1757. HRESULT hr;
  1758. MediaPacket *pPacket;
  1759. // Validate parameters
  1760. if (!pfc)
  1761. return DPR_INVALID_PARAMETER;
1762. // Handle a received frame
  1763. {
  1764. EnterCriticalSection(&m_crs);
  1765. // Don't arbitrarily call out while holding this crs or you may deadlock...
  1766. if ((m_DPFlags & DPFLAG_CONFIGURED_RECV) && (pPacket = (MediaPacket *)pfc->dwReserved) && pPacket->m_fRendering)
  1767. {
  1768. LOG((LOGMSG_RELEASE_SEND_FRAME,pPacket->GetIndex()));
  1769. pPacket->m_fRendering = FALSE;
  1770. pfc->dwReserved = 0;
  1771. // if its not the current frame
  1772. if (m_pNextPacketToRender != pPacket) {
  1773. pPacket->Recycle();
  1774. m_RecvStream->Release(pPacket);
  1775. }
  1776. m_cRendering--;
  1777. hr = S_OK;
  1778. }
  1779. else
  1780. hr = DPR_INVALID_PARAMETER;
  1781. LeaveCriticalSection(&m_crs);
  1782. }
  1783. return hr;
  1784. }
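// GetFrame/ReleaseFrame above form the pull model the UI uses for received video.
// The sketch below is illustrative only and is kept out of the build: the event
// handle and quit flag are assumptions about the caller, and the actual drawing of
// fc.lpData / fc.lpbmi / fc.lpClipRect is left as a comment.
#if 0
void RenderLoopSketch(IVideoRender *pRender, HANDLE hRenderEvent, volatile BOOL *pfQuit)
{
FRAMECONTEXT fc;
while (!*pfQuit)
{
// hRenderEvent is the event handle the caller passed to IVideoRender::Init()
if (WaitForSingleObject(hRenderEvent, 500) != WAIT_OBJECT_0)
continue; // timed out - re-check the quit flag
if (pRender->GetFrame(&fc) == S_OK)
{
// fc.lpData is the decoded frame, fc.lpbmi describes it as a DIB and
// fc.lpClipRect bounds the area worth repainting; draw it here
pRender->ReleaseFrame(&fc); // hand the packet back so it can be recycled
}
// S_FALSE means nothing new to render yet
}
}
#endif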
  1785. HRESULT CALLBACK SendVideoStream::QosNotifyVideoCB(LPRESOURCEREQUESTLIST lpResourceRequestList, DWORD_PTR dwThis)
  1786. {
  1787. HRESULT hr=NOERROR;
  1788. LPRESOURCEREQUESTLIST prrl=lpResourceRequestList;
  1789. int i;
  1790. int iMaxBWUsage, iMaxCPUUsage;
  1791. DWORD dwCPUUsage, dwBWUsage;
  1792. int iCPUUsageId, iBWUsageId;
  1793. int iCPUDelta, iBWDelta, deltascale;
  1794. int iFrameRate, iMaxFrameRate, iOldFrameRate;
  1795. UINT dwSize = sizeof(int);
  1796. DWORD dwOverallCPUUsage;
  1797. #ifdef LOGSTATISTICS_ON
  1798. char szDebug[256];
  1799. HANDLE hDebugFile;
  1800. DWORD d;
  1801. #endif
  1802. DWORD dwEpoch;
  1803. SendVideoStream *pThis = (SendVideoStream *)dwThis;
  1804. FX_ENTRY("QosNotifyVideoCB");
  1805. // Get the max for the resources.
  1806. iMaxCPUUsage = -1L; iMaxBWUsage = -1L;
  1807. for (i=0, iCPUUsageId = -1L, iBWUsageId = -1L; i<(int)lpResourceRequestList->cRequests; i++)
  1808. if (lpResourceRequestList->aRequests[i].resourceID == RESOURCE_OUTGOING_BANDWIDTH)
  1809. iBWUsageId = i;
  1810. else if (lpResourceRequestList->aRequests[i].resourceID == RESOURCE_CPU_CYCLES)
  1811. iCPUUsageId = i;
  1812. // Enter critical section to allow QoS thread to read the statistics while capturing
  1813. EnterCriticalSection(&(pThis->m_crsVidQoS));
  1814. // Record the time of this callback call
  1815. pThis->m_Stats.dwNewestTs = timeGetTime();
  1816. // Only do anything if we have at least captured a frame in the previous epoch
  1817. if ((pThis->m_Stats.dwCount) && (pThis->m_Stats.dwNewestTs > pThis->m_Stats.dwOldestTs))
  1818. {
  1819. // Measure the epoch
  1820. dwEpoch = pThis->m_Stats.dwNewestTs - pThis->m_Stats.dwOldestTs;
  1821. #ifdef LOGSTATISTICS_ON
  1822. wsprintf(szDebug, " Epoch = %ld\r\n", dwEpoch);
  1823. OutputDebugString(szDebug);
  1824. #endif
  1825. // Compute the current average frame rate
  1826. iOldFrameRate = pThis->m_Stats.dwCount * 100000 / dwEpoch;
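// (Frame rates in this function are in hundredths of fps: e.g. 50 frames captured
// over a 10000 ms epoch give 50 * 100000 / 10000 = 500, i.e. 5.00 fps.)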
  1827. if (iCPUUsageId != -1L)
  1828. iMaxCPUUsage = lpResourceRequestList->aRequests[iCPUUsageId].nUnitsMin;
  1829. if (iBWUsageId != -1L)
  1830. iMaxBWUsage = lpResourceRequestList->aRequests[iBWUsageId].nUnitsMin;
  1831. // Get general BW usage
  1832. dwBWUsage = pThis->m_Stats.dwBits * 1000UL / dwEpoch;
  1833. // Get general CPU usage. In order to reduce oscillations, apply low-pass filtering operation
  1834. // We will use our own CPU usage number ONLY if the call to GetCPUUsage() fails.
  1835. if (pThis->GetCPUUsage(&dwOverallCPUUsage))
  1836. {
  1837. if (pThis->m_Stats.dwSmoothedCPUUsage)
  1838. dwCPUUsage = (pThis->m_Stats.dwSmoothedCPUUsage + dwOverallCPUUsage) >> 1;
  1839. else
  1840. dwCPUUsage = dwOverallCPUUsage;
  1841. }
  1842. else
  1843. dwCPUUsage = (pThis->m_Stats.dwMsCap + pThis->m_Stats.dwMsComp) * 1000UL / dwEpoch;
  1844. // Record current CPU usage
  1845. pThis->m_Stats.dwSmoothedCPUUsage = dwCPUUsage;
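// For example, a previous smoothed value of 300 (30.0%) and a new reading of
// 500 (50.0%) filter to (300 + 500) / 2 = 400 (40.0%). The fallback estimate only
// counts our own work: 400 ms spent capturing and compressing during a 4000 ms
// epoch gives 400 * 1000 / 4000 = 100, i.e. 10.0%.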
  1846. #ifdef LOGSTATISTICS_ON
  1847. hDebugFile = CreateFile("C:\\QoS.txt", GENERIC_WRITE, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, (HANDLE)NULL);
  1848. SetFilePointer(hDebugFile, 0, NULL, FILE_END);
  1849. wsprintf(szDebug, " Overall CPU usage = %ld\r\n", dwOverallCPUUsage);
  1850. WriteFile(hDebugFile, szDebug, strlen(szDebug), &d, NULL);
  1851. OutputDebugString(szDebug);
  1852. CloseHandle(hDebugFile);
  1853. wsprintf(szDebug, " Number of frames dwCount = %ld\r\n", pThis->m_Stats.dwCount);
  1854. OutputDebugString(szDebug);
  1855. #endif
  1856. // For this first implementation, the only output variable is the frame rate of the
  1857. // video capture
  1858. #ifdef USE_NON_LINEAR_FPS_ADJUSTMENT
  1859. if (iCPUUsageId != -1L)
  1860. {
  1861. if (dwCPUUsage)
  1862. {
  1863. iCPUDelta = (iMaxCPUUsage - (int)dwCPUUsage) * 10 / (int)dwCPUUsage;
  1864. if (iCPUDelta >= 10)
  1865. iCPUDelta = 9;
  1866. else if (iCPUDelta <= -1)
  1867. iCPUDelta = -9;
  1868. }
  1869. else
  1870. iCPUDelta = 9;
  1871. }
  1872. else
  1873. iCPUDelta = 0;
  1874. if (iBWUsageId != -1L)
  1875. {
  1876. if (dwBWUsage)
  1877. {
  1878. iBWDelta = (iMaxBWUsage - (int)dwBWUsage) * 10 / (int)dwBWUsage;
  1879. if (iBWDelta >= 10)
  1880. iBWDelta = 9;
  1881. else if (iBWDelta <= -1)
  1882. iBWDelta = -9;
  1883. }
  1884. else
  1885. iBWDelta = 9;
  1886. }
  1887. else
  1888. iBWDelta = 0;
  1889. #else
  1890. if (iCPUUsageId != -1L)
  1891. {
  1892. if (dwCPUUsage)
  1893. iCPUDelta = (iMaxCPUUsage - (int)dwCPUUsage) * 100 / (int)dwCPUUsage;
  1894. else
  1895. iCPUDelta = 90;
  1896. }
  1897. else
  1898. iCPUDelta = 0;
  1899. if (iBWUsageId != -1L)
  1900. {
  1901. if (dwBWUsage)
  1902. iBWDelta = (iMaxBWUsage - (int)dwBWUsage) * 100 / (int)dwBWUsage;
  1903. else
  1904. iBWDelta = 90;
  1905. }
  1906. else
  1907. iBWDelta = 0;
  1908. #endif
  1909. UPDATE_COUNTER(g_pctrVideoCPUuse, iCPUDelta);
  1910. UPDATE_COUNTER(g_pctrVideoBWuse, iBWDelta);
  1911. #ifdef USE_NON_LINEAR_FPS_ADJUSTMENT
  1912. iFrameRate = iOldFrameRate + iOldFrameRate * g_QoSMagic[iCPUDelta + 9][iBWDelta + 9] / 100;
  1913. #else
  1914. deltascale = iCPUDelta;
  1915. if (deltascale > iBWDelta) deltascale = iBWDelta;
  1916. if (deltascale > 90) deltascale = 90;
  1917. if (deltascale < -90) deltascale = -90;
  1918. iFrameRate = iOldFrameRate + (iOldFrameRate * deltascale) / 100;
  1919. #endif
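// Worked example for the linear adjustment: with a CPU budget of 400 (40.0%) and
// a measured usage of 500 (50.0%), iCPUDelta = (400 - 500) * 100 / 500 = -20.
// If bandwidth has more headroom, deltascale stays at -20 and a 500 (5.00 fps)
// target becomes 500 + 500 * -20 / 100 = 400 (4.00 fps).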
  1920. // Initialize QoS structure. Only the four first fields should be zeroed.
  1921. // The handle to the CPU performance key should not be cleared.
  1922. ZeroMemory(&(pThis->m_Stats), 4UL * sizeof(DWORD));
  1923. // The video should reduce its CPU and bandwidth usage quickly, but probably shouldn't
  1924. // be allowed to increase its CPU and bandwidth usage as fast. Let's increase the
  1925. // frame rate at half the speed it would be decreased when we are above 5fps.
  1926. if ((iFrameRate > iOldFrameRate) && (iFrameRate > 500))
  1927. iFrameRate -= (iFrameRate - iOldFrameRate) >> 1;
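// E.g. a jump from 600 (6.00 fps) to a computed 800 (8.00 fps) is trimmed to
// 800 - (800 - 600) / 2 = 700 (7.00 fps); decreases are still applied in full.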
  1928. // We should keep our requirements between a minimum that will allow us to catch up
  1929. // quickly and the current max frame rate
  1930. iMaxFrameRate = pThis->m_maxfps; // max negotiated for call
  1931. // if using a modem, then the frame rate is determined by the
  1932. // temporal spatial tradeoff
  1933. if (pThis->m_pTSTable)
  1934. {
  1935. iMaxFrameRate = min(iMaxFrameRate, pThis->m_pTSTable[pThis->m_dwCurrentTSSetting]);
  1936. }
  1937. if (iFrameRate > iMaxFrameRate)
  1938. iFrameRate = iMaxFrameRate;
  1939. if (iFrameRate < 50) // make sure framerate is > 0 (this does not mean 50 fps; it is .50 fps)
  1940. iFrameRate = 50;
  1941. // Update the frame rate
  1942. if (iFrameRate != iOldFrameRate)
  1943. pThis->SetProperty(PROP_VIDEO_FRAME_RATE, &iFrameRate, sizeof(int));
  1944. // Record the time of this call for the next callback call
  1945. pThis->m_Stats.dwOldestTs = pThis->m_Stats.dwNewestTs;
  1946. // Get the latest RTCP stats and update the counters.
  1947. // we do this here because it is called periodically.
  1948. if (pThis->m_pRTPSend)
  1949. {
  1950. UINT lastPacketsLost = pThis->m_RTPStats.packetsLost;
  1951. if (g_pctrVideoSendLost && SUCCEEDED(pThis->m_pRTPSend->GetSendStats(&pThis->m_RTPStats)))
  1952. UPDATE_COUNTER(g_pctrVideoSendLost, pThis->m_RTPStats.packetsLost-lastPacketsLost);
  1953. }
  1954. // Leave critical section
  1955. LeaveCriticalSection(&(pThis->m_crsVidQoS));
  1956. DEBUGMSG(ZONE_QOS, ("%s: Over the last %ld.%lds, video used %ld%% of the CPU (max allowed %ld%%) and %ld bps (max allowed %ld bps)\r\n", _fx_, dwEpoch / 1000UL, dwEpoch - (dwEpoch / 1000UL) * 1000UL, dwCPUUsage / 10UL, iMaxCPUUsage / 10UL, dwBWUsage, iMaxBWUsage));
  1957. DEBUGMSG(ZONE_QOS, ("%s: Ajusting target frame rate from %ld.%ld fps to %ld.%ld fps\r\n", _fx_, iOldFrameRate / 100UL, iOldFrameRate - (iOldFrameRate / 100UL) * 100UL, iFrameRate / 100UL, iFrameRate - (iFrameRate / 100UL) * 100UL));
  1958. // Set the target bitrates and frame rates on the codec
  1959. pThis->SetTargetRates(iFrameRate, iMaxBWUsage);
  1960. #ifdef LOGSTATISTICS_ON
  1961. // How are we doing?
  1962. if (iCPUUsageId != -1L)
  1963. {
  1964. if (iCPUDelta > 0)
  1965. wsprintf(szDebug, "Max CPU Usage: %ld, Current CPU Usage: %ld, Increase CPU Usage by: %li, Old Frame Rate: %ld, New Frame Rate: %ld\r\n", lpResourceRequestList->aRequests[iCPUUsageId].nUnitsMin, dwCPUUsage, iCPUDelta, iOldFrameRate, iFrameRate);
  1966. else
  1967. wsprintf(szDebug, "Max CPU Usage: %ld, Current CPU Usage: %ld, Decrese CPU Usage by: %li, Old Frame Rate: %ld, New Frame Rate: %ld\r\n", lpResourceRequestList->aRequests[iCPUUsageId].nUnitsMin, dwCPUUsage, iCPUDelta, iOldFrameRate, iFrameRate);
  1968. hDebugFile = CreateFile("C:\\QoS.txt", GENERIC_WRITE, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, (HANDLE)NULL);
  1969. SetFilePointer(hDebugFile, 0, NULL, FILE_END);
  1970. WriteFile(hDebugFile, szDebug, strlen(szDebug), &d, NULL);
  1971. CloseHandle(hDebugFile);
  1972. OutputDebugString(szDebug);
  1973. }
  1974. if (iBWUsageId != -1L)
  1975. {
  1976. if (iBWDelta > 0)
  1977. wsprintf(szDebug, "Max BW Usage: %ld, Current BW Usage: %ld, Increase BW Usage by: %li\r\n", lpResourceRequestList->aRequests[iBWUsageId].nUnitsMin, dwBWUsage, iBWDelta);
  1978. else
  1979. wsprintf(szDebug, "Max BW Usage: %ld, Current BW Usage: %ld, Decrease BW Usage by: %li\r\n", lpResourceRequestList->aRequests[iBWUsageId].nUnitsMin, dwBWUsage, iBWDelta);
  1980. hDebugFile = CreateFile("C:\\QoS.txt", GENERIC_WRITE, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, (HANDLE)NULL);
  1981. SetFilePointer(hDebugFile, 0, NULL, FILE_END);
  1982. WriteFile(hDebugFile, szDebug, strlen(szDebug), &d, NULL);
  1983. CloseHandle(hDebugFile);
  1984. OutputDebugString(szDebug);
  1985. }
  1986. #endif
  1987. }
  1988. else
  1989. {
  1990. // Leave critical section
  1991. LeaveCriticalSection(&(pThis->m_crsVidQoS));
  1992. #ifdef LOGSTATISTICS_ON
  1993. hDebugFile = CreateFile("C:\\QoS.txt", GENERIC_WRITE, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, (HANDLE)NULL);
  1994. SetFilePointer(hDebugFile, 0, NULL, FILE_END);
  1995. wsprintf(szDebug, "Not enough data captured -> Leave without any change\r\n");
  1996. WriteFile(hDebugFile, szDebug, strlen(szDebug), &d, NULL);
  1997. CloseHandle(hDebugFile);
  1998. OutputDebugString(szDebug);
  1999. #endif
  2000. }
  2001. return hr;
  2002. }
  2003. // SortOrder
  2004. // Helper function to search for the specific format type and set its sort
  2005. // order to the desired number
  2006. BOOL
  2007. SortOrder(
  2008. IAppVidCap *pavc,
  2009. BASIC_VIDCAP_INFO* pvidcaps,
  2010. DWORD dwcFormats,
  2011. DWORD dwFlags,
  2012. WORD wDesiredSortOrder,
  2013. int nNumFormats
  2014. )
  2015. {
  2016. int i, j;
  2017. int nNumSizes = 0;
  2018. int *aFrameSizes = (int *)NULL;
  2019. int *aMinFrameSizes = (int *)NULL;
  2020. int iMaxPos;
  2021. WORD wTempPos, wMaxSortIndex;
  2022. // Scale sort value
  2023. wDesiredSortOrder *= (WORD)nNumFormats;
  2024. // Local buffer of sizes that match dwFlags
  2025. if (!(aFrameSizes = (int *)MEMALLOC(nNumFormats * sizeof (int))))
  2026. goto out;
  2027. // Look through all the formats until we find the ones we want
  2028. // Save the position of these entries
  2029. for (i=0; i<(int)dwcFormats; i++)
  2030. if (SIZE_TO_FLAG(pvidcaps[i].enumVideoSize) == dwFlags)
  2031. aFrameSizes[nNumSizes++] = i;
  2032. // Now order those entries from highest to lowest sort index
  2033. for (i=0; i<nNumSizes; i++)
  2034. {
  2035. for (iMaxPos = -1L, wMaxSortIndex=0UL, j=i; j<nNumSizes; j++)
  2036. {
  2037. if (pvidcaps[aFrameSizes[j]].wSortIndex > wMaxSortIndex)
  2038. {
  2039. wMaxSortIndex = pvidcaps[aFrameSizes[j]].wSortIndex;
  2040. iMaxPos = j;
  2041. }
  2042. }
  2043. if (iMaxPos != -1L)
  2044. {
  2045. wTempPos = (WORD)aFrameSizes[i];
  2046. aFrameSizes[i] = aFrameSizes[iMaxPos];
  2047. aFrameSizes[iMaxPos] = wTempPos;
  2048. }
  2049. }
  2050. // Change the sort index of the sorted entries
  2051. for (; nNumSizes--;)
  2052. pvidcaps[aFrameSizes[nNumSizes]].wSortIndex = wDesiredSortOrder++;
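// E.g. with nNumFormats = 8 and a requested sort order of 2, three matching
// formats get wSortIndex values 16, 17 and 18; the one with the largest original
// wSortIndex receives 18, so their relative order within the size is preserved.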
  2053. // Release memory
  2054. MEMFREE(aFrameSizes);
  2055. return TRUE;
  2056. out:
  2057. return FALSE;
  2058. }
  2059. // LOOK: this is identical to the RecvAudioStream implementation
  2060. HRESULT
  2061. RecvVideoStream::GetCurrentPlayNTPTime(NTP_TS *pNtpTime)
  2062. {
  2063. DWORD rtpTime;
  2064. #ifdef OLDSTUFF
  2065. if ((m_DPFlags & DPFLAG_STARTED_RECV) && m_fReceiving) {
  2066. if (m_Net->RTPtoNTP(m_PlaybackTimestamp,pNtpTime))
  2067. return S_OK;
  2068. }
  2069. #endif
  2070. return 0xff; // return proper error
  2071. }
  2072. BOOL RecvVideoStream::IsEmpty() {
  2073. return m_RecvStream->IsEmpty();
  2074. }
  2075. /*
  2076. Called by the recv thread to setup the stream for receiving.
  2077. Call RTP object to post the initial recv buffer(s).
  2078. */
  2079. // NOTE: identical to audio version except for choice of number of packet buffers
  2080. HRESULT
  2081. RecvVideoStream::StartRecv(HWND hWnd)
  2082. {
  2083. HRESULT hr = S_OK;
  2084. DWORD dwPropVal = 0;
  2085. UINT numPackets;
  2086. if ((!(m_ThreadFlags & DPTFLAG_STOP_RECV) ) && (m_DPFlags & DPFLAG_CONFIGURED_RECV))
  2087. {
  2088. numPackets = m_dwSrcSize > 10000 ? MAX_VIDEO_FRAGMENTS : MAX_QCIF_VIDEO_FRAGMENTS;
  2089. hr = m_pIRTPRecv->SetRecvNotification(&RTPRecvCallback, (DWORD_PTR)this, numPackets);
  2090. }
  2091. return hr;
  2092. }
  2093. // NOTE: identical to audio version
  2094. HRESULT
  2095. RecvVideoStream::StopRecv()
  2096. {
  2097. // Free any RTP buffers that we're holding on to
  2098. m_RecvStream->ReleaseNetBuffers();
2099. // don't recv on this stream
  2100. m_pIRTPRecv->CancelRecvNotification();
  2101. return S_OK;
  2102. }
  2103. HRESULT RecvVideoStream::RTPCallback(WSABUF *pWsaBuf, DWORD timestamp, UINT seq, UINT fMark)
  2104. {
  2105. HRESULT hr;
  2106. DWORD_PTR dwPropVal;
  2107. BOOL fSkippedAFrame;
  2108. BOOL fReceivedKeyframe;
  2109. FX_ENTRY("RecvVideoStream::RTPCallback");
  2110. // if we are paused, reject the packet
  2111. if (m_ThreadFlags & DPTFLAG_PAUSE_RECV)
  2112. {
  2113. return E_FAIL;
  2114. }
2115. // PutNextNetIn will return DPR_SUCCESS to indicate a new frame
  2116. // S_FALSE if success, but no new frame
  2117. // error otherwise
  2118. // It always takes care of freeing the RTP buffers
  2119. hr = m_RecvStream->PutNextNetIn(pWsaBuf, timestamp, seq, fMark, &fSkippedAFrame, &fReceivedKeyframe);
  2120. if (m_pIUnknown)
  2121. {
  2122. // Check out the sequence number
  2123. // If there is a gap between the new sequence number and the last
  2124. // one, a frame got lost. Generate an I-Frame request then, but no more
2125. // often than one every 15 seconds. How should we handle NM 2.0 and other
2126. // clients that don't support I-Frame requests?
  2127. //
  2128. // Is there a discontinuity in sequence numbers that was detected
2129. // in the past but not handled because an I-Frame request had already
  2130. // been sent less than 15s ago? Is there a new discontinuity?
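// E.g. if the last packet seen had sequence number 100 and this one is 102,
// packet 101 was lost and an I-Frame request is due, subject to the
// MIN_IFRAME_REQUEST_INTERVAL (15 s) throttle below.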
  2131. if (FAILED(hr) || fSkippedAFrame || m_fDiscontinuity || ((seq > 0) && (m_ulLastSeq != UINT_MAX) && ((seq - 1) > m_ulLastSeq)))
  2132. {
  2133. DWORD dwNow = GetTickCount();
  2134. // Was the last time we issued an I-Frame request more than 15s ago?
  2135. if ((dwNow > m_dwLastIFrameRequest) && ((dwNow - m_dwLastIFrameRequest) > MIN_IFRAME_REQUEST_INTERVAL))
  2136. {
  2137. DEBUGMSG (ZONE_IFRAME, ("%s: Loss detected - Sending I-Frame request...\r\n", _fx_));
  2138. m_dwLastIFrameRequest = dwNow;
  2139. m_fDiscontinuity = FALSE;
  2140. // Access to the stream signal interface needs to be serialized. We could crash
  2141. // if we used the interface here while Stop() is releasing it.
  2142. EnterCriticalSection(&m_crsIStreamSignal);
  2143. if (m_pIStreamSignal)
  2144. m_pIStreamSignal->PictureUpdateRequest();
  2145. LeaveCriticalSection(&m_crsIStreamSignal);
  2146. }
  2147. else
  2148. {
  2149. if (!fReceivedKeyframe)
  2150. {
  2151. DEBUGMSG (ZONE_IFRAME, ("%s: Loss detected but too soon to send I-Frame request. Wait %ld ms.\r\n", _fx_, MIN_IFRAME_REQUEST_INTERVAL - (dwNow - m_dwLastIFrameRequest)));
  2152. m_fDiscontinuity = TRUE;
  2153. }
  2154. else
  2155. {
  2156. DEBUGMSG (ZONE_IFRAME, ("%s: Received a keyframe - resetting packet loss detector\r\n", _fx_));
  2157. m_fDiscontinuity = FALSE;
  2158. }
  2159. }
  2160. }
  2161. m_ulLastSeq = seq;
  2162. }
  2163. if (hr == DPR_SUCCESS)
  2164. {
  2165. m_OutMedia->GetProp (MC_PROP_EVENT_HANDLE, &dwPropVal);
  2166. if (dwPropVal)
  2167. {
  2168. SetEvent( (HANDLE) dwPropVal);
  2169. }
  2170. }
  2171. else if (FAILED(hr))
  2172. {
  2173. DEBUGMSG(ZONE_DP,("RVStream::PutNextNetIn (ts=%d,seq=%d,fMark=%d) failed with 0x%lX\r\n",timestamp,seq,fMark,hr));
  2174. }
  2175. return S_OK;
  2176. }
  2177. #define TOTAL_BYTES 8192
  2178. #define BYTE_INCREMENT 1024
  2179. /****************************************************************************
  2180. * @doc EXTERNAL QOSFUNC
  2181. *
  2182. * @func void | StartCPUUsageCollection | This function does all necessary
  2183. * initialization for CPU usage data collection.
  2184. *
  2185. * @rdesc Although this function doesn't ever fail, m_Stats.hPerfKey is set to a
2186. * valid HKEY value if initialization occurred correctly, and NULL otherwise.
  2187. *
2188. * @comm This function executes two different code paths: one for NT and one
  2189. * for Win95-98.
  2190. *
  2191. * @devnote MSDN references:
  2192. * Microsoft Knowledge Base, Article ID Q174631
  2193. * "HOWTO: Access the Performance Registry Under Windows 95"
  2194. *
  2195. * Microsoft Knowledge Base, Article ID Q107728
  2196. * "Retrieving Counter Data from the Registry"
  2197. *
  2198. * Microsoft Knowledge Base, Article ID Q178887
  2199. * "INFO: Troubleshooting Performance Registry Access Violations"
  2200. *
  2201. * Also, used section "Platform SDK\Windows Base Services\Windows NT Features\Performance Data Helper"
  2202. ***************************************************************************/
  2203. void SendVideoStream::StartCPUUsageCollection(void)
  2204. {
  2205. PPERF_DATA_BLOCK pPerfDataBlock;
  2206. PPERF_OBJECT_TYPE pPerfObjectType;
  2207. PPERF_COUNTER_DEFINITION pPerfCounterDefinition;
  2208. PPERF_INSTANCE_DEFINITION pPerfInstanceDefinition;
  2209. PPERF_COUNTER_BLOCK pPerfCounterBlock;
  2210. OSVERSIONINFO osvInfo = {0};
  2211. DWORD cbCounterData;
  2212. DWORD cbTryCounterData;
  2213. DWORD dwType;
  2214. HANDLE hPerfData;
  2215. char *pszData;
  2216. char *pszIndex;
  2217. char szProcessorIndex[16];
  2218. long lRet;
  2219. FX_ENTRY("SendVideoStream::StartCPUUsageCollection");
  2220. // Are we on NT or Win95/98 ?
  2221. osvInfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
  2222. GetVersionEx(&osvInfo);
  2223. if (m_Stats.fWinNT = (BOOL)(osvInfo.dwPlatformId == VER_PLATFORM_WIN32_NT))
  2224. {
  2225. // Enable the collection of CPU performance data on Win NT
  2226. // Open the registry key that contains the performance counter indices and names.
  2227. // 009 is the U.S. English language id. In a non-English version of Windows NT,
  2228. // performance counters are stored both in the native language of the system and
  2229. // in English.
  2230. if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib\\009", NULL, KEY_READ, &m_Stats.hPerfKey) != ERROR_SUCCESS)
  2231. goto MyError0;
  2232. else
  2233. {
  2234. // Get all the counter indices and names.
  2235. // Read the performance data from the registry. The size of that data may change
  2236. // between each call to the registry. We first get the current size of the buffer,
  2237. // allocate it, and try to read from the registry into it. If there already isn't
  2238. // enough room in the buffer, we realloc() it until we manage to read all the data.
  2239. if (RegQueryValueEx(m_Stats.hPerfKey, "Counters", NULL, &dwType, NULL, &cbCounterData) != ERROR_SUCCESS)
  2240. cbCounterData = TOTAL_BYTES;
  2241. // Allocate buffer for counter indices and names
  2242. if (!(m_Stats.NtCPUUsage.hPerfData = (PBYTE)LocalAlloc (LMEM_MOVEABLE, cbCounterData)))
  2243. {
  2244. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)NULL;
  2245. RegCloseKey(m_Stats.hPerfKey);
  2246. goto MyError0;
  2247. }
  2248. else
  2249. {
  2250. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)LocalLock(m_Stats.NtCPUUsage.hPerfData);
  2251. cbTryCounterData = cbCounterData;
  2252. while((lRet = RegQueryValueEx(m_Stats.hPerfKey, "Counters", NULL, NULL, m_Stats.NtCPUUsage.pbyPerfData, &cbTryCounterData)) == ERROR_MORE_DATA)
  2253. {
  2254. cbCounterData += BYTE_INCREMENT;
  2255. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2256. hPerfData = LocalReAlloc(m_Stats.NtCPUUsage.hPerfData, cbCounterData, LMEM_MOVEABLE);
  2257. if (!hPerfData)
  2258. goto MyError1;
  2259. m_Stats.NtCPUUsage.hPerfData = hPerfData;
  2260. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)LocalLock(hPerfData);
  2261. cbTryCounterData = cbCounterData;
  2262. }
  2263. // We don't need that key anymore
  2264. RegCloseKey(m_Stats.hPerfKey);
  2265. if (lRet != ERROR_SUCCESS)
  2266. goto MyError1;
  2267. else
  2268. {
  2269. // The data is stored as MULTI_SZ strings. This data type consists
  2270. // of a list of strings, each terminated with NULL. The last string
  2271. // is followed by an additional NULL. The strings are listed in
  2272. // pairs. The first string of each pair is the string of the index,
  2273. // and the second string is the actual name of the index. The Counter
  2274. // data uses only even-numbered indexes. For example, the Counter
  2275. // data contains the following object and counter name strings.
  2276. // Examples:
  2277. // 2 System
  2278. // 4 Memory
  2279. // 6 % Processor Time
  2280. //
  2281. // Look for the "% Processor Time" counter
  2282. pszData = (char *)m_Stats.NtCPUUsage.pbyPerfData;
  2283. pszIndex = (char *)m_Stats.NtCPUUsage.pbyPerfData;
  2284. while (*pszData && lstrcmpi(pszData, "% Processor Time"))
  2285. {
  2286. pszIndex = pszData;
  2287. pszData += lstrlen(pszData) + 1;
  2288. }
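// If the name was found, pszIndex is left on the string just before it, i.e. its
// index ("6" in the sample above), which atol() converts below; otherwise pszData
// now points at the list's terminating empty string.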
2289. if (!*pszData)
  2290. {
  2291. // Couldn't find "% Processor Time" counter!!!
  2292. goto MyError1;
  2293. }
  2294. else
  2295. {
  2296. m_Stats.NtCPUUsage.dwPercentProcessorIndex = atol(pszIndex);
  2297. // Look for the "Processor" object
  2298. pszIndex = pszData = (char *)m_Stats.NtCPUUsage.pbyPerfData;
  2299. while (*pszData && lstrcmpi(pszData, "Processor"))
  2300. {
  2301. pszIndex = pszData;
  2302. pszData += lstrlen(pszData) + 1;
  2303. }
2304. if (!*pszData)
  2305. {
  2306. // Couldn't find "Processor" counter!!!
  2307. goto MyError1;
  2308. }
  2309. else
  2310. {
  2311. m_Stats.NtCPUUsage.dwProcessorIndex = atol(pszIndex);
2312. CopyMemory(szProcessorIndex, pszIndex, lstrlen(pszIndex) + 1);
  2313. // Read the PERF_DATA_BLOCK header structure. It describes the system
  2314. // and the performance data. The PERF_DATA_BLOCK structure is followed
  2315. // by a list of object information blocks (one per object). We use the
  2316. // counter index to retrieve object information.
2317. // Under some circumstances (cf. Q178887 for details) the RegQueryValueEx
  2318. // function may cause an Access Violation because of a buggy performance
  2319. // extension DLL such as SQL's.
  2320. __try
  2321. {
  2322. m_Stats.NtCPUUsage.cbPerfData = cbCounterData;
  2323. while((lRet = RegQueryValueEx(HKEY_PERFORMANCE_DATA, szProcessorIndex, NULL, NULL, m_Stats.NtCPUUsage.pbyPerfData, &cbCounterData)) == ERROR_MORE_DATA)
  2324. {
  2325. m_Stats.NtCPUUsage.cbPerfData += BYTE_INCREMENT;
  2326. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2327. hPerfData = LocalReAlloc(m_Stats.NtCPUUsage.hPerfData, m_Stats.NtCPUUsage.cbPerfData, LMEM_MOVEABLE);
  2328. if (!hPerfData)
  2329. goto MyError1;
  2330. m_Stats.NtCPUUsage.hPerfData = hPerfData;
  2331. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)LocalLock(hPerfData);
  2332. cbCounterData = m_Stats.NtCPUUsage.cbPerfData;
  2333. }
  2334. }
  2335. __except(EXCEPTION_EXECUTE_HANDLER)
  2336. {
  2337. ERRORMESSAGE(("%s: Performance Registry Access Violation -> don't use perf counters for CPU measurements\r\n", _fx_));
  2338. goto MyError1;
  2339. }
  2340. if (lRet != ERROR_SUCCESS)
  2341. goto MyError1;
  2342. else
  2343. {
  2344. // Each object information block contains a PERF_OBJECT_TYPE structure,
  2345. // which describes the performance data for the object. Look for the one
  2346. // that applies to CPU usage based on its index value.
  2347. pPerfDataBlock = (PPERF_DATA_BLOCK)m_Stats.NtCPUUsage.pbyPerfData;
  2348. pPerfObjectType = (PPERF_OBJECT_TYPE)(m_Stats.NtCPUUsage.pbyPerfData + pPerfDataBlock->HeaderLength);
  2349. for (int i = 0; i < (int)pPerfDataBlock->NumObjectTypes; i++)
  2350. {
  2351. if (pPerfObjectType->ObjectNameTitleIndex == m_Stats.NtCPUUsage.dwProcessorIndex)
  2352. {
  2353. // The PERF_OBJECT_TYPE structure is followed by a list of PERF_COUNTER_DEFINITION
  2354. // structures, one for each counter defined for the object. The list of PERF_COUNTER_DEFINITION
  2355. // structures is followed by a list of instance information blocks (one for each instance).
  2356. //
  2357. // Each instance information block contains a PERF_INSTANCE_DEFINITION structure and
  2358. // a PERF_COUNTER_BLOCK structure, followed by the data for each counter.
  2359. //
  2360. // Look for the counter defined for % processor time.
  2361. pPerfCounterDefinition = (PPERF_COUNTER_DEFINITION)((PBYTE)pPerfObjectType + pPerfObjectType->HeaderLength);
  2362. for (int j = 0; j < (int)pPerfObjectType->NumCounters; j++)
  2363. {
  2364. if (pPerfCounterDefinition->CounterNameTitleIndex == m_Stats.NtCPUUsage.dwPercentProcessorIndex)
  2365. {
2366. // Note: looking at the CounterType field of the PERF_COUNTER_DEFINITION
  2367. // structure shows that the '% Processor Time' counter has the following properties:
  2368. // The counter data is a large integer (PERF_SIZE_LARGE set)
  2369. // The counter data is an increasing numeric value (PERF_TYPE_COUNTER set)
  2370. // The counter value should be divided by the elapsed time (PERF_COUNTER_RATE set)
  2371. // The time base units of the 100-nanosecond timer should be used as the base (PERF_TIMER_100NS set)
  2372. // The difference between the previous counter value and the current counter value is computed before proceeding (PERF_DELTA_BASE set)
  2373. // The display suffix is '%' (PERF_DISPLAY_PERCENT set)
  2374. // Save the number of object instances for the CPU counter, as well as the
  2375. // starting time.
  2376. m_Stats.NtCPUUsage.dwNumProcessors = pPerfObjectType->NumInstances;
  2377. if (!(m_Stats.NtCPUUsage.pllCounterValue = (PLONGLONG)LocalAlloc(LMEM_FIXED, m_Stats.NtCPUUsage.dwNumProcessors * sizeof(LONGLONG))))
  2378. goto MyError1;
  2379. m_Stats.NtCPUUsage.llPerfTime100nSec = *(PLONGLONG)&pPerfDataBlock->PerfTime100nSec;
  2380. pPerfInstanceDefinition = (PPERF_INSTANCE_DEFINITION)((PBYTE)pPerfObjectType + pPerfObjectType->DefinitionLength);
  2381. for (int k = 0; k < pPerfObjectType->NumInstances; k++)
  2382. {
  2383. // Get a pointer to the PERF_COUNTER_BLOCK
  2384. pPerfCounterBlock = (PPERF_COUNTER_BLOCK)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength);
  2385. // This last offset steps us over any other counters to the one we need
  2386. m_Stats.NtCPUUsage.pllCounterValue[k] = *(PLONGLONG)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength + pPerfCounterDefinition->CounterOffset);
  2387. // Get to the next instance information block
  2388. pPerfInstanceDefinition = (PPERF_INSTANCE_DEFINITION)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength + pPerfCounterBlock->ByteLength);
  2389. }
  2390. // We're done!
  2391. return;
  2392. }
  2393. else
  2394. pPerfCounterDefinition = (PPERF_COUNTER_DEFINITION)((PBYTE)pPerfCounterDefinition + pPerfCounterDefinition->ByteLength);
  2395. }
  2396. break;
  2397. }
  2398. else
  2399. pPerfObjectType = (PPERF_OBJECT_TYPE)((PBYTE)pPerfObjectType + pPerfObjectType->TotalByteLength);
  2400. }
  2401. // If we get here, we haven't found the counters we were looking for
  2402. goto MyError2;
  2403. }
  2404. }
  2405. }
  2406. }
  2407. }
  2408. }
  2409. }
  2410. else
  2411. {
  2412. // Enable the collection of CPU performance data on Win 95-98 by starting the kernel stat server
  2413. if (RegOpenKeyEx(HKEY_DYN_DATA, "PerfStats\\StartSrv", NULL, KEY_READ, &m_Stats.hPerfKey) != ERROR_SUCCESS)
  2414. m_Stats.hPerfKey = (HKEY)NULL;
  2415. else
  2416. {
  2417. DWORD cbData = sizeof(DWORD);
  2418. DWORD dwType;
  2419. DWORD dwData;
  2420. if (RegQueryValueEx(m_Stats.hPerfKey, "KERNEL", NULL, &dwType, (LPBYTE)&dwData, &cbData) != ERROR_SUCCESS)
  2421. {
  2422. RegCloseKey(m_Stats.hPerfKey);
  2423. m_Stats.hPerfKey = (HKEY)NULL;
  2424. }
  2425. else
  2426. {
  2427. RegCloseKey(m_Stats.hPerfKey);
  2428. // The kernel stat server is now started. Now start the CPUUsage data collection on the kernel stat server.
  2429. if (RegOpenKeyEx(HKEY_DYN_DATA, "PerfStats\\StartStat", NULL, KEY_READ, &m_Stats.hPerfKey) != ERROR_SUCCESS)
  2430. m_Stats.hPerfKey = (HKEY)NULL;
  2431. else
  2432. {
  2433. if (RegQueryValueEx(m_Stats.hPerfKey, "KERNEL\\CPUUsage", NULL, &dwType, (LPBYTE)&dwData, &cbData) != ERROR_SUCCESS)
  2434. {
  2435. RegCloseKey(m_Stats.hPerfKey);
  2436. m_Stats.hPerfKey = (HKEY)NULL;
  2437. }
  2438. else
  2439. {
  2440. RegCloseKey(m_Stats.hPerfKey);
  2441. // The data and stat servers are now started. Let's get ready to collect actual data.
  2442. if (RegOpenKeyEx(HKEY_DYN_DATA, "PerfStats\\StatData", NULL, KEY_READ, &m_Stats.hPerfKey) != ERROR_SUCCESS)
  2443. m_Stats.hPerfKey = (HKEY)NULL;
  2444. }
  2445. }
  2446. }
  2447. }
  2448. }
  2449. return;
  2450. MyError2:
  2451. if (m_Stats.NtCPUUsage.pllCounterValue)
  2452. LocalFree(m_Stats.NtCPUUsage.pllCounterValue);
  2453. m_Stats.NtCPUUsage.pllCounterValue = (PLONGLONG)NULL;
  2454. MyError1:
  2455. if (m_Stats.NtCPUUsage.hPerfData)
  2456. {
  2457. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2458. LocalFree(m_Stats.NtCPUUsage.hPerfData);
  2459. }
  2460. m_Stats.NtCPUUsage.hPerfData = (HANDLE)NULL;
  2461. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)NULL;
  2462. MyError0:
  2463. m_Stats.hPerfKey = (HKEY)NULL;
  2464. }
  2465. /****************************************************************************
  2466. * @doc EXTERNAL QOSFUNC
  2467. *
  2468. * @func void | StopCPUUsageCollection | This function does all necessary
  2469. * CPU usage data collection cleanup.
  2470. *
  2471. * @comm This function executes two different code paths: one for NT and one
  2472. * for Win95-98.
  2473. *
  2474. * @devnote MSDN references:
  2475. * Microsoft Knowledge Base, Article ID Q174631
  2476. * "HOWTO: Access the Performance Registry Under Windows 95"
  2477. *
  2478. * Microsoft Knowledge Base, Article ID Q107728
  2479. * "Retrieving Counter Data from the Registry"
  2480. *
  2481. * Also, used section "Platform SDK\Windows Base Services\Windows NT Features\Performance Data Helper"
  2482. ***************************************************************************/
  2483. void SendVideoStream::StopCPUUsageCollection(void)
  2484. {
  2485. DWORD dwType;
  2486. DWORD cbData;
  2487. if (m_Stats.fWinNT)
  2488. {
  2489. if (m_Stats.NtCPUUsage.hPerfData)
  2490. {
  2491. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2492. LocalFree(m_Stats.NtCPUUsage.hPerfData);
  2493. }
  2494. m_Stats.NtCPUUsage.hPerfData = (HANDLE)NULL;
  2495. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)NULL;
  2496. if (m_Stats.NtCPUUsage.pllCounterValue)
  2497. LocalFree(m_Stats.NtCPUUsage.pllCounterValue);
  2498. m_Stats.NtCPUUsage.pllCounterValue = (PLONGLONG)NULL;
  2499. }
  2500. else
  2501. {
  2502. if (m_Stats.hPerfKey)
  2503. {
  2504. // Close the data collection key
  2505. RegCloseKey(m_Stats.hPerfKey);
  2506. // Stop the CPUUsage data collection on the kernel stat server
  2507. if (RegOpenKeyEx(HKEY_DYN_DATA, "PerfStats\\StopStat", 0, KEY_READ, &m_Stats.hPerfKey) == ERROR_SUCCESS)
  2508. {
  2509. RegQueryValueEx(m_Stats.hPerfKey, "KERNEL\\CPUUsage", NULL, &dwType, NULL, &cbData);
  2510. RegCloseKey(m_Stats.hPerfKey);
  2511. }
  2512. // Stop the kernel stat server
  2513. if (RegOpenKeyEx(HKEY_DYN_DATA, "PerfStats\\StopSrv", 0, KEY_READ, &m_Stats.hPerfKey) == ERROR_SUCCESS)
  2514. {
  2515. RegQueryValueEx(m_Stats.hPerfKey, "KERNEL", NULL, &dwType, NULL, &cbData);
  2516. RegCloseKey(m_Stats.hPerfKey);
  2517. }
  2518. m_Stats.hPerfKey = (HKEY)NULL;
  2519. }
  2520. }
  2521. }
  2522. /****************************************************************************
  2523. * @doc EXTERNAL QOSFUNC
  2524. *
2525. * @func BOOL | GetCPUUsage | This function retrieves the current overall
2526. * CPU usage, scaled to match the QoS CPU unit.
  2527. *
  2528. * @parm PDWORD | [OUT] pdwOverallCPUUsage | Specifies a pointer to a DWORD to
  2529. * receive the current CPU usage.
  2530. *
  2531. * @rdesc Returns TRUE on success, and FALSE otherwise.
  2532. *
2533. * @comm This function executes two different code paths: one for NT and one
  2534. * for Win95-98. Note that we collect data on all CPUs on NT MP machines.
  2535. *
  2536. * @devnote MSDN references:
  2537. * Microsoft Knowledge Base, Article ID Q174631
  2538. * "HOWTO: Access the Performance Registry Under Windows 95"
  2539. *
  2540. * Microsoft Knowledge Base, Article ID Q107728
  2541. * "Retrieving Counter Data from the Registry"
  2542. *
  2543. * Also, used section "Platform SDK\Windows Base Services\Windows NT Features\Performance Data Helper"
  2544. ***************************************************************************/
  2545. BOOL SendVideoStream::GetCPUUsage(PDWORD pdwOverallCPUUsage)
  2546. {
  2547. PPERF_DATA_BLOCK pPerfDataBlock;
  2548. PPERF_OBJECT_TYPE pPerfObjectType;
  2549. PPERF_COUNTER_DEFINITION pPerfCounterDefinition;
  2550. PPERF_INSTANCE_DEFINITION pPerfInstanceDefinition;
  2551. PPERF_COUNTER_BLOCK pPerfCounterBlock;
  2552. DWORD dwType;
  2553. DWORD cbData = sizeof(DWORD);
  2554. DWORD cbTryCounterData;
  2555. HANDLE hPerfData;
  2556. LONGLONG llDeltaPerfTime100nSec;
  2557. LONGLONG llDeltaCPUUsage = (LONGLONG)NULL;
  2558. char szProcessorIndex[16];
  2559. long lRet;
  2560. FX_ENTRY("SendVideoStream::GetCPUUsage");
  2561. // We use the handle to the perf key as a way to figure out if we have been initialized correctly
  2562. if (m_Stats.hPerfKey && pdwOverallCPUUsage)
  2563. {
  2564. // Initialize result value
  2565. *pdwOverallCPUUsage = 0UL;
  2566. if (m_Stats.fWinNT && m_Stats.NtCPUUsage.pbyPerfData)
  2567. {
  2568. // Make a string out of the processor object index.
  2569. _ltoa(m_Stats.NtCPUUsage.dwProcessorIndex, szProcessorIndex, 10);
2570. // Under some circumstances (cf. Q178887 for details) the RegQueryValueEx
  2571. // function may cause an Access Violation because of a buggy performance
  2572. // extension DLL such as SQL's.
  2573. __try
  2574. {
  2575. // Read the performance data. Its size may change between each 'registry' access.
  2576. cbTryCounterData = m_Stats.NtCPUUsage.cbPerfData;
  2577. while((lRet = RegQueryValueEx(HKEY_PERFORMANCE_DATA, szProcessorIndex, NULL, &dwType, m_Stats.NtCPUUsage.pbyPerfData, &cbTryCounterData)) == ERROR_MORE_DATA)
  2578. {
  2579. m_Stats.NtCPUUsage.cbPerfData += BYTE_INCREMENT;
  2580. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2581. hPerfData = LocalReAlloc(m_Stats.NtCPUUsage.hPerfData, m_Stats.NtCPUUsage.cbPerfData, LMEM_MOVEABLE);
  2582. if (!hPerfData)
  2583. goto MyError;
  2584. m_Stats.NtCPUUsage.hPerfData = hPerfData;
  2585. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)LocalLock(hPerfData);
  2586. cbTryCounterData = m_Stats.NtCPUUsage.cbPerfData;
  2587. }
  2588. }
  2589. __except(EXCEPTION_EXECUTE_HANDLER)
  2590. {
  2591. ERRORMESSAGE(("%s: Performance Registry Access Violation -> don't use perf counters for CPU measurements\r\n", _fx_));
  2592. goto MyError;
  2593. }
  2594. if (lRet != ERROR_SUCCESS)
  2595. goto MyError;
  2596. else
  2597. {
  2598. // Read the PERF_DATA_BLOCK header structure. It describes the system
  2599. // and the performance data. The PERF_DATA_BLOCK structure is followed
  2600. // by a list of object information blocks (one per object). We use the
  2601. // counter index to retrieve object information.
  2602. //
  2603. // Each object information block contains a PERF_OBJECT_TYPE structure,
  2604. // which describes the performance data for the object. Look for the one
  2605. // that applies to CPU usage based on its index value.
  2606. pPerfDataBlock = (PPERF_DATA_BLOCK)m_Stats.NtCPUUsage.pbyPerfData;
  2607. pPerfObjectType = (PPERF_OBJECT_TYPE)(m_Stats.NtCPUUsage.pbyPerfData + pPerfDataBlock->HeaderLength);
  2608. for (int i = 0; i < (int)pPerfDataBlock->NumObjectTypes; i++)
  2609. {
  2610. if (pPerfObjectType->ObjectNameTitleIndex == m_Stats.NtCPUUsage.dwProcessorIndex)
  2611. {
  2612. // The PERF_OBJECT_TYPE structure is followed by a list of PERF_COUNTER_DEFINITION
  2613. // structures, one for each counter defined for the object. The list of PERF_COUNTER_DEFINITION
  2614. // structures is followed by a list of instance information blocks (one for each instance).
  2615. //
  2616. // Each instance information block contains a PERF_INSTANCE_DEFINITION structure and
  2617. // a PERF_COUNTER_BLOCK structure, followed by the data for each counter.
  2618. //
  2619. // Look for the counter defined for % processor time.
  2620. pPerfCounterDefinition = (PPERF_COUNTER_DEFINITION)((PBYTE)pPerfObjectType + pPerfObjectType->HeaderLength);
  2621. for (int j = 0; j < (int)pPerfObjectType->NumCounters; j++)
  2622. {
  2623. if (pPerfCounterDefinition->CounterNameTitleIndex == m_Stats.NtCPUUsage.dwPercentProcessorIndex)
  2624. {
  2625. // Measure elapsed time
  2626. llDeltaPerfTime100nSec = *(PLONGLONG)&pPerfDataBlock->PerfTime100nSec - m_Stats.NtCPUUsage.llPerfTime100nSec;
  2627. // Save the timestamp for the next round
  2628. m_Stats.NtCPUUsage.llPerfTime100nSec = *(PLONGLONG)&pPerfDataBlock->PerfTime100nSec;
  2629. pPerfInstanceDefinition = (PPERF_INSTANCE_DEFINITION)((PBYTE)pPerfObjectType + pPerfObjectType->DefinitionLength);
  2630. for (int k = 0; k < (int)pPerfObjectType->NumInstances && k < (int)m_Stats.NtCPUUsage.dwNumProcessors; k++)
  2631. {
  2632. // Get a pointer to the PERF_COUNTER_BLOCK
  2633. pPerfCounterBlock = (PPERF_COUNTER_BLOCK)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength);
  2634. // Get the CPU usage
  2635. llDeltaCPUUsage += *(PLONGLONG)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength + pPerfCounterDefinition->CounterOffset) - m_Stats.NtCPUUsage.pllCounterValue[k];
  2636. // Save the value for the next round
  2637. m_Stats.NtCPUUsage.pllCounterValue[k] = *(PLONGLONG)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength + pPerfCounterDefinition->CounterOffset);
  2638. // Go to the next instance information block
  2639. pPerfInstanceDefinition = (PPERF_INSTANCE_DEFINITION)((PBYTE)pPerfInstanceDefinition + pPerfInstanceDefinition->ByteLength + pPerfCounterBlock->ByteLength);
  2640. }
  2641. // Do a bit of checking on the return value and change its unit to match QoS unit
  2642. if ((llDeltaPerfTime100nSec != (LONGLONG)0) && pPerfObjectType->NumInstances)
  2643. if ((*pdwOverallCPUUsage = (DWORD)((LONGLONG)1000 - (LONGLONG)1000 * llDeltaCPUUsage / llDeltaPerfTime100nSec / (LONGLONG)pPerfObjectType->NumInstances)) > 1000UL)
  2644. {
  2645. *pdwOverallCPUUsage = 0UL;
  2646. return FALSE;
  2647. }
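// Example: the raw counter behind '% Processor Time' grows with idle time (hence
// the inversion above), so on a 2-processor machine 90,000,000 idle ticks summed
// across both CPUs over a 60,000,000-tick (6 s) interval give
// 1000 - 1000 * 90000000 / 60000000 / 2 = 250, i.e. 25.0% overall CPU usage.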
  2648. // We're done!
  2649. return TRUE;
  2650. }
  2651. else
  2652. pPerfCounterDefinition = (PPERF_COUNTER_DEFINITION)((PBYTE)pPerfCounterDefinition + pPerfCounterDefinition->ByteLength);
  2653. }
  2654. break;
  2655. }
  2656. else
  2657. pPerfObjectType = (PPERF_OBJECT_TYPE)((PBYTE)pPerfObjectType + pPerfObjectType->TotalByteLength);
  2658. }
  2659. // If we get here, we haven't found the counters we were looking for
  2660. goto MyError;
  2661. }
  2662. }
  2663. else
  2664. {
  2665. // Do a bit of checking on the return value and change its unit to match QoS unit.
  2666. if ((RegQueryValueEx(m_Stats.hPerfKey, "KERNEL\\CPUUsage", NULL, &dwType, (LPBYTE)pdwOverallCPUUsage, &cbData) == ERROR_SUCCESS) && (*pdwOverallCPUUsage > 0) && (*pdwOverallCPUUsage <= 100))
  2667. {
  2668. *pdwOverallCPUUsage *= 10UL;
  2669. return TRUE;
  2670. }
  2671. else
  2672. {
  2673. *pdwOverallCPUUsage = 0UL;
  2674. return FALSE;
  2675. }
  2676. }
  2677. }
  2678. return FALSE;
  2679. MyError:
  2680. if (m_Stats.NtCPUUsage.pllCounterValue)
  2681. LocalFree(m_Stats.NtCPUUsage.pllCounterValue);
  2682. m_Stats.NtCPUUsage.pllCounterValue = (PLONGLONG)NULL;
  2683. if (m_Stats.NtCPUUsage.hPerfData)
  2684. {
  2685. LocalUnlock(m_Stats.NtCPUUsage.hPerfData);
  2686. LocalFree(m_Stats.NtCPUUsage.hPerfData);
  2687. }
  2688. m_Stats.NtCPUUsage.hPerfData = (HANDLE)NULL;
  2689. m_Stats.NtCPUUsage.pbyPerfData = (PBYTE)NULL;
  2690. m_Stats.hPerfKey = (HKEY)NULL;
  2691. return FALSE;
  2692. }
  2693. BOOL SendVideoStream::SetTargetRates(DWORD dwTargetFrameRate, DWORD dwTargetBitrate)
  2694. {
  2695. MMRESULT mmr;
  2696. ASSERT(m_pVideoFilter);
  2697. mmr = m_pVideoFilter->SetTargetRates(dwTargetFrameRate, dwTargetBitrate >> 3);
  2698. return (mmr == MMSYSERR_NOERROR);
  2699. }
  2700. // dwFlags must be one of the following:
  2701. // CAPTURE_DIALOG_FORMAT
  2702. // CAPTURE_DIALOG_SOURCE
  2703. HRESULT __stdcall SendVideoStream::ShowDeviceDialog(DWORD dwFlags)
  2704. {
  2705. DWORD dwQueryFlags = 0;
  2706. DWORD_PTR dwPropVal;
  2707. HRESULT hr=DPR_INVALID_PARAMETER;
  2708. // the device must be "open" for us to display the dialog box
  2709. if (!(m_DPFlags & DPFLAG_CONFIGURED_SEND))
  2710. return DPR_NOT_CONFIGURED;
  2711. ((VideoInControl*)m_InMedia)->GetProp(MC_PROP_VFW_DIALOGS, &dwPropVal);
  2712. dwQueryFlags = (DWORD)dwPropVal;
  2713. if ((dwQueryFlags & CAPTURE_DIALOG_SOURCE) && (dwFlags & CAPTURE_DIALOG_SOURCE))
  2714. {
  2715. hr = ((VideoInControl *)m_InMedia)->DisplayDriverDialog(GetActiveWindow(), CAPTURE_DIALOG_SOURCE);
  2716. }
  2717. else if ((dwQueryFlags & CAPTURE_DIALOG_FORMAT) && (dwFlags & CAPTURE_DIALOG_FORMAT))
  2718. {
  2719. hr = ((VideoInControl *)m_InMedia)->DisplayDriverDialog(GetActiveWindow(), CAPTURE_DIALOG_FORMAT);
  2720. }
  2721. return hr;
  2722. }
  2723. // will set dwFlags to one or more of the following bits
  2724. // CAPTURE_DIALOG_FORMAT
  2725. // CAPTURE_DIALOG_SOURCE
  2726. HRESULT __stdcall SendVideoStream::GetDeviceDialog(DWORD *pdwFlags)
  2727. {
  2728. HRESULT hr;
  2729. DWORD_PTR dwPropVal;
  2730. hr = ((VideoInControl*)m_InMedia)->GetProp(MC_PROP_VFW_DIALOGS, &dwPropVal);
  2731. *pdwFlags = (DWORD)dwPropVal;
  2732. return hr;
  2733. }
  2734.