Leaked source code of windows server 2003
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1211 lines
35 KiB

  1. // $Header: G:/SwDev/WDM/Video/bt848/rcs/Vidch.cpp 1.22 1998/05/12 20:39:19 tomz Exp $
  2. #include "vidch.h"
  3. #include "defaults.h"
  4. #include "fourcc.h"
  5. #include "capmain.h"
  6. #ifdef HAUPPAUGE
  7. #include "HCWDebug.h"
  8. #endif
  9. void CheckSrbStatus( PHW_STREAM_REQUEST_BLOCK pSrb );
  10. BOOL VideoChannel::bIsVBI()
  11. {
  12. PSTREAMEX pStrmEx = (PSTREAMEX)GetStrmEx( );
  13. if ( pStrmEx->StreamNumber == STREAM_IDX_VBI )
  14. {
  15. return TRUE;
  16. }
  17. else
  18. {
  19. return FALSE;
  20. }
  21. }
  22. BOOL VideoChannel::bIsVideo()
  23. {
  24. PSTREAMEX pStrmEx = (PSTREAMEX)GetStrmEx( );
  25. if (( pStrmEx->StreamNumber == STREAM_IDX_PREVIEW ) ||
  26. ( pStrmEx->StreamNumber == STREAM_IDX_CAPTURE ))
  27. {
  28. return TRUE;
  29. }
  30. else
  31. {
  32. return FALSE;
  33. }
  34. }
/* Method: VideoChannel::SetDigitalWindow
 * Purpose: Sets the output (digital) image size by forwarding the
 *   rectangle to the digitizer together with this channel's field
 * Input: r: MRect & - requested output rectangle
 * Output: ErrorCode propagated from the digitizer
 */
ErrorCode VideoChannel::SetDigitalWindow( MRect &r )
{
Trace t("VideoChannel::SetDigitalWindow()");
return Digitizer_->SetDigitalWindow( r, *OurField_ );
}
/* Method: VideoChannel::SetAnalogWindow
 * Purpose: Sets the analog dimension (source crop on the incoming
 *   video signal) for this stream's field
 * Input: r: MRect & - analog source rectangle
 * Output: ErrorCode propagated from the digitizer
 */
ErrorCode VideoChannel::SetAnalogWindow( MRect &r )
{
Trace t("VideoChannel::SetAnalogWindow()");
return Digitizer_->SetAnalogWindow( r, *OurField_ );
}
  55. /* Method: VideoChannel::OpenChannel
  56. * Purpose: Allocates a stream from a capture chip
  57. * Input:
  58. * Output:
  59. * Note: It is possible that the current implementation does not require an
  60. * elaborate stream allocation scheme. Nonetheless it is used as number of
  61. * streams can increase in the future and their dynamics can change
  62. */
  63. ErrorCode VideoChannel::OpenChannel()
  64. {
  65. Trace t("VideoChannel::OpenChannel()");
  66. // can not open twice
  67. if ( IsOpen() == true )
  68. return Fail;
  69. if ( Digitizer_->AllocateStream( OurField_, Stream_ ) == Success ) {
  70. // store information for all subsequent calls
  71. SetPaired( false );
  72. OurField_->SetCallback( &Caller_ );
  73. SetInterrupt( true );
  74. // flag the state
  75. SetOpen();
  76. SetDefaultQue();
  77. return Success;
  78. }
  79. return Fail;
  80. }
  81. /* Method: VideoChannel::CloseChannel
  82. * Purpose: Closes the channel. Makes sure everything is freed
  83. * Input:
  84. * Output:
  85. */
  86. ErrorCode VideoChannel::CloseChannel()
  87. {
  88. Trace t("VideoChannel::CloseChannel()");
  89. if ( !IsOpen() )
  90. return Fail;
  91. Stop( );
  92. while( !BufQue_.IsEmpty( ) )
  93. {
  94. DataBuf buf = BufQue_.Get();
  95. }
  96. BufQue_.Flush();
  97. while( !Requests_.IsEmpty( ) )
  98. {
  99. PHW_STREAM_REQUEST_BLOCK pSrb = Requests_.Get();
  100. if ( RemoveSRB( pSrb ))
  101. {
  102. DebugOut((0, " RemoveSRB failed\n"));
  103. DEBUG_BREAKPOINT();
  104. }
  105. }
  106. Requests_.Flush();
  107. SetClose();
  108. return Success;
  109. }
/* Method: VideoChannel::SetFormat
 * Purpose: Programs the digitizer with the pixel format for this field
 * Input: aFormat: ColFmt - color format to use
 * Output: always Success
 * NOTE(review): the return value of SetPixelFormat() is discarded, so a
 *   format the hardware rejects is still reported as Success - confirm
 *   whether SetPixelFormat can fail.
 */
ErrorCode VideoChannel::SetFormat( ColFmt aFormat )
{
Trace t("VideoChannel::SetFormat()");
Digitizer_->SetPixelFormat( aFormat, *OurField_ );
return Success;
}
/* Method: VideoChannel::GetFormat
 * Purpose: Queries the digitizer for the pixel format currently
 *   programmed for this channel's field
 * Input: None
 * Output: the ColFmt in effect
 */
ColFmt VideoChannel::GetFormat()
{
Trace t("VideoChannel::GetFormat()");
return Digitizer_->GetPixelFormat( *OurField_ );
}
  131. /* Method: VideoChannel::AddBuffer
  132. * Purpose: This function adds a buffer to a queue
  133. * Input: pNewBuffer: PVOID - pointer to a buffer to add
  134. * Output: None
  135. * Note: This function 'does not know' where the queue is located. It just uses
  136. * a pointer to it.
  137. */
  138. void VideoChannel::AddBuffer( PVOID pPacket )
  139. {
  140. Trace t("VideoChannel::AddBuffer()");
  141. DataBuf buf( GetSRB(), pPacket );
  142. BufQue_.Put( buf );
  143. DebugOut((1, "AddBuf %x\n", pPacket ) );
  144. LONGLONG *pB1 = (LONGLONG *)pPacket;
  145. LONGLONG *pB2 = pB1 + 1;
  146. #ifdef DEBUG
  147. for ( UINT i = 0; i < 640; i++ ) {
  148. #endif
  149. *pB1 = 0xAAAAAAAA33333333;
  150. *pB2 = 0xBBBBBBBB22222222;
  151. #ifdef DEBUG
  152. pB1 += 2;
  153. pB2 += 2;
  154. }
  155. #endif
  156. }
/* Method: VideoChannel::ResetCounters
 * Purpose: Reset the frame info counters
 * Input: None
 * Output: None
 */
VOID VideoChannel::ResetCounters( )
{
// NOTE(review): this tests the stream allocation index Stream_, while
// bIsVBI() tests ((PSTREAMEX)GetStrmEx())->StreamNumber - confirm the
// two indices always agree for the VBI stream.
ULONG StreamNumber = Stream_;
if ( StreamNumber == STREAM_IDX_VBI )
{
// VBI stream: reset the KS_VBI_FRAME_INFO copy kept in the stream extension
PKS_VBI_FRAME_INFO pSavedFrameInfo = &((PSTREAMEX)GetStrmEx())->FrameInfo.VbiFrameInfo;
pSavedFrameInfo->ExtendedHeaderSize = sizeof( KS_VBI_FRAME_INFO );
pSavedFrameInfo->PictureNumber = 0;
pSavedFrameInfo->DropCount = 0;
}
else
{
// video streams: reset the KS_FRAME_INFO copy kept in the stream extension
PKS_FRAME_INFO pSavedFrameInfo = &((PSTREAMEX)GetStrmEx())->FrameInfo.VideoFrameInfo;
pSavedFrameInfo->ExtendedHeaderSize = sizeof( KS_FRAME_INFO );
pSavedFrameInfo->PictureNumber = 0;
pSavedFrameInfo->DropCount = 0;
}
}
/* Method: VideoChannel::TimeStamp
 * Purpose: Performs the standard buffer massaging when it's done:
 *   fills in the KSSTREAM_HEADER (size, duration, timestamps), updates
 *   the per-stream frame statistics, copies them into the outbound
 *   KS_FRAME_INFO, completes the SRB to the class driver and, if the
 *   buffer queue had been full, asks for the next data request.
 * Input: pSrb - the request whose buffer the hardware just filled
 * Output: None
 */
void STREAMAPI VideoChannel::TimeStamp( PHW_STREAM_REQUEST_BLOCK pSrb )
{
Trace t("VideoChannel::TimeStamp()");
PKSSTREAM_HEADER pDataPacket = pSrb->CommandData.DataBufferArray;
// recover the owning channel from the stream extension
VideoChannel *chan = (VideoChannel *)((PSTREAMEX)pSrb->StreamObject->HwStreamExtension)->videochannel;
pDataPacket->PresentationTime.Numerator = 1;
pDataPacket->PresentationTime.Denominator = 1;
// report the full image size from whichever header flavor is in use
if( chan->IsVideoInfo2() )
{
pDataPacket->DataUsed = chan->GetVidHdr2()->bmiHeader.biSizeImage;
}
else
{
pDataPacket->DataUsed = chan->GetVidHdr()->bmiHeader.biSizeImage;
}
pDataPacket->Duration = chan->GetTimePerFrame();
DebugOut((1, "DataUsed = %d\n", pDataPacket->DataUsed));
// [TMZ] [!!!] - hack, timestamping seems broken
// The master-clock branch below is deliberately disabled (if(0)); every
// completion currently takes the else path: no valid time/duration flags
// and a zero presentation time.
if( 0 ) {
//if( hMasterClock ) {
pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
//pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
HW_TIME_CONTEXT TimeContext;
TimeContext.HwDeviceExtension = (struct _HW_DEVICE_EXTENSION *)pSrb->HwDeviceExtension;
TimeContext.HwStreamObject = pSrb->StreamObject;
TimeContext.Function = TIME_GET_STREAM_TIME;
StreamClassQueryMasterClockSync (
chan->hMasterClock,
&TimeContext
);
/*
LARGE_INTEGER Delta;
Delta.QuadPart = TimeContext.Time;
if( TimeContext.Time > (ULONGLONG) Delta.QuadPart )
{
pDataPacket->PresentationTime.Time = TimeContext.Time;
} else {
pDataPacket->PresentationTime.Time = 0;
}
*/
pDataPacket->PresentationTime.Time = TimeContext.Time;
} else {
pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
pDataPacket->PresentationTime.Time = 0;
}
// now gather the statistics
// NOTE(review): DropCount is hard-wired to 0 here, so the discontinuity
// branch below can never fire - confirm whether drops should be counted.
PKS_FRAME_INFO pSavedFrameInfo = &((PSTREAMEX)chan->GetStrmEx())->FrameInfo.VideoFrameInfo;
pSavedFrameInfo->ExtendedHeaderSize = sizeof( KS_FRAME_INFO );
pSavedFrameInfo->PictureNumber++;
pSavedFrameInfo->DropCount = 0;
// the KS_FRAME_INFO extension lives immediately after the stream header
PKS_FRAME_INFO pFrameInfo =
(PKS_FRAME_INFO) ( pSrb->CommandData.DataBufferArray + 1 );
// copy the information to the outbound buffer
pFrameInfo->ExtendedHeaderSize = pSavedFrameInfo->ExtendedHeaderSize;
pFrameInfo->PictureNumber = pSavedFrameInfo->PictureNumber;
pFrameInfo->DropCount = pSavedFrameInfo->DropCount;
if ( pFrameInfo->DropCount ) {
pSrb->CommandData.DataBufferArray->OptionsFlags |=
KSSTREAM_HEADER_OPTIONSF_DATADISCONTINUITY;
}
// Every frame we generate is a key frame (aka SplicePoint)
// Delta frames (B or P) should not set this flag
pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_SPLICEPOINT;
// make the stream class driver happy
pSrb->Status = STATUS_SUCCESS;
DebugOut((1, "*** 2 *** completing SRB %x\n", pSrb));
CheckSrbStatus( pSrb );
StreamClassStreamNotification( StreamRequestComplete, pSrb->StreamObject, pSrb );
DebugOut((1, "Signal SRB - %x\n", pSrb->CommandData.DataBufferArray->Data ) );
DebugOut((1, "********** NeedNotification_ = %d\n", chan->NeedNotification_ ) );
if ( chan->NeedNotification_ ) {
// queue was full; now it has at least one entry
StreamClassStreamNotification( ReadyForNextStreamDataRequest, pSrb->StreamObject );
}
}
  262. /* Method: VideoChannel::Interrupt
  263. * Purpose: Called by the interface class on behalf of capture chip to let know
  264. * an interrupt happened.
  265. * Input: pTag: PVOID, to be passed to the Digitizer_
  266. * Output: None
  267. */
  268. void VideoChannel::Interrupt( PVOID pTag, bool skipped )
  269. {
  270. Trace t("VideoChannel::Interrupt()");
  271. Digitizer_->ProcessBufferAtInterrupt( pTag );
  272. if ( skipped ) {
  273. DebugOut((1, "VidChan::Interrupt skipped\n" ) );
  274. return;
  275. }
  276. // let the class driver know we are done with this buffer
  277. if ( !Requests_.IsEmpty() ) {
  278. PHW_STREAM_REQUEST_BLOCK pSrb = Requests_.Get();
  279. TimeStamp( pSrb ); // [TMZ] [!!!] [HACK]
  280. }
  281. }
  282. /* Method: VideoChannel::Create
  283. * Purpose: Creates the stream
  284. * Input: None
  285. * Output: None
  286. */
  287. ErrorCode VideoChannel::Create()
  288. {
  289. Trace t("VideoChannel::Create()");
  290. KS_VIDEOINFOHEADER* pVideoInfoHdr = NULL;
  291. KS_VIDEOINFOHEADER2* pVideoInfoHdr2 = NULL;
  292. DWORD biCompression;
  293. WORD biBitCount;
  294. LONG biWidth;
  295. LONG biHeight;
  296. LONG biWidthBytes;
  297. if( IsVideoInfo2() )
  298. {
  299. pVideoInfoHdr2 = GetVidHdr2();
  300. biCompression = pVideoInfoHdr2->bmiHeader.biCompression;
  301. biBitCount = pVideoInfoHdr2->bmiHeader.biBitCount;
  302. biWidth = pVideoInfoHdr2->bmiHeader.biWidth;
  303. biHeight = abs(pVideoInfoHdr2->bmiHeader.biHeight);
  304. }
  305. else
  306. {
  307. pVideoInfoHdr = GetVidHdr();
  308. biCompression = pVideoInfoHdr->bmiHeader.biCompression;
  309. biBitCount = pVideoInfoHdr->bmiHeader.biBitCount;
  310. biWidth = pVideoInfoHdr->bmiHeader.biWidth;
  311. biHeight = abs(pVideoInfoHdr->bmiHeader.biHeight);
  312. }
  313. MRect analog( 0, 0, biWidth, biHeight );
  314. MRect ImageRect( 0, 0, biWidth, biHeight );
  315. DebugOut((1, "**************************************************************************\n"));
  316. DebugOut((1, "biCompression = %d\n", biCompression));
  317. DebugOut((1, "biBitCount = %d\n", biBitCount));
  318. if ( pVideoInfoHdr->bmiHeader.biCompression == 3)
  319. {
  320. if( IsVideoInfo2() )
  321. {
  322. pVideoInfoHdr2->bmiHeader.biCompression = FCC_YUY2;
  323. biCompression = FCC_YUY2;
  324. }
  325. else
  326. {
  327. pVideoInfoHdr->bmiHeader.biCompression = FCC_YUY2;
  328. biCompression = FCC_YUY2;
  329. }
  330. }
  331. ColorSpace tmp( biCompression, biBitCount );
  332. DebugOut((1, "ColorFormat = %d\n", tmp.GetColorFormat()));
  333. DebugOut((1, "**************************************************************************\n"));
  334. OurField_->ResetCounters();
  335. ResetCounters();
  336. // verify that we are not asked to produce a smaller image
  337. #ifdef HACK_FUDGE_RECTANGLES
  338. if( IsVideoInfo2() )
  339. {
  340. if( pVideoInfoHdr2->rcTarget.bottom == 0 )
  341. {
  342. // [!!!] [TMZ] - hack
  343. pVideoInfoHdr2->rcTarget.left = 0;
  344. pVideoInfoHdr2->rcTarget.top = 0;
  345. pVideoInfoHdr2->rcTarget.right = biWidth;
  346. pVideoInfoHdr2->rcTarget.bottom = biHeight;
  347. }
  348. }
  349. else
  350. {
  351. if( pVideoInfoHdr->rcTarget.bottom == 0 )
  352. {
  353. // [!!!] [TMZ] - hack
  354. pVideoInfoHdr->rcTarget.left = 0;
  355. pVideoInfoHdr->rcTarget.top = 0;
  356. pVideoInfoHdr->rcTarget.right = biWidth;
  357. pVideoInfoHdr->rcTarget.bottom = biHeight;
  358. }
  359. }
  360. #endif
  361. MRect dst;
  362. MRect src;
  363. if( IsVideoInfo2() )
  364. {
  365. dst.Set( pVideoInfoHdr2->rcTarget.left, pVideoInfoHdr2->rcTarget.top, pVideoInfoHdr2->rcTarget.right, pVideoInfoHdr2->rcTarget.bottom );
  366. src.Set( pVideoInfoHdr2->rcSource.left, pVideoInfoHdr2->rcSource.top, pVideoInfoHdr2->rcSource.right, pVideoInfoHdr2->rcSource.bottom );
  367. }
  368. else
  369. {
  370. dst.Set( pVideoInfoHdr->rcTarget.left, pVideoInfoHdr->rcTarget.top, pVideoInfoHdr->rcTarget.right, pVideoInfoHdr->rcTarget.bottom );
  371. src.Set( pVideoInfoHdr->rcSource.left, pVideoInfoHdr->rcSource.top, pVideoInfoHdr->rcSource.right, pVideoInfoHdr->rcSource.bottom );
  372. }
  373. if ( !dst.IsEmpty() )
  374. {
  375. // use the new size
  376. ImageRect = dst;
  377. if ( !src.IsEmpty() )
  378. {
  379. analog = src;
  380. }
  381. else
  382. {
  383. analog = dst;
  384. }
  385. // calculate the offset for the new beginning of the data
  386. dwBufferOffset_ = dst.top * biWidth + dst.left * tmp.GetPitchBpp();
  387. // when rcTarget is non-empty, biWidth is stride of the buffer
  388. biWidthBytes = biWidth;
  389. }
  390. else
  391. {
  392. biWidthBytes = biWidth * tmp.GetPitchBpp() / 8;
  393. }
  394. if( IsVideoInfo2() )
  395. {
  396. DebugOut((1, "pVideoInfoHdr2->rcTarget(%d, %d, %d, %d)\n",
  397. pVideoInfoHdr2->rcTarget.left,
  398. pVideoInfoHdr2->rcTarget.top,
  399. pVideoInfoHdr2->rcTarget.right,
  400. pVideoInfoHdr2->rcTarget.bottom
  401. ));
  402. }
  403. else
  404. {
  405. DebugOut((1, "pVideoInfoHdr->rcTarget(%d, %d, %d, %d)\n",
  406. pVideoInfoHdr->rcTarget.left,
  407. pVideoInfoHdr->rcTarget.top,
  408. pVideoInfoHdr->rcTarget.right,
  409. pVideoInfoHdr->rcTarget.bottom
  410. ));
  411. }
  412. DebugOut((1, "dst(%d, %d, %d, %d)\n",
  413. dst.left,
  414. dst.top,
  415. dst.right,
  416. dst.bottom
  417. ));
  418. DebugOut((1, "Pitch =%d, width = %d\n", biWidthBytes, dst.Width() ) );
  419. SetBufPitch( biWidthBytes );
  420. if ( SetAnalogWindow ( analog ) == Success && //<-must be set first !
  421. SetDigitalWindow( ImageRect ) == Success &&
  422. SetFormat( tmp.GetColorFormat() ) == Success &&
  423. Digitizer_->Create( *OurField_ ) == Success )
  424. {
  425. State_ = Created;
  426. return Success;
  427. }
  428. return Fail;
  429. }
/* Method: VideoChannel::Start
 * Purpose: Starts the stream: flags the channel as started and tells the
 *   digitizer to begin capturing into this channel's field
 * Input: None
 * Output: None
 */
void VideoChannel::Start()
{
Trace t("VideoChannel::Start()");
State_ = Started;
Digitizer_->Start( *OurField_ );
}
  441. /* Method: VideoChannel::Stop
  442. * Purpose: Stops the stream
  443. * Input: None
  444. * Output: None
  445. */
  446. ErrorCode VideoChannel::Stop()
  447. {
  448. Trace t("VideoChannel::Stop()");
  449. if ( !IsOpen() )
  450. return Fail;
  451. Digitizer_->Stop( *OurField_ );
  452. State_ = Open;
  453. while( !BufQue_.IsEmpty( ) )
  454. {
  455. DataBuf buf = BufQue_.Get();
  456. }
  457. BufQue_.Flush();
  458. return Success;
  459. }
/* Method: VideoChannel::Pause
 * Purpose: Pauses the stream and resets the frame counters
 * Input: None
 * Output: always Success
 */
ErrorCode VideoChannel::Pause()
{
Trace t("VideoChannel::Pause()");
Digitizer_->Pause( *OurField_ );
State_ = Paused;
OurField_->ResetCounters(); // jaybo
ResetCounters();
return Success;
}
/* Method: VideoChanIface::Notify
 * Purpose: Notifies the VideoChannel that an interrupt happened by
 *   forwarding the tag to the channel registered at construction
 * Input: pTag - opaque tag handed through to the channel
 *        skipped - buffer was not written to
 * Output: None
 */
void VideoChanIface::Notify( PVOID pTag, bool skipped )
{
Trace t("VideoChanIface::Notify()");
ToBeNotified_->Interrupt( pTag, skipped );
}
/* Method: VideoChannel::AddSRB
 * Purpose: Adds SRB and buffer to the queues: remembers the request,
 *   queues its data buffer (shifted by the target-rectangle offset) for
 *   the hardware, records the field type in the outbound frame info and
 *   asks the class driver for more data if there is room.
 * Input: pSrb - the new data request from the class driver
 * Output: None
 */
void VideoChannel::AddSRB( PHW_STREAM_REQUEST_BLOCK pSrb )
{
Trace t("VideoChannel::AddSRB()");
Requests_.Put( pSrb );
SetSRB( pSrb );
PUCHAR pBufAddr = (PUCHAR)pSrb->CommandData.DataBufferArray->Data;
// dwBufferOffset_ was computed in Create() from the target rectangle
AddBuffer( pBufAddr + dwBufferOffset_ );
// don't forget to report our field type !
// this cast is valid for VBI FRAME as well ( see ksmedia.h )
PKS_FRAME_INFO pFrameInfo =
(PKS_FRAME_INFO) ( pSrb->CommandData.DataBufferArray + 1 );
pFrameInfo->dwFrameFlags = FieldType_;
// ask for more buffers
CheckNotificationNeed();
}
  504. /* Method: VideoChannel::RemoveSRB
  505. * Purpose: Removes SRB from the queue and signals it
  506. * Input: pSrb
  507. * Output: None
  508. */
  509. bool VideoChannel::RemoveSRB( PHW_STREAM_REQUEST_BLOCK pSrb )
  510. {
  511. Trace t("VideoChannel::RemoveSRB()");
  512. /*
  513. //FGR - TODO: i guess we should see if there really is a record of this SRB
  514. if(Requests_.IsEmpty()){
  515. pSrb->Status = STATUS_CANCELLED;
  516. DebugOut((1, "*** 3 *** completing SRB %x\n", pSrb));
  517. CheckSrbStatus( pSrb );
  518. StreamClassStreamNotification( StreamRequestComplete, pSrb->StreamObject, pSrb );
  519. //StreamClassStreamNotification( ReadyForNextStreamDataRequest, pSrb->StreamObject );
  520. return( true );
  521. }
  522. */
  523. int n = 0;
  524. n = Requests_.GetNumOfItems();
  525. DebugOut((1, "VideoChannel::RemoveSRB - Found %d SRBs in queue\n", n));
  526. bool bFound = false;
  527. // cycle through the list
  528. // pull from the head, put to the tail
  529. // if we find our pSrb during one cycle, pull it out
  530. while ( n-- > 0 ) // yes it can go negative
  531. {
  532. PHW_STREAM_REQUEST_BLOCK pTempSrb = Requests_.Get();
  533. if ( pTempSrb == pSrb )
  534. {
  535. // Pull him out
  536. if ( bFound )
  537. {
  538. DebugOut((0, "Found pSrb(%x) in the queue more than once\n", pSrb));
  539. DEBUG_BREAKPOINT();
  540. }
  541. else
  542. {
  543. bFound = true;
  544. pSrb->Status = STATUS_CANCELLED;
  545. DebugOut((1, "*** 4 *** completing SRB %x\n", pSrb));
  546. CheckSrbStatus( pSrb );
  547. StreamClassStreamNotification( StreamRequestComplete, pSrb->StreamObject, pSrb );
  548. //StreamClassStreamNotification( ReadyForNextStreamDataRequest, pSrb->StreamObject );
  549. }
  550. n--; // warning: if this is the last, it will go negative
  551. }
  552. else
  553. {
  554. Requests_.Put( pTempSrb );
  555. }
  556. }
  557. n = Requests_.GetNumOfItems();
  558. DebugOut((1, "VideoChannel::RemoveSRB - Left %d SRBs in queue, returning %d\n", n, bFound));
  559. /*
  560. PHW_STREAM_REQUEST_BLOCK InQueSRB = Requests_.PeekLeft();
  561. if ( InQueSRB == pSrb ) {
  562. InQueSRB = Requests_.Get();
  563. InQueSRB->Status = STATUS_CANCELLED;
  564. DebugOut((1, "Cancel SRB -%x\n", pSrb ) );
  565. CheckSrbStatus( pSrb );
  566. StreamClassStreamNotification( StreamRequestComplete,
  567. InQueSRB->StreamObject, InQueSRB );
  568. if ( Requests_.IsEmpty() )
  569. DebugOut((1, " queue is empty\n" ) );
  570. else
  571. DebugOut((1, "queue is not empty\n" ) );
  572. return( true );
  573. } else {
  574. // DebugOut((1, "Cancelling wrong SRB ! - %x, %x\n", pSrb, InQueSRB ) );
  575. //#ifdef HAUPPAUGE
  576. // TRAP();
  577. //#endif
  578. // }
  579. InQueSRB = Requests_.PeekRight();
  580. if ( InQueSRB == pSrb ) {
  581. InQueSRB = Requests_.GetRight();
  582. InQueSRB->Status = STATUS_CANCELLED;
  583. DebugOut((1, "Cancel SRB from right - %x\n", pSrb ) );
  584. CheckSrbStatus( pSrb );
  585. StreamClassStreamNotification( StreamRequestComplete,
  586. pSrb->StreamObject, pSrb );
  587. return( true );
  588. } else {
  589. DebugOut((0, "Cancelling wrong SRB from right too! - %x, %x\n", pSrb, InQueSRB ) );
  590. return( false );
  591. }
  592. }
  593. */
  594. return( bFound );
  595. }
/* Method: VideoChannel::~VideoChannel
 * Purpose: Destructor - makes sure the channel is closed so the stream,
 *   buffers and pending requests are released
 */
VideoChannel::~VideoChannel()
{
Trace t("VideoChannel::~VideoChannel()");
CloseChannel();
}
  601. /* Method: VideoChannel::CheckNotificationNeed
  602. * Purpose: Sees if there is room for more buffers
  603. * Input: None
  604. * Output: None
  605. */
  606. void VideoChannel::CheckNotificationNeed()
  607. {
  608. Trace t("VideoChannel::CheckNotificationNeed()");
  609. if ( !BufQue_.IsFull() ) {
  610. // always hungry for more
  611. StreamClassStreamNotification( ReadyForNextStreamDataRequest, pSRB_->StreamObject );
  612. NeedNotification_ = false;
  613. } else
  614. NeedNotification_ = true;
  615. }
/* Method: InterVideoChannel::Interrupt
 * Purpose: Processes the interrupt for the interleaved video streams:
 *   forwards an adjusted tag to the slave (other field) channel, then
 *   lets the base class complete the master's buffer
 * Input: pTag: PVOID - index in reality
 *        skipped: bool - indicates if buffer was written to
 * Output: None
 */
void InterVideoChannel::Interrupt( PVOID pTag, bool skipped )
{
Trace t("InterVideoChannel::Interrupt()");
// NOTE(review): casting the tag pointer to int truncates on 64-bit
// builds; harmless only while the tag is a small index - confirm.
int idx = (int)pTag;
slave.IntNotify( PVOID( idx - ProgsWithinField ), skipped );
Parent::Interrupt( pTag, skipped );
}
/* Method: InterVideoChannel::AddSRB
 * Purpose: Adds SRB to itself and dispatches 2 buffer pointers, one to each
 *   channel, so the two fields interleave into a single frame buffer.
 * Input: pSrb
 * Output: None
 */
void InterVideoChannel::AddSRB( PHW_STREAM_REQUEST_BLOCK pSrb )
{
Trace t("InterVideoChannel::AddSRB()");
PUCHAR pBufAddr = (PUCHAR)pSrb->CommandData.DataBufferArray->Data;
// biWidth was set in Create()
// biWidth holds the doubled pitch after Create(), so half of it is the
// byte offset of the second field's first scan line
UINT biWidthBytes;
if( IsVideoInfo2() )
{
biWidthBytes = VidHeader2_.bmiHeader.biWidth / 2;
}
else
{
biWidthBytes = VidHeader_.bmiHeader.biWidth / 2;
}
// to be used when adding buffer
SetSRB( pSrb );
slave.SetSRB( pSrb );
// need to swap addresses for even/odd fields for RGB formats due to up-side-down bitmaps
ColorSpace tmp( GetFormat() );
if ( !( tmp.GetColorFormat() > CF_RGB8 && tmp.GetColorFormat() < CF_VBI ) )
{
// put buffer in its place
// and adjusted address into the other channel
slave.AddBuffer( pBufAddr + biWidthBytes );
AddBuffer( pBufAddr );
}
else
{
// RGB: bottom-up bitmap, so the fields swap starting lines
slave.AddBuffer( pBufAddr );
AddBuffer( pBufAddr + biWidthBytes );
}
// don't forget to add the SRB !
Requests_.Put( pSrb );
// set field type to full frame.
PKS_FRAME_INFO pFrameInfo = (PKS_FRAME_INFO)( pSrb->CommandData.DataBufferArray + 1 );
pFrameInfo->dwFrameFlags = KS_VIDEO_FLAG_FRAME;
CheckNotificationNeed();
}
  673. /* Function: SplitFrame
  674. * Purpose: Halfs the size of the video image so 2 fields can be used to create
  675. * the original size
  676. * Input: VidHdr: KS_VIDEOINFOHEADER &
  677. * Output: None
  678. */
  679. inline void SplitFrame( KS_VIDEOINFOHEADER &VidHdr )
  680. {
  681. Trace t("SplitFrame()");
  682. VidHdr.bmiHeader.biHeight /= 2;
  683. VidHdr.rcSource.top /= 2;
  684. VidHdr.rcTarget.top /= 2;
  685. VidHdr.rcSource.bottom /= 2;
  686. VidHdr.rcTarget.bottom /= 2;
  687. }
  688. inline void SplitFrame2( KS_VIDEOINFOHEADER2 &VidHdr2 )
  689. {
  690. Trace t("SplitFrame()");
  691. VidHdr2.bmiHeader.biHeight /= 2;
  692. VidHdr2.rcSource.top /= 2;
  693. VidHdr2.rcTarget.top /= 2;
  694. VidHdr2.rcSource.bottom /= 2;
  695. VidHdr2.rcTarget.bottom /= 2;
  696. }
/* Method: InterVideoChannel::Create
 * Purpose: Sets the video parameters for the slave channel and
 *   calls into parent to create both. The frame is split into two fields
 *   and the pitch is doubled so the master and slave write alternating
 *   scan lines of the same buffer.
 * Input: None
 * Output: ErrorCode from Parent::Create()
 */
ErrorCode InterVideoChannel::Create()
{
Trace t("InterVideoChannel::Create()");
// slave.SetInterrupt( false );
slave.SetCallback( 0 );
// restore the original as SplitFrame mangles the parameters
MRect dst;
DWORD biCompression;
WORD biBitCount;
LONG biWidthBytes;
if( IsVideoInfo2() )
{
VidHeader2_ = OrigVidHeader2_;
// split a frame into two fields
SplitFrame2( VidHeader2_ );
// double up the pitch, so we can interleave the buffers
dst.Set( VidHeader2_.rcTarget.left, VidHeader2_.rcTarget.top, VidHeader2_.rcTarget.right, VidHeader2_.rcTarget.bottom );
biCompression = VidHeader2_.bmiHeader.biCompression;
biBitCount = VidHeader2_.bmiHeader.biBitCount;
}
else
{
VidHeader_ = OrigVidHeader_;
// split a frame into two fields
SplitFrame( VidHeader_ );
// double up the pitch, so we can interleave the buffers
dst.Set( VidHeader_.rcTarget.left, VidHeader_.rcTarget.top, VidHeader_.rcTarget.right, VidHeader_.rcTarget.bottom );
biCompression = VidHeader_.bmiHeader.biCompression;
biBitCount = VidHeader_.bmiHeader.biBitCount;
}
ColorSpace tmp( biCompression, biBitCount );
if ( !dst.IsEmpty() )
{
// biWidth is the stride in bytes
// non-empty rcTarget: biWidth already is a byte stride, so just
// quadruple it (x2 interleave, x2 ???)
// NOTE(review): the 2 * 2 factor is not explained here - confirm why
// the stride doubles twice in the rcTarget case.
if( IsVideoInfo2() )
{
VidHeader2_.bmiHeader.biWidth *= 2 * 2;
biWidthBytes = VidHeader2_.bmiHeader.biWidth;
}
else
{
VidHeader_.bmiHeader.biWidth *= 2 * 2;
biWidthBytes = VidHeader_.bmiHeader.biWidth;
}
}
else
{
if( IsVideoInfo2() )
{
// calculate the number of bytes per scan line
biWidthBytes = tmp.GetPitchBpp() * VidHeader2_.bmiHeader.biWidth / 8;
// can it be non-aligned ??
biWidthBytes += 3;
biWidthBytes &= ~3;
// must be increased two times to interleave the fields;
biWidthBytes *= 2;
// the rcTarget uses half the original height and full width
VidHeader2_.rcTarget = MRect(
0,
0,
VidHeader2_.bmiHeader.biWidth,
abs(VidHeader2_.bmiHeader.biHeight)
);
DebugOut((1, "VidHeader2_.rcTarget(%d, %d, %d, %d)\n",
VidHeader2_.rcTarget.left,
VidHeader2_.rcTarget.top,
VidHeader2_.rcTarget.right,
VidHeader2_.rcTarget.bottom
));
// have to trick the slave into using correct ( doubled ) pitch
VidHeader2_.bmiHeader.biWidth = biWidthBytes; // this is the pitch slave uses
}
else
{
// calculate the number of bytes per scan line
biWidthBytes = tmp.GetPitchBpp() * VidHeader_.bmiHeader.biWidth / 8;
// can it be non-aligned ??
biWidthBytes += 3;
biWidthBytes &= ~3;
// must be increased two times to interleave the fields;
biWidthBytes *= 2;
// the rcTarget uses half the original height and full width
VidHeader_.rcTarget = MRect(
0,
0,
VidHeader_.bmiHeader.biWidth,
abs(VidHeader_.bmiHeader.biHeight)
);
DebugOut((1, "VidHeader_.rcTarget(%d, %d, %d, %d)\n",
VidHeader_.rcTarget.left,
VidHeader_.rcTarget.top,
VidHeader_.rcTarget.right,
VidHeader_.rcTarget.bottom
));
// have to trick the slave into using correct ( doubled ) pitch
VidHeader_.bmiHeader.biWidth = biWidthBytes; // this is the pitch slave uses
}
}
SetBufPitch( biWidthBytes );
// at this point slave will have all the members set up properly
if( IsVideoInfo2() )
{
slave.SetVidHdr2( VidHeader2_ );
}
else
{
slave.SetVidHdr( VidHeader_ );
}
slave.SetPaired( true );
// needed for full-size YUV9 and other planar modes
Digitizer_->SetPlanarAdjust( biWidthBytes / 2 );
return Parent::Create();
}
/* Method: VideoChannel::GetStreamType
 * Purpose: reports back type of the stream. Used when destroying channels.
 *   The base class is always a single (non-interleaved) stream; derived
 *   classes may report differently.
 */
StreamType VideoChannel::GetStreamType()
{
Trace t("VideoChannel::GetStreamType()");
return Single;
}
  824. /* Method: VideoChannel::TimeStampVBI
  825. * Purpose: Performs the standard buffer massaging when it's done
  826. * Input: pSrb
  827. * Output: None
  828. */
  829. void STREAMAPI VideoChannel::TimeStampVBI( PHW_STREAM_REQUEST_BLOCK pSrb )
  830. {
  831. Trace t("VideoChannel::TimeStamp()");
  832. PKSSTREAM_HEADER pDataPacket = pSrb->CommandData.DataBufferArray;
  833. VideoChannel *chan = (VideoChannel *)((PSTREAMEX)pSrb->StreamObject->HwStreamExtension)->videochannel;
  834. pDataPacket->PresentationTime.Numerator = 1;
  835. pDataPacket->PresentationTime.Denominator = 1;
  836. if( chan->IsVideoInfo2() )
  837. {
  838. pDataPacket->DataUsed = chan->GetVidHdr2()->bmiHeader.biSizeImage;
  839. }
  840. else
  841. {
  842. pDataPacket->DataUsed = chan->GetVidHdr()->bmiHeader.biSizeImage;
  843. }
  844. pDataPacket->Duration = chan->GetTimePerFrame();
  845. DebugOut((1, "DataUsed = %d\n", pDataPacket->DataUsed));
  846. // [TMZ] [!!!] - hack, timestamping seems broken
  847. if( 0 ) {
  848. //if( hMasterClock ) {
  849. pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
  850. pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
  851. //pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
  852. HW_TIME_CONTEXT TimeContext;
  853. TimeContext.HwDeviceExtension = (struct _HW_DEVICE_EXTENSION *)pSrb->HwDeviceExtension;
  854. TimeContext.HwStreamObject = pSrb->StreamObject;
  855. TimeContext.Function = TIME_GET_STREAM_TIME;
  856. StreamClassQueryMasterClockSync (
  857. chan->hMasterClock,
  858. &TimeContext
  859. );
  860. /*
  861. LARGE_INTEGER Delta;
  862. Delta.QuadPart = TimeContext.Time;
  863. if( TimeContext.Time > (ULONGLONG) Delta.QuadPart )
  864. {
  865. pDataPacket->PresentationTime.Time = TimeContext.Time;
  866. } else {
  867. pDataPacket->PresentationTime.Time = 0;
  868. }
  869. */
  870. pDataPacket->PresentationTime.Time = TimeContext.Time;
  871. } else {
  872. pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
  873. pDataPacket->OptionsFlags &= ~KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
  874. pDataPacket->PresentationTime.Time = 0;
  875. }
  876. PKS_VBI_FRAME_INFO pSavedFrameInfo = &((PSTREAMEX)chan->GetStrmEx())->FrameInfo.VbiFrameInfo;
  877. pSavedFrameInfo->ExtendedHeaderSize = sizeof( PKS_VBI_FRAME_INFO );
  878. pSavedFrameInfo->PictureNumber++;
  879. pSavedFrameInfo->DropCount = 0;
  880. // now gather the statistics
  881. PKS_VBI_FRAME_INFO pFrameInfo =
  882. (PKS_VBI_FRAME_INFO) ( pSrb->CommandData.DataBufferArray + 1 );
  883. // copy the information to the outbound buffer
  884. pFrameInfo->ExtendedHeaderSize = pSavedFrameInfo->ExtendedHeaderSize;
  885. pFrameInfo->PictureNumber = pSavedFrameInfo->PictureNumber;
  886. pFrameInfo->DropCount = pSavedFrameInfo->DropCount;
  887. pFrameInfo->dwSamplingFrequency = VBISampFreq; // Bug - changes with video format
  888. if ( ((VBIChannel*)(chan))->Dirty_ ) { // propagate the tv tuner change notification
  889. ((VBIChannel*)(chan))->Dirty_ = false;
  890. pFrameInfo->TvTunerChangeInfo = ((VBIChannel*)(chan))->TVTunerChangeInfo_;
  891. pFrameInfo->dwFrameFlags |= KS_VBI_FLAG_TVTUNER_CHANGE;
  892. pFrameInfo->VBIInfoHeader = ((VBIChannel*)(chan))->VBIInfoHeader_;
  893. pFrameInfo->dwFrameFlags |= KS_VBI_FLAG_VBIINFOHEADER_CHANGE ;
  894. } else {
  895. pFrameInfo->dwFrameFlags &= ~KS_VBI_FLAG_TVTUNER_CHANGE;
  896. pFrameInfo->dwFrameFlags &= ~KS_VBI_FLAG_VBIINFOHEADER_CHANGE;
  897. }
  898. if ( pFrameInfo->DropCount ) {
  899. pSrb->CommandData.DataBufferArray->OptionsFlags |=
  900. KSSTREAM_HEADER_OPTIONSF_DATADISCONTINUITY;
  901. }
  902. // Every frame we generate is a key frame (aka SplicePoint)
  903. // Delta frames (B or P) should not set this flag
  904. pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_SPLICEPOINT;
  905. // make the stream class driver happy
  906. pSrb->Status = STATUS_SUCCESS;
  907. DebugOut((1, "*** 5 *** completing SRB %x\n", pSrb));
  908. CheckSrbStatus( pSrb );
  909. StreamClassStreamNotification( StreamRequestComplete, pSrb->StreamObject, pSrb );
  910. DebugOut((1, "Signal SRB - %x\n", pSrb->CommandData.DataBufferArray->Data ) );
  911. DebugOut((1, "********** NeedNotification_ = %d\n", chan->NeedNotification_ ) );
  912. if ( chan->NeedNotification_ ) {
  913. // queue was full; now it has at least one entry
  914. StreamClassStreamNotification( ReadyForNextStreamDataRequest,
  915. pSrb->StreamObject );
  916. }
  917. }
/* Method: VBIChannel::Interrupt
 * Purpose: Processes the interrupt for the VBI channel: lets the digitizer
 *    process the buffer identified by pTag and, unless the frame was skipped,
 *    dequeues the oldest outstanding SRB and timestamps/completes it
 *    ( see TimeStampVBI ).
 * Input: pTag: opaque buffer tag handed to ProcessBufferAtInterrupt()
 *    skipped: true if this frame was dropped; no SRB is completed then
 */
void VBIChannel::Interrupt( PVOID pTag, bool skipped )
{
   Trace t("VBIChannel::Interrupt()");
   // nothing to complete if no read requests are outstanding
   if ( Requests_.IsEmpty( ) )
   {
      DebugOut((1, "VBI interrupt, but Requests_ is empty\n"));
      return;
   }
   // save the SRB for further processing ( it would be gone from the queue in
   // Parent::Interrupt )
   // NOTE(review): pSrb is unused now that the Parent::Interrupt() call below
   // is commented out; candidate for removal once PeekLeft() is confirmed to
   // have no side effects.
   PHW_STREAM_REQUEST_BLOCK pSrb = Requests_.PeekLeft();
   // Parent::Interrupt( pTag, skipped );
   // the braced block below is apparently the inlined body of the
   // commented-out Parent::Interrupt call above
   {
      Digitizer_->ProcessBufferAtInterrupt( pTag );
      if ( skipped ) {
         // dropped frame: leave the SRB queued for the next interrupt
         DebugOut((1, "VidChan::Interrupt skipped\n" ) );
         return;
      }
      // let the class driver know we are done with this buffer
      if ( !Requests_.IsEmpty() ) {
         PHW_STREAM_REQUEST_BLOCK pTimeSrb = Requests_.Get();
         TimeStampVBI( pTimeSrb ); // [TMZ] [!!!]
      }
   }
}
  945. /* Method: VBIChannel::ChangeNotification
  946. * Purpose: Called to save off the tv tuner change notification
  947. * Input: pSrb
  948. */
  949. void VBIChannel::ChangeNotification( PHW_STREAM_REQUEST_BLOCK pSrb )
  950. {
  951. Trace t("VBIChannel::ChangeNotification()");
  952. const KSSTREAM_HEADER &DataPacket = *pSrb->CommandData.DataBufferArray;
  953. RtlCopyMemory( &TVTunerChangeInfo_, DataPacket.Data, sizeof( KS_TVTUNER_CHANGE_INFO ) );
  954. Dirty_ = true;
  955. }
/* Method: VideoChannel::ChangeNotification
 * Purpose: Noop for the base class. VBIChannel overrides this to latch the
 *    TV tuner change notification; plain video streams have nothing to do.
 * Input: unnamed PHW_STREAM_REQUEST_BLOCK - ignored here
 */
void VideoChannel::ChangeNotification( PHW_STREAM_REQUEST_BLOCK )
{
   Trace t("VideoChannel::ChangeNotification()");
}
  963. /* Method: VBIAlterChannel::SetVidHdr
  964. * Purpose: Transforms the VBI parameters ( size ) into regular video header
  965. * Input:
  966. */
  967. void VBIAlterChannel::SetVidHdr( const KS_DATAFORMAT_VBIINFOHEADER &df )
  968. {
  969. Trace t("VBIAlterChannel::SetVidHdr()");
  970. // save for the history ( for the interrupt, actually )
  971. SetVBIInfHdr( df.VBIInfoHeader );
  972. (*(VBIChannel*)&slave).SetVBIInfHdr( df.VBIInfoHeader );
  973. KS_VIDEOINFOHEADER VidInfHdr;
  974. RtlZeroMemory( &VidInfHdr, sizeof( VidInfHdr ) );
  975. // create a regular video info header
  976. VidInfHdr.bmiHeader.biWidth = VBISamples;
  977. VidInfHdr.bmiHeader.biHeight =
  978. df.VBIInfoHeader.EndLine - df.VBIInfoHeader.StartLine + 1; // inclusive
  979. // taken from the VBI GUID
  980. VidInfHdr.bmiHeader.biCompression = FCC_VBI;
  981. VidInfHdr.bmiHeader.biBitCount = 8;
  982. // this is very important too
  983. VidInfHdr.bmiHeader.biSizeImage =
  984. VidInfHdr.bmiHeader.biWidth * VidInfHdr.bmiHeader.biHeight;
  985. // now handle the case when stride is larger than width ( have to set the
  986. // target rectangle )
  987. if ( df.VBIInfoHeader.StrideInBytes > VBISamples ) {
  988. VidInfHdr.rcTarget.right = df.VBIInfoHeader.StrideInBytes;
  989. VidInfHdr.rcTarget.bottom = VidInfHdr.bmiHeader.biHeight;
  990. }
  991. // the Parent::Create will take care of setting vid header for the slave
  992. Parent::SetVidHdr( VidInfHdr );
  993. }
  994. //??? TODO: -- is this needed?
  995. void VBIAlterChannel::SetVidHdr2( const KS_DATAFORMAT_VBIINFOHEADER &df )
  996. {
  997. Trace t("VBIAlterChannel::SetVidHdr2()");
  998. // save for the history ( for the interrupt, actually )
  999. SetVBIInfHdr( df.VBIInfoHeader );
  1000. KS_VIDEOINFOHEADER2 VidInfHdr;
  1001. RtlZeroMemory( &VidInfHdr, sizeof( VidInfHdr ) );
  1002. // create a regular video info header
  1003. VidInfHdr.bmiHeader.biWidth = VBISamples;
  1004. VidInfHdr.bmiHeader.biHeight =
  1005. df.VBIInfoHeader.EndLine - df.VBIInfoHeader.StartLine + 1; // inclusive
  1006. // taken from the VBI GUID
  1007. VidInfHdr.bmiHeader.biCompression = FCC_VBI;
  1008. VidInfHdr.bmiHeader.biBitCount = 8;
  1009. // this is very important too
  1010. VidInfHdr.bmiHeader.biSizeImage =
  1011. VidInfHdr.bmiHeader.biWidth * VidInfHdr.bmiHeader.biHeight;
  1012. // now handle the case when stride is larger than width ( have to set the
  1013. // target rectangle )
  1014. if ( df.VBIInfoHeader.StrideInBytes > VBISamples ) {
  1015. VidInfHdr.rcTarget.right = df.VBIInfoHeader.StrideInBytes;
  1016. VidInfHdr.rcTarget.bottom = VidInfHdr.bmiHeader.biHeight;
  1017. }
  1018. // the Parent::Create will take care of setting vid header for the slave
  1019. Parent::SetVidHdr2( VidInfHdr );
  1020. }