Leaked source code of Windows Server 2003


//==========================================================================;
//
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
//
// Copyright (c) 1992 - 1996 Microsoft Corporation. All Rights Reserved.
//
//==========================================================================;

#ifndef __CAPSTRM_H__
#define __CAPSTRM_H__

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

KSPIN_MEDIUM StandardMedium = {
    STATIC_KSMEDIUMSETID_Standard,
    0, 0
};

// ------------------------------------------------------------------------
// The master list of all streams supported by this driver
// ------------------------------------------------------------------------

typedef enum {
    STREAM_Capture,
#ifndef TOSHIBA
    STREAM_Preview,
    STREAM_AnalogVideoInput
#endif // TOSHIBA
};
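
// Note: these enumerators (0, 1, 2) match the StreamNumber fields in the
// Streams[] table below and serve as indices into that array.  In the
// TOSHIBA build only STREAM_Capture exists, so the driver exposes a single
// stream.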

// ------------------------------------------------------------------------
// Property sets for all video capture streams
// ------------------------------------------------------------------------

DEFINE_KSPROPERTY_TABLE(VideoStreamConnectionProperties)
{
    DEFINE_KSPROPERTY_ITEM
    (
        KSPROPERTY_CONNECTION_ALLOCATORFRAMING,
        TRUE,                                   // GetSupported or Handler
        sizeof(KSPROPERTY),                     // MinProperty
        sizeof(KSALLOCATOR_FRAMING),            // MinData
        FALSE,                                  // SetSupported or Handler
        NULL,                                   // Values
        0,                                      // RelationsCount
        NULL,                                   // Relations
        NULL,                                   // SupportHandler
        0                                       // SerializedSize
    ),
};

DEFINE_KSPROPERTY_TABLE(VideoStreamDroppedFramesProperties)
{
    DEFINE_KSPROPERTY_ITEM
    (
        KSPROPERTY_DROPPEDFRAMES_CURRENT,
        TRUE,                                       // GetSupported or Handler
        sizeof(KSPROPERTY_DROPPEDFRAMES_CURRENT_S), // MinProperty
        sizeof(KSPROPERTY_DROPPEDFRAMES_CURRENT_S), // MinData
        FALSE,                                      // SetSupported or Handler
        NULL,                                       // Values
        0,                                          // RelationsCount
        NULL,                                       // Relations
        NULL,                                       // SupportHandler
        0                                           // SerializedSize
    ),
};

// ------------------------------------------------------------------------
// Array of all of the property sets supported by video streams
// ------------------------------------------------------------------------

DEFINE_KSPROPERTY_SET_TABLE(VideoStreamProperties)
{
    DEFINE_KSPROPERTY_SET
    (
        &KSPROPSETID_Connection,                            // Set
        SIZEOF_ARRAY(VideoStreamConnectionProperties),      // PropertiesCount
        VideoStreamConnectionProperties,                    // PropertyItem
        0,                                                  // FastIoCount
        NULL                                                // FastIoTable
    ),
    DEFINE_KSPROPERTY_SET
    (
        &PROPSETID_VIDCAP_DROPPEDFRAMES,                    // Set
        SIZEOF_ARRAY(VideoStreamDroppedFramesProperties),   // PropertiesCount
        VideoStreamDroppedFramesProperties,                 // PropertyItem
        0,                                                  // FastIoCount
        NULL                                                // FastIoTable
    ),
};

#define NUMBER_VIDEO_STREAM_PROPERTIES (SIZEOF_ARRAY(VideoStreamProperties))

//---------------------------------------------------------------------------
// All of the video and vbi data formats we might use
//---------------------------------------------------------------------------

#define D_X 320
#define D_Y 240
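
// With the 320 x 240 defaults above, the per-frame SampleSize values below
// work out as follows: YVU9 averages 9 bits per pixel, so (320 * 240 * 9) / 8
// = 86,400 bytes; YUV12 averages 12 bits per pixel, so (320 * 240 * 12) / 8
// = 115,200 bytes; RGB24 uses 3 bytes per pixel, so 320 * 240 * 3 = 230,400
// bytes.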

#ifdef TOSHIBA
static KS_DATARANGE_VIDEO StreamFormatYVU9_Capture =
{
    // KSDATARANGE
    {
        sizeof (KS_DATARANGE_VIDEO),            // FormatSize
        0,                                      // Flags
        (D_X * D_Y * 9) / 8,                    // SampleSize
        0,                                      // Reserved
        STATIC_KSDATAFORMAT_TYPE_VIDEO,         // aka. MEDIATYPE_Video
        FOURCC_YVU9, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71, // MEDIASUBTYPE_YVU9
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO // aka. FORMAT_VideoInfo
    },
    TRUE,               // BOOL, bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL, bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags (KS_VIDEO_ALLOC_*))

    // _KS_VIDEO_STREAM_CONFIG_CAPS
    {
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO, // GUID
#if 1
        KS_AnalogVideo_None,    // VideoStandard
#else
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,   // AnalogVideoStandard
#endif
        640, 480,           // InputSize, (the inherent size of the incoming signal
                            //             with every digitized pixel unique)
        160, 120,           // MinCroppingSize, smallest rcSrc cropping rect allowed
        640, 480,           // MaxCroppingSize, largest rcSrc cropping rect allowed
        2,                  // CropGranularityX, granularity of cropping size
        2,                  // CropGranularityY
        2,                  // CropAlignX, alignment of cropping rect
        2,                  // CropAlignY;
        160, 120,           // MinOutputSize, smallest bitmap stream can produce
        640, 480,           // MaxOutputSize, largest bitmap stream can produce
        16,                 // OutputGranularityX, granularity of output bitmap size
        4,                  // OutputGranularityY;
        0,                  // StretchTapsX (0 no stretch, 1 pix dup, 2 interp...)
        0,                  // StretchTapsY
        2,                  // ShrinkTapsX
        2,                  // ShrinkTapsY
        333667,             // MinFrameInterval, 100 nS units
        640000000,          // MaxFrameInterval, 100 nS units
        30 * 160 * 120 * 9, // MinBitsPerSecond;
        30 * 640 * 480 * 9  // MaxBitsPerSecond;
    },

    // KS_VIDEOINFOHEADER (default format)
    {
        0, 0, 0, 0,                     // RECT  rcSource;
        0, 0, 0, 0,                     // RECT  rcTarget;
        D_X * D_Y * 9 / 8 * 30,         // DWORD dwBitRate;
        0L,                             // DWORD dwBitErrorRate;
        333667,                         // REFERENCE_TIME AvgTimePerFrame;
        sizeof (KS_BITMAPINFOHEADER),   // DWORD biSize;
        D_X,                            // LONG  biWidth;
        D_Y,                            // LONG  biHeight;
        1,                              // WORD  biPlanes;
        9,                              // WORD  biBitCount;
        FOURCC_YVU9,                    // DWORD biCompression;
        D_X * D_Y * 9 / 8,              // DWORD biSizeImage;
        0,                              // LONG  biXPelsPerMeter;
        0,                              // LONG  biYPelsPerMeter;
        0,                              // DWORD biClrUsed;
        0                               // DWORD biClrImportant;
    }
};
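
// Note on the interval values above: frame intervals are in 100 nS units,
// so MinFrameInterval = 333667 is roughly 33.37 ms per frame (about 29.97
// frames per second, the NTSC rate) and MaxFrameInterval = 640000000 is 64
// seconds per frame.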

static KS_DATARANGE_VIDEO StreamFormatYUV12_Capture =
{
    // KSDATARANGE
    {
        sizeof (KS_DATARANGE_VIDEO),            // FormatSize
        0,                                      // Flags
        (D_X * D_Y * 12) / 8,                   // SampleSize
        0,                                      // Reserved
        STATIC_KSDATAFORMAT_TYPE_VIDEO,         // aka. MEDIATYPE_Video
        FOURCC_YUV12, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71, // MEDIASUBTYPE_YUV12
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO // aka. FORMAT_VideoInfo
    },
    TRUE,               // BOOL, bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL, bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags (KS_VIDEO_ALLOC_*))

    // _KS_VIDEO_STREAM_CONFIG_CAPS
    {
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO, // GUID
#if 1
        KS_AnalogVideo_None,    // VideoStandard
#else
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,   // AnalogVideoStandard
#endif
        640, 480,               // InputSize, (the inherent size of the incoming signal
                                //             with every digitized pixel unique)
        160, 120,               // MinCroppingSize, smallest rcSrc cropping rect allowed
        640, 480,               // MaxCroppingSize, largest rcSrc cropping rect allowed
        2,                      // CropGranularityX, granularity of cropping size
        2,                      // CropGranularityY
        2,                      // CropAlignX, alignment of cropping rect
        2,                      // CropAlignY;
        160, 120,               // MinOutputSize, smallest bitmap stream can produce
        640, 480,               // MaxOutputSize, largest bitmap stream can produce
        16,                     // OutputGranularityX, granularity of output bitmap size
        4,                      // OutputGranularityY;
        0,                      // StretchTapsX (0 no stretch, 1 pix dup, 2 interp...)
        0,                      // StretchTapsY
        2,                      // ShrinkTapsX
        2,                      // ShrinkTapsY
        333667,                 // MinFrameInterval, 100 nS units
        640000000,              // MaxFrameInterval, 100 nS units
        30 * 160 * 120 * 12,    // MinBitsPerSecond;
        30 * 640 * 480 * 12     // MaxBitsPerSecond;
    },

    // KS_VIDEOINFOHEADER (default format)
    {
        0, 0, 0, 0,                     // RECT  rcSource;
        0, 0, 0, 0,                     // RECT  rcTarget;
        D_X * D_Y * 12 / 8 * 30,        // DWORD dwBitRate;
        0L,                             // DWORD dwBitErrorRate;
        333667,                         // REFERENCE_TIME AvgTimePerFrame;
        sizeof (KS_BITMAPINFOHEADER),   // DWORD biSize;
        D_X,                            // LONG  biWidth;
        D_Y,                            // LONG  biHeight;
        1,                              // WORD  biPlanes;
        12,                             // WORD  biBitCount;
        FOURCC_YUV12,                   // DWORD biCompression;
        D_X * D_Y * 12 / 8,             // DWORD biSizeImage;
        0,                              // LONG  biXPelsPerMeter;
        0,                              // LONG  biYPelsPerMeter;
        0,                              // DWORD biClrUsed;
        0                               // DWORD biClrImportant;
    }
};

#else // TOSHIBA

static KS_DATARANGE_VIDEO StreamFormatRGB24Bpp_Capture =
{
    // KSDATARANGE
    {
        sizeof (KS_DATARANGE_VIDEO),            // FormatSize
        0,                                      // Flags
        D_X * D_Y * 3,                          // SampleSize
        0,                                      // Reserved
        STATIC_KSDATAFORMAT_TYPE_VIDEO,         // aka. MEDIATYPE_Video
        0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70, // MEDIASUBTYPE_RGB24
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO // aka. FORMAT_VideoInfo
    },
    TRUE,               // BOOL, bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL, bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags (KS_VIDEO_ALLOC_*))

    // _KS_VIDEO_STREAM_CONFIG_CAPS
    {
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO, // GUID
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,   // AnalogVideoStandard
        720, 480,               // InputSize, (the inherent size of the incoming signal
                                //             with every digitized pixel unique)
        160, 120,               // MinCroppingSize, smallest rcSrc cropping rect allowed
        720, 480,               // MaxCroppingSize, largest rcSrc cropping rect allowed
        8,                      // CropGranularityX, granularity of cropping size
        1,                      // CropGranularityY
        8,                      // CropAlignX, alignment of cropping rect
        1,                      // CropAlignY;
        160, 120,               // MinOutputSize, smallest bitmap stream can produce
        720, 480,               // MaxOutputSize, largest bitmap stream can produce
        8,                      // OutputGranularityX, granularity of output bitmap size
        1,                      // OutputGranularityY;
        0,                      // StretchTapsX (0 no stretch, 1 pix dup, 2 interp...)
        0,                      // StretchTapsY
        0,                      // ShrinkTapsX
        0,                      // ShrinkTapsY
        333667,                 // MinFrameInterval, 100 nS units
        640000000,              // MaxFrameInterval, 100 nS units
        8 * 3 * 30 * 160 * 120, // MinBitsPerSecond;
        8 * 3 * 30 * 720 * 480  // MaxBitsPerSecond;
    },

    // KS_VIDEOINFOHEADER (default format)
    {
        0, 0, 0, 0,                     // RECT  rcSource;
        0, 0, 0, 0,                     // RECT  rcTarget;
        D_X * D_Y * 3 * 30,             // DWORD dwBitRate;
        0L,                             // DWORD dwBitErrorRate;
        333667,                         // REFERENCE_TIME AvgTimePerFrame;
        sizeof (KS_BITMAPINFOHEADER),   // DWORD biSize;
        D_X,                            // LONG  biWidth;
        D_Y,                            // LONG  biHeight;
        1,                              // WORD  biPlanes;
        24,                             // WORD  biBitCount;
        KS_BI_RGB,                      // DWORD biCompression;
        D_X * D_Y * 3,                  // DWORD biSizeImage;
        0,                              // LONG  biXPelsPerMeter;
        0,                              // LONG  biYPelsPerMeter;
        0,                              // DWORD biClrUsed;
        0                               // DWORD biClrImportant;
    }
};
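
// For reference, the RGB24 bit-rate bounds above evaluate to
// 8 * 3 * 30 * 160 * 120 = 13,824,000 bits/sec (about 13.8 Mbit/s) and
// 8 * 3 * 30 * 720 * 480 = 248,832,000 bits/sec (about 249 Mbit/s).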

#undef D_X
#undef D_Y

#define D_X 320
#define D_Y 240

static KS_DATARANGE_VIDEO StreamFormatUYU2_Capture =
{
    // KSDATARANGE
    {
        sizeof (KS_DATARANGE_VIDEO),            // FormatSize
        0,                                      // Flags
        D_X * D_Y * 2,                          // SampleSize
        0,                                      // Reserved
        STATIC_KSDATAFORMAT_TYPE_VIDEO,         // aka. MEDIATYPE_Video
        0x59565955, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71, // MEDIASUBTYPE_UYVY
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO // aka. FORMAT_VideoInfo
    },
    TRUE,               // BOOL, bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL, bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags (KS_VIDEO_ALLOC_*))

    // _KS_VIDEO_STREAM_CONFIG_CAPS
    {
        STATIC_KSDATAFORMAT_SPECIFIER_VIDEOINFO, // GUID
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,   // AnalogVideoStandard
        720, 480,               // InputSize, (the inherent size of the incoming signal
                                //             with every digitized pixel unique)
        160, 120,               // MinCroppingSize, smallest rcSrc cropping rect allowed
        720, 480,               // MaxCroppingSize, largest rcSrc cropping rect allowed
        8,                      // CropGranularityX, granularity of cropping size
        1,                      // CropGranularityY
        8,                      // CropAlignX, alignment of cropping rect
        1,                      // CropAlignY;
        160, 120,               // MinOutputSize, smallest bitmap stream can produce
        720, 480,               // MaxOutputSize, largest bitmap stream can produce
        8,                      // OutputGranularityX, granularity of output bitmap size
        1,                      // OutputGranularityY;
        0,                      // StretchTapsX (0 no stretch, 1 pix dup, 2 interp...)
        0,                      // StretchTapsY
        0,                      // ShrinkTapsX
        0,                      // ShrinkTapsY
        333667,                 // MinFrameInterval, 100 nS units
        640000000,              // MaxFrameInterval, 100 nS units
        8 * 2 * 30 * 160 * 120, // MinBitsPerSecond;
        8 * 2 * 30 * 720 * 480  // MaxBitsPerSecond;
    },

    // KS_VIDEOINFOHEADER (default format)
    {
        0, 0, 0, 0,                     // RECT  rcSource;
        0, 0, 0, 0,                     // RECT  rcTarget;
        D_X * D_Y * 2 * 30,             // DWORD dwBitRate;
        0L,                             // DWORD dwBitErrorRate;
        333667,                         // REFERENCE_TIME AvgTimePerFrame;
        sizeof (KS_BITMAPINFOHEADER),   // DWORD biSize;
        D_X,                            // LONG  biWidth;
        D_Y,                            // LONG  biHeight;
        1,                              // WORD  biPlanes;
        16,                             // WORD  biBitCount;
        FOURCC_YUV422,                  // DWORD biCompression;
        D_X * D_Y * 2,                  // DWORD biSizeImage;
        0,                              // LONG  biXPelsPerMeter;
        0,                              // LONG  biYPelsPerMeter;
        0,                              // DWORD biClrUsed;
        0                               // DWORD biClrImportant;
    }
};
#endif // TOSHIBA

#undef D_X
#undef D_Y

#ifndef TOSHIBA
static KS_DATARANGE_ANALOGVIDEO StreamFormatAnalogVideo =
{
    // KS_DATARANGE_ANALOGVIDEO
    {
        sizeof (KS_DATARANGE_ANALOGVIDEO),          // FormatSize
        0,                                          // Flags
        sizeof (KS_TVTUNER_CHANGE_INFO),            // SampleSize
        0,                                          // Reserved
        STATIC_KSDATAFORMAT_TYPE_ANALOGVIDEO,       // aka MEDIATYPE_AnalogVideo
        STATIC_KSDATAFORMAT_SUBTYPE_NONE,
        STATIC_KSDATAFORMAT_SPECIFIER_ANALOGVIDEO,  // aka FORMAT_AnalogVideo
    },

    // KS_ANALOGVIDEOINFO
    {
        0, 0, 720, 480,     // rcSource;
        0, 0, 720, 480,     // rcTarget;
        720,                // dwActiveWidth;
        480,                // dwActiveHeight;
        0,                  // REFERENCE_TIME AvgTimePerFrame;
    }
};
#endif // TOSHIBA

//---------------------------------------------------------------------------
// STREAM_Capture Formats
//---------------------------------------------------------------------------
static PKSDATAFORMAT Stream0Formats[] =
{
#ifdef TOSHIBA
    (PKSDATAFORMAT) &StreamFormatYUV12_Capture,
    (PKSDATAFORMAT) &StreamFormatYVU9_Capture,
#else // TOSHIBA
    (PKSDATAFORMAT) &StreamFormatRGB24Bpp_Capture,
    (PKSDATAFORMAT) &StreamFormatUYU2_Capture,
#endif // TOSHIBA
};
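
// In either build the capture pin advertises two data ranges: the planar
// YUV12/YVU9 pair when TOSHIBA is defined, or RGB24/UYVY otherwise.  A
// specific KS_VIDEOINFOHEADER is then negotiated within the
// KS_VIDEO_STREAM_CONFIG_CAPS bounds declared for each range above.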
#define NUM_STREAM_0_FORMATS (SIZEOF_ARRAY(Stream0Formats))

#ifndef TOSHIBA
//---------------------------------------------------------------------------
// STREAM_Preview Formats
//---------------------------------------------------------------------------
static PKSDATAFORMAT Stream1Formats[] =
{
#ifdef TOSHIBA
    (PKSDATAFORMAT) &StreamFormatYUV12_Capture,
    (PKSDATAFORMAT) &StreamFormatYVU9_Capture,
#else // TOSHIBA
    (PKSDATAFORMAT) &StreamFormatRGB24Bpp_Capture,
    (PKSDATAFORMAT) &StreamFormatUYU2_Capture,
#endif // TOSHIBA
};
#define NUM_STREAM_1_FORMATS (SIZEOF_ARRAY (Stream1Formats))

//---------------------------------------------------------------------------
// STREAM_AnalogVideoInput Formats
//---------------------------------------------------------------------------
static PKSDATAFORMAT Stream2Formats[] =
{
    (PKSDATAFORMAT) &StreamFormatAnalogVideo,
};
#define NUM_STREAM_2_FORMATS (SIZEOF_ARRAY (Stream2Formats))
#endif // TOSHIBA

//---------------------------------------------------------------------------
// Create an array that holds the list of all of the streams supported
//---------------------------------------------------------------------------
typedef struct _ALL_STREAM_INFO {
    HW_STREAM_INFORMATION   hwStreamInfo;
    HW_STREAM_OBJECT        hwStreamObject;
} ALL_STREAM_INFO, *PALL_STREAM_INFO;

static ALL_STREAM_INFO Streams [] =
{
    // -----------------------------------------------------------------
    // STREAM_Capture
    // -----------------------------------------------------------------
    {
        // HW_STREAM_INFORMATION -------------------------------------------
        {
            1,                                          // NumberOfPossibleInstances
            KSPIN_DATAFLOW_OUT,                         // DataFlow
            TRUE,                                       // DataAccessible
            NUM_STREAM_0_FORMATS,                       // NumberOfFormatArrayEntries
            Stream0Formats,                             // StreamFormatsArray
            0,                                          // ClassReserved[0]
            0,                                          // ClassReserved[1]
            0,                                          // ClassReserved[2]
            0,                                          // ClassReserved[3]
            NUMBER_VIDEO_STREAM_PROPERTIES,             // NumStreamPropArrayEntries
            (PKSPROPERTY_SET) VideoStreamProperties,    // StreamPropertiesArray
            0,                                          // NumStreamEventArrayEntries;
            0,                                          // StreamEventsArray;
            (GUID *) &PINNAME_VIDEO_CAPTURE,            // Category
            (GUID *) &PINNAME_VIDEO_CAPTURE,            // Name
            1,                                          // MediumsCount
            &StandardMedium,                            // Mediums
            FALSE,                                      // BridgeStream
        },

        // HW_STREAM_OBJECT ------------------------------------------------
        {
            sizeof (HW_STREAM_OBJECT),                  // SizeOfThisPacket
            0,                                          // StreamNumber
            0,                                          // HwStreamExtension
            VideoReceiveDataPacket,                     // HwReceiveDataPacket
            VideoReceiveCtrlPacket,                     // HwReceiveControlPacket
            { NULL, 0 },                                // HW_CLOCK_OBJECT
            FALSE,                                      // Dma
            TRUE,                                       // Pio
            NULL,                                       // HwDeviceExtension
            sizeof (KS_FRAME_INFO),                     // StreamHeaderMediaSpecific
            0,                                          // StreamHeaderWorkspace
            FALSE,                                      // Allocator
            NULL,                                       // HwEventRoutine
            { 0, 0 },                                   // Reserved[2]
        },
#ifndef TOSHIBA
    },

    // -----------------------------------------------------------------
    // STREAM_Preview
    // -----------------------------------------------------------------
    {
        // HW_STREAM_INFORMATION -------------------------------------------
        {
            1,                                          // NumberOfPossibleInstances
            KSPIN_DATAFLOW_OUT,                         // DataFlow
            TRUE,                                       // DataAccessible
            NUM_STREAM_1_FORMATS,                       // NumberOfFormatArrayEntries
            Stream1Formats,                             // StreamFormatsArray
            0,                                          // ClassReserved[0]
            0,                                          // ClassReserved[1]
            0,                                          // ClassReserved[2]
            0,                                          // ClassReserved[3]
            NUMBER_VIDEO_STREAM_PROPERTIES,             // NumStreamPropArrayEntries
            (PKSPROPERTY_SET) VideoStreamProperties,    // StreamPropertiesArray
            0,                                          // NumStreamEventArrayEntries;
            0,                                          // StreamEventsArray;
            (GUID *) &PINNAME_VIDEO_PREVIEW,            // Category
            (GUID *) &PINNAME_VIDEO_PREVIEW,            // Name
            1,                                          // MediumsCount
            &StandardMedium,                            // Mediums
            FALSE,                                      // BridgeStream
        },

        // HW_STREAM_OBJECT ------------------------------------------------
        {
            sizeof (HW_STREAM_OBJECT),                  // SizeOfThisPacket
            1,                                          // StreamNumber
            0,                                          // HwStreamExtension
            VideoReceiveDataPacket,                     // HwReceiveDataPacket
            VideoReceiveCtrlPacket,                     // HwReceiveControlPacket
            { NULL, 0 },                                // HW_CLOCK_OBJECT
            FALSE,                                      // Dma
            TRUE,                                       // Pio
            0,                                          // HwDeviceExtension
            sizeof (KS_FRAME_INFO),                     // StreamHeaderMediaSpecific
            0,                                          // StreamHeaderWorkspace
            FALSE,                                      // Allocator
            NULL,                                       // HwEventRoutine
            { 0, 0 },                                   // Reserved[2]
        },
    },

    // -----------------------------------------------------------------
    // STREAM_AnalogVideoInput
    // -----------------------------------------------------------------
    {
        // HW_STREAM_INFORMATION -------------------------------------------
        {
            1,                                          // NumberOfPossibleInstances
            KSPIN_DATAFLOW_IN,                          // DataFlow
            TRUE,                                       // DataAccessible
            NUM_STREAM_2_FORMATS,                       // NumberOfFormatArrayEntries
            Stream2Formats,                             // StreamFormatsArray
            0,                                          // ClassReserved[0]
            0,                                          // ClassReserved[1]
            0,                                          // ClassReserved[2]
            0,                                          // ClassReserved[3]
            0,                                          // NumStreamPropArrayEntries
            0,                                          // StreamPropertiesArray
            0,                                          // NumStreamEventArrayEntries;
            0,                                          // StreamEventsArray;
            (GUID *) &PINNAME_VIDEO_ANALOGVIDEOIN,      // Category
            (GUID *) &PINNAME_VIDEO_ANALOGVIDEOIN,      // Name
            1,                                          // MediumsCount
            &CrossbarMediums[9],                        // Mediums
            FALSE,                                      // BridgeStream
        },

        // HW_STREAM_OBJECT ------------------------------------------------
        {
            sizeof (HW_STREAM_OBJECT),                  // SizeOfThisPacket
            2,                                          // StreamNumber
            0,                                          // HwStreamExtension
            AnalogVideoReceiveDataPacket,               // HwReceiveDataPacket
            AnalogVideoReceiveCtrlPacket,               // HwReceiveControlPacket
            { NULL, 0 },                                // HW_CLOCK_OBJECT
            FALSE,                                      // Dma
            TRUE,                                       // Pio
            0,                                          // HwDeviceExtension
            0,                                          // StreamHeaderMediaSpecific
            0,                                          // StreamHeaderWorkspace
            FALSE,                                      // Allocator
            NULL,                                       // HwEventRoutine
            { 0, 0 },                                   // Reserved[2]
        }
#endif // TOSHIBA
    }
};

#define DRIVER_STREAM_COUNT (SIZEOF_ARRAY (Streams))
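
// Because the preview and analog-input entries are compiled out when TOSHIBA
// is defined, DRIVER_STREAM_COUNT is 1 in the TOSHIBA build and 3 otherwise;
// StreamHeader.NumberOfStreams below picks this value up automatically.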

//---------------------------------------------------------------------------
// Topology
//---------------------------------------------------------------------------

// Categories define what the device does.
static const GUID Categories[] = {
#ifdef TOSHIBA
    STATIC_KSCATEGORY_VIDEO,
    STATIC_KSCATEGORY_CAPTURE,
#else // TOSHIBA
    STATIC_KSCATEGORY_VIDEO,
    STATIC_KSCATEGORY_CAPTURE,
    STATIC_KSCATEGORY_TVTUNER,
    STATIC_KSCATEGORY_CROSSBAR,
    STATIC_KSCATEGORY_TVAUDIO
#endif // TOSHIBA
};

#define NUMBER_OF_CATEGORIES SIZEOF_ARRAY (Categories)

static KSTOPOLOGY Topology = {
    NUMBER_OF_CATEGORIES,       // CategoriesCount
    (GUID*) &Categories,        // Categories
    0,                          // TopologyNodesCount
    NULL,                       // TopologyNodes
    0,                          // TopologyConnectionsCount
    NULL,                       // TopologyConnections
    NULL,                       // TopologyNodesNames
    0,                          // Reserved
};

//---------------------------------------------------------------------------
// The Main stream header
//---------------------------------------------------------------------------
static HW_STREAM_HEADER StreamHeader =
{
    DRIVER_STREAM_COUNT,                // NumberOfStreams
    sizeof (HW_STREAM_INFORMATION),     // Future proofing
    0,                                  // NumDevPropArrayEntries set at init time
    NULL,                               // DevicePropertiesArray set at init time
    0,                                  // NumDevEventArrayEntries;
    NULL,                               // DeviceEventsArray;
    &Topology                           // Pointer to Device Topology
};

#ifdef __cplusplus
}
#endif // __cplusplus

#endif // __CAPSTRM_H__