Source code of Windows XP (NT5)

  1. /*++
  2. Copyright (C) Microsoft Corporation, 1999 - 2000
  3. Module Name:
  4. MSTpGuts.c
  5. Abstract:
  6. Main service functions.
  7. Last changed by:
  8. Author: Yee J. Wu
  9. Environment:
  10. Kernel mode only
  11. Revision History:
  12. $Revision:: $
  13. $Date:: $
  14. --*/
  15. #include "strmini.h"
  16. #include "ksmedia.h"
  17. #include "1394.h"
  18. #include "61883.h"
  19. #include "avc.h"
  20. #include "dbg.h"
  21. #include "ksguid.h"
  22. #include "MsTpFmt.h" // Before MsTpDefs.h
  23. #include "MsTpDef.h"
  24. #include "MsTpGuts.h"
  25. #include "MsTpUtil.h"
  26. #include "MsTpAvc.h"
  27. #include "XPrtDefs.h"
  28. #include "EDevCtrl.h"
  29. // Support MPEG2TS stride data format MPEG2_TRANSPORT_STRIDE
  30. #include "BdaTypes.h"
  31. //
  32. // Define formats supported
  33. //
  34. #include "strmdata.h"
  35. NTSTATUS
  36. AVCTapeGetDevInfo(
  37. IN PDVCR_EXTENSION pDevExt,
  38. IN PAV_61883_REQUEST pAVReq
  39. );
  40. VOID
  41. AVCTapeIniStrmExt(
  42. PHW_STREAM_OBJECT pStrmObject,
  43. PSTREAMEX pStrmExt,
  44. PDVCR_EXTENSION pDevExt,
  45. PSTREAM_INFO_AND_OBJ pStream
  46. );
  47. NTSTATUS
  48. DVStreamGetConnectionProperty (
  49. PDVCR_EXTENSION pDevExt,
  50. PSTREAM_PROPERTY_DESCRIPTOR pSPD,
  51. PULONG pulActualBytesTransferred
  52. );
  53. NTSTATUS
  54. DVGetDroppedFramesProperty(
  55. PDVCR_EXTENSION pDevExt,
  56. PSTREAMEX pStrmExt,
  57. PSTREAM_PROPERTY_DESCRIPTOR pSPD,
  58. PULONG pulBytesTransferred
  59. );
  60. #if 0 // Enable later
  61. #ifdef ALLOC_PRAGMA
  62. #pragma alloc_text(PAGE, AVCTapeGetDevInfo)
  63. #pragma alloc_text(PAGE, AVCTapeInitialize)
  64. #pragma alloc_text(PAGE, AVCTapeGetStreamInfo)
  65. #pragma alloc_text(PAGE, AVCTapeVerifyDataFormat)
  66. #pragma alloc_text(PAGE, AVCTapeGetDataIntersection)
  67. #pragma alloc_text(PAGE, AVCTapeIniStrmExt)
  68. #pragma alloc_text(PAGE, AVCTapeOpenStream)
  69. #pragma alloc_text(PAGE, AVCTapeCloseStream)
  70. #pragma alloc_text(PAGE, DVChangePower)
  71. #pragma alloc_text(PAGE, AVCTapeSurpriseRemoval)
  72. #pragma alloc_text(PAGE, AVCTapeProcessPnPBusReset)
  73. #pragma alloc_text(PAGE, AVCTapeUninitialize)
  74. #pragma alloc_text(PAGE, DVStreamGetConnectionProperty)
  75. #pragma alloc_text(PAGE, DVGetDroppedFramesProperty)
  76. #pragma alloc_text(PAGE, DVGetStreamProperty)
  77. #pragma alloc_text(PAGE, DVSetStreamProperty)
  78. #pragma alloc_text(PAGE, AVCTapeOpenCloseMasterClock)
  79. #pragma alloc_text(PAGE, AVCTapeIndicateMasterClock)
  80. #endif
  81. #endif
  82. NTSTATUS
  83. AVCStrmReqIrpSynchCR(
  84. IN PDEVICE_OBJECT DeviceObject,
  85. IN PIRP pIrp,
  86. IN PKEVENT Event
  87. )
  88. {
  89. #if DBG
  90. if(!NT_SUCCESS(pIrp->IoStatus.Status)) {
  91. TRACE(TL_FCP_WARNING,("AVCStrmReqIrpSynchCR: pIrp->IoStatus.Status:%x\n", pIrp->IoStatus.Status));
  92. }
  93. #endif
  94. KeSetEvent(Event, 0, FALSE);
  95. return STATUS_MORE_PROCESSING_REQUIRED;
  96. } // AVCStrmReqIrpSynchCR
  97. NTSTATUS
  98. AVCStrmReqSubmitIrpSynch(
  99. IN PDEVICE_OBJECT DeviceObject,
  100. IN PIRP pIrp,
  101. IN PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq
  102. )
  103. {
  104. NTSTATUS Status;
  105. KEVENT Event;
  106. PIO_STACK_LOCATION NextIrpStack;
  107. Status = STATUS_SUCCESS;
  108. NextIrpStack = IoGetNextIrpStackLocation(pIrp);
  109. NextIrpStack->MajorFunction = IRP_MJ_INTERNAL_DEVICE_CONTROL;
  110. NextIrpStack->Parameters.DeviceIoControl.IoControlCode = IOCTL_AVCSTRM_CLASS;
  111. NextIrpStack->Parameters.Others.Argument1 = pAVCStrmReq;
  112. KeInitializeEvent(&Event, NotificationEvent, FALSE);
  113. IoSetCompletionRoutine(
  114. pIrp,
  115. AVCStrmReqIrpSynchCR,
  116. &Event,
  117. TRUE,
  118. TRUE,
  119. TRUE
  120. );
  121. Status =
  122. IoCallDriver(
  123. DeviceObject,
  124. pIrp
  125. );
  126. if (Status == STATUS_PENDING) {
  127. TRACE(TL_PNP_TRACE,("(AVCStrm) Irp is pending...\n"));
  128. if(KeGetCurrentIrql() < DISPATCH_LEVEL) {
  129. KeWaitForSingleObject(
  130. &Event,
  131. Executive,
  132. KernelMode,
  133. FALSE,
  134. NULL
  135. );
  136. TRACE(TL_PNP_TRACE,("Irp has completed; IoStatus.Status %x\n", pIrp->IoStatus.Status));
  137. Status = pIrp->IoStatus.Status; // Final status
  138. }
  139. else {
  140. ASSERT(FALSE && "Pending but in DISPATCH_LEVEL!");
  141. return Status;
  142. }
  143. }
  144. TRACE(TL_PNP_TRACE,("AVCStrmReqSubmitIrpSynch: IoCallDriver, Status:%x\n", Status));
  145. return Status;
  146. } // AVCStrmReqSubmitIrpSynch
  147. NTSTATUS
  148. AVCReqIrpSynchCR(
  149. IN PDEVICE_OBJECT DeviceObject,
  150. IN PIRP pIrp,
  151. IN PKEVENT Event
  152. )
  153. {
  154. #if DBG
  155. if(!NT_SUCCESS(pIrp->IoStatus.Status)) {
  156. TRACE(TL_PNP_WARNING,("AVCReqIrpSynchCR: pIrp->IoStatus.Status:%x\n", pIrp->IoStatus.Status));
  157. }
  158. #endif
  159. KeSetEvent(Event, 0, FALSE);
  160. return STATUS_MORE_PROCESSING_REQUIRED;
  161. } // AVCReqIrpSynchCR
  162. NTSTATUS
  163. AVCReqSubmitIrpSynch(
  164. IN PDEVICE_OBJECT DeviceObject,
  165. IN PIRP pIrp,
  166. IN PAVC_MULTIFUNC_IRB pAvcIrbReq
  167. )
  168. {
  169. NTSTATUS Status;
  170. KEVENT Event;
  171. PIO_STACK_LOCATION NextIrpStack;
  172. Status = STATUS_SUCCESS;
  173. NextIrpStack = IoGetNextIrpStackLocation(pIrp);
  174. NextIrpStack->MajorFunction = IRP_MJ_INTERNAL_DEVICE_CONTROL;
  175. NextIrpStack->Parameters.DeviceIoControl.IoControlCode = IOCTL_AVC_CLASS;
  176. NextIrpStack->Parameters.Others.Argument1 = pAvcIrbReq;
  177. KeInitializeEvent(&Event, NotificationEvent, FALSE);
  178. IoSetCompletionRoutine(
  179. pIrp,
  180. AVCReqIrpSynchCR,
  181. &Event,
  182. TRUE,
  183. TRUE,
  184. TRUE
  185. );
  186. Status =
  187. IoCallDriver(
  188. DeviceObject,
  189. pIrp
  190. );
  191. if (Status == STATUS_PENDING) {
  192. TRACE(TL_PNP_TRACE,("(AVC) Irp is pending...\n"));
  193. if(KeGetCurrentIrql() < DISPATCH_LEVEL) {
  194. KeWaitForSingleObject(
  195. &Event,
  196. Executive,
  197. KernelMode,
  198. FALSE,
  199. NULL
  200. );
  201. TRACE(TL_PNP_TRACE,("Irp has completed; IoStatus.Status %x\n", pIrp->IoStatus.Status));
  202. Status = pIrp->IoStatus.Status; // Final status
  203. }
  204. else {
  205. ASSERT(FALSE && "Pending but in DISPATCH_LEVEL!");
  206. return Status;
  207. }
  208. }
  209. TRACE(TL_PNP_TRACE,("AVCReqSubmitIrpSynch: IoCallDriver, Status:%x\n", Status));
  210. return Status;
  211. } // AVCReqSubmitIrpSynch
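/*
 * Editor's sketch (not part of the original source): how the two synchronous
 * submit helpers above are typically driven. The AVC_FUNCTION_GET_PIN_COUNT
 * request, the pIrpSyncCall IRP, and the AvcMultIrb block mirror their use in
 * AVCTapeGetPinInfo() later in this file; the wrapper name
 * AVCTapeQueryPinCount is hypothetical.
 */
#if 0 // Illustrative only; not compiled with the driver.
NTSTATUS
AVCTapeQueryPinCount(
    IN PDVCR_EXTENSION pDevExt,
    OUT PULONG pPinCount
    )
{
    NTSTATUS Status;

    // Fill in the multifunction IRB cached in the device extension.
    RtlZeroMemory(&pDevExt->AvcMultIrb, sizeof(AVC_MULTIFUNC_IRB));
    pDevExt->AvcMultIrb.Function = AVC_FUNCTION_GET_PIN_COUNT;

    // Submit to avc.sys and block until the completion routine signals the event.
    Status = AVCReqSubmitIrpSynch(pDevExt->pBusDeviceObject, pDevExt->pIrpSyncCall, &pDevExt->AvcMultIrb);
    if(NT_SUCCESS(Status)) {
        *pPinCount = pDevExt->AvcMultIrb.PinCount.PinCount;
    }
    return Status;
}
#endif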
  212. VOID
  213. DVIniDevExtStruct(
  214. IN PDVCR_EXTENSION pDevExt,
  215. IN PPORT_CONFIGURATION_INFORMATION pConfigInfo
  216. )
  217. /*++
  218. Routine Description:
  219. Initialize the device extension structure.
  220. --*/
  221. {
  222. ULONG i;
  223. RtlZeroMemory( pDevExt, sizeof(DVCR_EXTENSION) );
  224. //
  225. // Cache the contents of ConfigInfo in the device extension
  226. //
  227. pDevExt->pBusDeviceObject = pConfigInfo->PhysicalDeviceObject; // IoCallDriver()
  228. pDevExt->pPhysicalDeviceObject = pConfigInfo->RealPhysicalDeviceObject; // Used in PnP API
  229. //
  230. // Allow only one stream open at a time to avoid cyclic format
  231. //
  232. pDevExt->cndStrmOpen = 0;
  233. //
  234. // Serialize in the event of getting two consecutive SRB_OPEN_STREAMs
  235. //
  236. KeInitializeMutex( &pDevExt->hMutex, 0); // Level 0 and in Signal state
  237. //
  238. // Initialize our pointer to stream extension
  239. //
  240. for (i=0; i<pDevExt->NumOfPins; i++) {
  241. pDevExt->paStrmExt[i] = NULL;
  242. }
  243. //
  244. // Bus reset, surprise removal
  245. //
  246. pDevExt->bDevRemoved = FALSE;
  247. pDevExt->PowerState = PowerDeviceD0;
  248. //
  249. // External device control (AV/C commands)
  250. //
  251. KeInitializeSpinLock( &pDevExt->AVCCmdLock ); // To guard the count
  252. pDevExt->cntCommandQueued = 0; // Commands that have completed their life cycle and are waiting to be read (mostly for RAW_AVC's Set/Read model)
  253. InitializeListHead(&pDevExt->AVCCmdList);
  254. // Initialize the list of possible opcode values of the response
  255. // from a Transport State status or notify command. The first item
  256. // is the number of values that follow.
  257. ASSERT(sizeof(pDevExt->TransportModes) == 5);
  258. pDevExt->TransportModes[0] = 4;
  259. pDevExt->TransportModes[1] = 0xC1;
  260. pDevExt->TransportModes[2] = 0xC2;
  261. pDevExt->TransportModes[3] = 0xC3;
  262. pDevExt->TransportModes[4] = 0xC4;
  263. }
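// Editor's note (not in the original source): the opcode values cached above,
// 0xC1 through 0xC4, appear to correspond to the AV/C tape subunit transport
// opcodes LOAD MEDIUM, RECORD, PLAY and WIND; treat this mapping as an
// annotation rather than a statement from the original authors.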
  264. NTSTATUS
  265. AVCTapeGetDevInfo(
  266. IN PDVCR_EXTENSION pDevExt,
  267. IN PAV_61883_REQUEST pAVReq
  268. )
  269. /*++
  270. Routine Description:
  271. Issue AV/C commands to determine basic device information and cache it in the device extension.
  272. --*/
  273. {
  274. NTSTATUS Status;
  275. PIRP pIrp;
  276. BYTE bAvcBuf[MAX_FCP_PAYLOAD_SIZE]; // For issuing AV/C commands within this module
  277. PKSPROPERTY_EXTXPORT_S pXPrtProperty; // Points to bAvcBuf
  278. KSPROPERTY_EXTDEVICE_S XDevProperty; // External device property
  279. PAGED_CODE();
  280. pIrp = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  281. if(!pIrp) {
  282. ASSERT(pIrp && "IoAllocateIrp() failed!");
  283. return STATUS_INSUFFICIENT_RESOURCES;
  284. }
  285. //
  286. // The input and output plug arrays are at the end of the device extension
  287. //
  288. pDevExt->pDevOutPlugs = (PAVC_DEV_PLUGS) ((PBYTE) pDevExt + sizeof(DVCR_EXTENSION));
  289. pDevExt->pDevInPlugs = (PAVC_DEV_PLUGS) ((PBYTE) pDevExt + sizeof(DVCR_EXTENSION) + sizeof(AVC_DEV_PLUGS));
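// Editor's note (not in the original source): this layout implies the device
// extension was allocated elsewhere with extra room for two AVC_DEV_PLUGS
// structures (output plugs first, then input plugs) immediately after
// DVCR_EXTENSION; the allocation itself is not shown in this section.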
  290. //
  291. // Get unit's capabilities indirectly from 61883.sys
  292. // Speed
  293. //
  294. Status = DVGetUnitCapabilities(pDevExt, pIrp, pAVReq);
  295. if(!NT_SUCCESS(Status)) {
  296. TRACE(TL_61883_ERROR,("Av61883_GetUnitCapabilities Failed %x\n", Status));
  297. IoFreeIrp(pIrp);
  298. return Status;
  299. }
  300. IoFreeIrp(pIrp);
  301. //
  302. // Get current power state. Turn it on if it's off.
  303. //
  304. Status = DVIssueAVCCommand(pDevExt, AVC_CTYPE_STATUS, DV_GET_POWER_STATE, (PVOID) &XDevProperty);
  305. TRACE(TL_PNP_WARNING,("GET_POWER_STATE: Status:%x; %s\n", Status, XDevProperty.u.PowerState == ED_POWER_ON ? "PowerON" : "PowerStandby"));
  306. if(STATUS_SUCCESS == Status) {
  307. #define WAIT_SET_POWER 100 // Wait time when setting the power state (msec)
  308. #define MAX_SET_POWER_RETRIES 3
  309. if( XDevProperty.u.PowerState == ED_POWER_STANDBY
  310. || XDevProperty.u.PowerState == ED_POWER_OFF
  311. ) {
  312. NTSTATUS StatusSetPower;
  313. LONG lRetries = 0;
  314. do {
  315. //
  316. // Some AVC devices, such as D-VHS, will return STATUS_DEVICE_DATA_ERROR when
  317. // this command is issued right after the get-power-state command. Such devices
  318. // might be slow to respond to AV/C commands. Even though waiting is not
  319. // desirable, it is the only way.
  320. //
  321. DVDelayExecutionThread(WAIT_SET_POWER); // Wait a little
  322. StatusSetPower = DVIssueAVCCommand(pDevExt, AVC_CTYPE_CONTROL, DV_SET_POWER_STATE_ON, (PVOID) &XDevProperty);
  323. lRetries++;
  324. TRACE(TL_PNP_WARNING,("SET_POWER_STATE_ON: (%d) StatusSetPower:%x; Waited (%d msec).\n", lRetries, StatusSetPower, WAIT_SET_POWER));
  325. } while ( lRetries < MAX_SET_POWER_RETRIES
  326. && ( StatusSetPower == STATUS_REQUEST_ABORTED
  327. || StatusSetPower == STATUS_DEVICE_DATA_ERROR
  328. || StatusSetPower == STATUS_IO_TIMEOUT
  329. ));
  330. TRACE(TL_PNP_WARNING,("SET_POWER_STATE_ON: StatusSetPower:%x; Retries:%d times\n\n", StatusSetPower, lRetries));
  331. }
  332. }
  333. //
  334. // Subunit_Info : VCR or camera
  335. //
  336. DVDelayExecutionThread(DV_AVC_CMD_DELAY_INTER_CMD);
  337. Status = DVIssueAVCCommand(pDevExt, AVC_CTYPE_STATUS, DV_SUBUNIT_INFO, (PVOID) bAvcBuf);
  338. if(STATUS_SUCCESS == Status) {
  339. TRACE(TL_PNP_TRACE,("GetDevInfo: Status %x DV_SUBUNIT_INFO (%x %x %x %x)\n",
  340. Status, bAvcBuf[0], bAvcBuf[1], bAvcBuf[2], bAvcBuf[3]));
  341. //
  342. // Cache it. We assume max_subunit_ID is 0 and there is a max of 4 entries.
  343. //
  344. pDevExt->Subunit_Type[0] = bAvcBuf[0] & AVC_SUBTYPE_MASK;
  345. pDevExt->Subunit_Type[1] = bAvcBuf[1] & AVC_SUBTYPE_MASK;
  346. pDevExt->Subunit_Type[2] = bAvcBuf[2] & AVC_SUBTYPE_MASK;
  347. pDevExt->Subunit_Type[3] = bAvcBuf[3] & AVC_SUBTYPE_MASK;
  348. // This is a tape subunit driver, so one of the subunits must be a tape subunit.
  349. if(pDevExt->Subunit_Type[0] != AVC_DEVICE_TAPE_REC && pDevExt->Subunit_Type[1]) {
  350. TRACE(TL_PNP_ERROR,("GetDevInfo:Device not supported: %x, %x; (VCR %x, Camera %x)\n",
  351. pDevExt->Subunit_Type[0], pDevExt->Subunit_Type[1], AVC_DEVICE_TAPE_REC, AVC_DEVICE_CAMERA));
  352. return STATUS_NOT_SUPPORTED;
  353. }
  354. } else {
  355. TRACE(TL_PNP_ERROR,("GetDevInfo: DV_SUBUNIT_INFO failed, Status %x\n", Status));
  356. if(STATUS_TIMEOUT == Status) {
  357. TRACE(TL_PNP_WARNING, ("GetDevInfo: Query DV_SUBUNIT_INFO failed. This could be the MediaDecoder box.\n"));
  358. // Do not fail this. Making an exception.
  359. }
  360. // Has our device gone away?
  361. if (STATUS_IO_DEVICE_ERROR == Status || STATUS_REQUEST_ABORTED == Status)
  362. return Status;
  363. pDevExt->Subunit_Type[0] = AVC_DEVICE_UNKNOWN;
  364. pDevExt->Subunit_Type[1] = AVC_DEVICE_UNKNOWN;
  365. pDevExt->Subunit_Type[2] = AVC_DEVICE_UNKNOWN;
  366. pDevExt->Subunit_Type[3] = AVC_DEVICE_UNKNOWN;
  367. }
  368. //
  369. // Medium_Info: MediaPresent, MediaType, RecordInhibit
  370. //
  371. pXPrtProperty = (PKSPROPERTY_EXTXPORT_S) bAvcBuf;
  372. DVDelayExecutionThread(DV_AVC_CMD_DELAY_INTER_CMD);
  373. Status = DVIssueAVCCommand(pDevExt, AVC_CTYPE_STATUS, VCR_MEDIUM_INFO, (PVOID) pXPrtProperty);
  374. if(STATUS_SUCCESS == Status) {
  375. pDevExt->bHasTape = pXPrtProperty->u.MediumInfo.MediaPresent;
  376. pDevExt->MediaType = pXPrtProperty->u.MediumInfo.MediaType;
  377. TRACE(TL_PNP_TRACE,("GetDevInfo: Status %x HasTape %s, VCR_MEDIUM_INFO (%x %x %x %x)\n",
  378. Status, pDevExt->bHasTape ? "Yes" : "No", bAvcBuf[0], bAvcBuf[1], bAvcBuf[2], bAvcBuf[3]));
  379. } else {
  380. pDevExt->bHasTape = FALSE;
  381. TRACE(TL_PNP_ERROR,("GetDevInfo: VCR_MEDIUM_INFO failed, Status %x\n", Status));
  382. // Has our device gone away?
  383. if (STATUS_IO_DEVICE_ERROR == Status || STATUS_REQUEST_ABORTED == Status)
  384. return Status;
  385. }
  386. //
  387. // If this is a Panasonic AVC device, we will detect if it is a DVCPro format;
  388. // This needs to be called before MediaFormat
  389. //
  390. if(pDevExt->ulVendorID == VENDORID_PANASONIC) {
  391. DVDelayExecutionThread(DV_AVC_CMD_DELAY_INTER_CMD);
  392. DVGetDevIsItDVCPro(pDevExt);
  393. }
  394. //
  395. // Medium format: NTSC or PAL
  396. //
  397. pDevExt->VideoFormatIndex = AVCSTRM_FORMAT_SDDV_NTSC; // Default
  398. DVDelayExecutionThread(DV_AVC_CMD_DELAY_INTER_CMD);
  399. if(!DVGetDevSignalFormat(
  400. pDevExt,
  401. KSPIN_DATAFLOW_OUT,
  402. 0)) {
  403. ASSERT(FALSE && "IN/OUTPUT SIGNAL MODE is not supported; driver abort.");
  404. return STATUS_NOT_SUPPORTED;
  405. } else {
  406. if(pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_SDDV_NTSC &&
  407. pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_SDDV_PAL &&
  408. pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_MPEG2TS
  409. ) {
  410. TRACE(TL_PNP_ERROR,("**** Format idx %d not supported by this driver ***\n", pDevExt->VideoFormatIndex));
  411. ASSERT(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_SDDV_NTSC || pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_SDDV_PAL);
  412. return STATUS_NOT_SUPPORTED;
  413. }
  414. }
  415. //
  416. // Mode of Operation: 0(Undetermined), Camera or VCR
  417. //
  418. DVDelayExecutionThread(DV_AVC_CMD_DELAY_INTER_CMD);
  419. DVGetDevModeOfOperation(
  420. pDevExt
  421. );
  422. return STATUS_SUCCESS; // Status;
  423. }
  424. #ifdef SUPPORT_NEW_AVC
  425. HANDLE
  426. AVCTapeGetPlugHandle(
  427. IN PDVCR_EXTENSION pDevExt,
  428. IN ULONG PlugNum,
  429. IN KSPIN_DATAFLOW DataFlow
  430. )
  431. {
  432. NTSTATUS Status;
  433. PAV_61883_REQUEST pAVReq;
  434. PAGED_CODE();
  435. Status = STATUS_SUCCESS;
  436. pAVReq = &pDevExt->AVReq;
  437. RtlZeroMemory(pAVReq, sizeof(AV_61883_REQUEST));
  438. INIT_61883_HEADER(pAVReq, Av61883_GetPlugHandle);
  439. pAVReq->GetPlugHandle.PlugNum = PlugNum;
  440. pAVReq->GetPlugHandle.hPlug = 0;
  441. pAVReq->GetPlugHandle.Type = DataFlow == KSPIN_DATAFLOW_OUT ? CMP_PlugOut : CMP_PlugIn;
  442. Status = DVSubmitIrpSynch(pDevExt, pDevExt->pIrpSyncCall, pAVReq);
  443. if(!NT_SUCCESS(Status)) {
  444. TRACE(TL_61883_ERROR,("GetPlugHandle: Failed:%x\n", Status));
  445. ASSERT(NT_SUCCESS(Status));
  446. pAVReq->GetPlugHandle.hPlug = NULL;
  447. return NULL;
  448. }
  449. else {
  450. TRACE(TL_61883_TRACE,("hPlug=%x\n", pAVReq->GetPlugHandle.hPlug));
  451. }
  452. return pAVReq->GetPlugHandle.hPlug;
  453. }
  454. NTSTATUS
  455. AVCTapeGetPinInfo(
  456. IN PDVCR_EXTENSION pDevExt
  457. )
  458. /*++
  459. Routine Description:
  460. Acquire pin information from avc.sys. This information will be used to define data ranges and
  461. then to perform data intersection.
  462. --*/
  463. {
  464. NTSTATUS Status;
  465. ULONG i;
  466. ULONG PinId; // Pin number
  467. Status = STATUS_SUCCESS;
  468. // Get pin count
  469. RtlZeroMemory(&pDevExt->AvcMultIrb, sizeof(AVC_MULTIFUNC_IRB));
  470. pDevExt->AvcMultIrb.Function = AVC_FUNCTION_GET_PIN_COUNT;
  471. Status = AVCReqSubmitIrpSynch(pDevExt->pBusDeviceObject, pDevExt->pIrpSyncCall, &pDevExt->AvcMultIrb);
  472. if(!NT_SUCCESS(Status)) {
  473. TRACE(TL_STRM_ERROR,("GetPinCount Failed:%x\n", Status));
  474. goto GetPinInfoDone;
  475. } else {
  476. TRACE(TL_STRM_TRACE,("There are %d pins\n", pDevExt->AvcMultIrb.PinCount.PinCount));
  477. if(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_MPEG2TS) {
  478. if(pDevExt->AvcMultIrb.PinCount.PinCount > 1) {
  479. goto GetPinInfoDone;
  480. }
  481. } else {
  482. if(pDevExt->AvcMultIrb.PinCount.PinCount > 3) {
  483. goto GetPinInfoDone;
  484. }
  485. }
  486. pDevExt->PinCount = pDevExt->AvcMultIrb.PinCount.PinCount; // <<<
  487. }
  488. // Get all pin descriptors
  489. for(i=0; i<pDevExt->PinCount; i++) {
  490. // Get a pin descriptor
  491. RtlZeroMemory(&pDevExt->AvcMultIrb, sizeof(AVC_MULTIFUNC_IRB));
  492. pDevExt->AvcMultIrb.Function = AVC_FUNCTION_GET_PIN_DESCRIPTOR;
  493. pDevExt->AvcMultIrb.PinDescriptor.PinId = i;
  494. Status = AVCReqSubmitIrpSynch(pDevExt->pBusDeviceObject, pDevExt->pIrpSyncCall, &pDevExt->AvcMultIrb);
  495. if(!NT_SUCCESS(Status)) {
  496. TRACE(TL_PNP_ERROR,("GetPinDescriptor Failed:%x\n", Status));
  497. goto GetPinInfoDone;
  498. } else {
  499. // Copy the pDevExt->AvcMultIrb.PinDescriptor.PinDescriptor
  500. PinId = pDevExt->AvcMultIrb.PinDescriptor.PinId;
  501. // Anything else ?
  502. }
  503. // Get pre connection info
  504. RtlZeroMemory(&pDevExt->AvcMultIrb, sizeof(AVC_MULTIFUNC_IRB));
  505. pDevExt->AvcMultIrb.Function = AVC_FUNCTION_GET_CONNECTINFO;
  506. pDevExt->AvcMultIrb.PinDescriptor.PinId = PinId;
  507. Status = AVCReqSubmitIrpSynch(pDevExt->pBusDeviceObject, pDevExt->pIrpSyncCall, &pDevExt->AvcMultIrb);
  508. if(!NT_SUCCESS(Status)) {
  509. TRACE(TL_PNP_ERROR,("GetConnectInfo Failed:%x\n", Status));
  510. goto GetPinInfoDone;
  511. } else {
  512. // Cache connectInfo
  513. if(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_MPEG2TS) {
  514. // Check
  515. if(pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo.DataFlow == KSPIN_DATAFLOW_OUT) {
  516. MPEG2TStreamOut.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  517. } else {
  518. MPEG2TStreamIn.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  519. }
  520. }
  521. else {
  522. if(pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo.DataFlow == KSPIN_DATAFLOW_OUT) {
  523. DvcrNTSCiavStream.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  524. DvcrPALiavStream.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  525. } else if(pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo.DataFlow == KSPIN_DATAFLOW_IN) {
  526. DvcrNTSCiavStreamIn.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  527. DvcrPALiavStreamIn.ConnectInfo = pDevExt->AvcMultIrb.PreConnectInfo.ConnectInfo;
  528. } else {
  529. // Error; unexpected;
  530. TRACE(TL_PNP_ERROR,("Unexpected index:%d for format:%d\n", i, pDevExt->VideoFormatIndex));
  531. // goto GetPinInfoDone;
  532. }
  533. }
  534. }
  535. }
  536. GetPinInfoDone:
  537. TRACE(TL_STRM_TRACE,("GetPinInfo exited with ST:%x\n", Status));
  538. return Status;
  539. }
  540. #endif // SUPPORT_NEW_AVC
  541. NTSTATUS
  542. AVCTapeInitialize(
  543. IN PDVCR_EXTENSION pDevExt,
  544. IN PPORT_CONFIGURATION_INFORMATION pConfigInfo,
  545. IN PAV_61883_REQUEST pAVReq
  546. )
  547. /*++
  548. Routine Description:
  549. This is where we perform the necessary initialization tasks.
  550. --*/
  551. {
  552. ULONG i;
  553. NTSTATUS Status = STATUS_SUCCESS;
  554. PAGED_CODE();
  555. //
  556. // Initialize the device extension structure
  557. //
  558. DVIniDevExtStruct(
  559. pDevExt,
  560. pConfigInfo
  561. );
  562. #ifdef READ_CUTOMIZE_REG_VALUES
  563. //
  564. // Get values from this device's own registry
  565. //
  566. DVGetPropertyValuesFromRegistry(
  567. pDevExt
  568. );
  569. #endif
  570. // Allocate an IRP for synchronous calls
  571. pDevExt->pIrpSyncCall = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  572. if(!pDevExt->pIrpSyncCall) {
  573. ASSERT(pDevExt->pIrpSyncCall && "Allocate Irp failed.\n");
  574. return STATUS_INSUFFICIENT_RESOURCES;
  575. }
  576. //
  577. // Query device information at load time:
  578. // Subunit
  579. // Unit Info
  580. // Mode of operation
  581. // NTSC or PAL
  582. // Speed
  583. //
  584. Status =
  585. AVCTapeGetDevInfo(
  586. pDevExt,
  587. pAVReq
  588. );
  589. if(!NT_SUCCESS(Status)) {
  590. TRACE(TL_PNP_ERROR,("GetDevInfo failed %x\n", Status));
  591. ASSERT(NT_SUCCESS(Status) && "AVCTapeGetDevInfo failed");
  592. goto AbortLoading;
  593. }
  594. //
  595. // Get device's output plug handles and states
  596. //
  597. if(pDevExt->pDevOutPlugs->NumPlugs) {
  598. NTSTATUS StatusPlug;
  599. TRACE(TL_61883_WARNING,("%d oPCR(s); MaxDataRate:%d (%s)\n",
  600. pDevExt->pDevOutPlugs->NumPlugs,
  601. pDevExt->pDevOutPlugs->MaxDataRate,
  602. (pDevExt->pDevOutPlugs->MaxDataRate == CMP_SPEED_S100) ? "S100" :
  603. (pDevExt->pDevOutPlugs->MaxDataRate == CMP_SPEED_S200) ? "S200" :
  604. (pDevExt->pDevOutPlugs->MaxDataRate == CMP_SPEED_S400) ? "S400" : "Sxxx"
  605. ));
  606. for (i = 0; i < pDevExt->pDevOutPlugs->NumPlugs; i++) {
  607. if(NT_SUCCESS(
  608. StatusPlug = AVCDevGetDevPlug(
  609. pDevExt,
  610. CMP_PlugOut,
  611. i,
  612. &pDevExt->pDevOutPlugs->DevPlug[i].hPlug
  613. ))) {
  614. if(NT_SUCCESS(
  615. AVCDevGetPlugState(
  616. pDevExt,
  617. pDevExt->pDevOutPlugs->DevPlug[i].hPlug,
  618. &pDevExt->pDevOutPlugs->DevPlug[i].PlugState
  619. ))) {
  620. } else {
  621. //
  622. // This is an error if we were told there are this many plugs;
  623. // Set default plug states.
  624. //
  625. pDevExt->pDevOutPlugs->DevPlug[i].PlugState.DataRate = CMP_SPEED_S100;
  626. pDevExt->pDevOutPlugs->DevPlug[i].PlugState.Payload = PCR_PAYLOAD_MPEG2TS_DEF;
  627. pDevExt->pDevOutPlugs->DevPlug[i].PlugState.BC_Connections = 0;
  628. pDevExt->pDevOutPlugs->DevPlug[i].PlugState.PP_Connections = 0;
  629. }
  630. }
  631. else {
  632. //
  633. // If there is a plug, we should be able to get its handle!
  634. //
  635. TRACE(TL_61883_ERROR,("GetDevPlug oPlug[%d] failed %x\n", i, StatusPlug));
  636. ASSERT(NT_SUCCESS(StatusPlug) && "Failed to get oPCR handle from 61883!");
  637. break;
  638. }
  639. }
  640. }
  641. else {
  642. TRACE(TL_61883_WARNING,("Has no oPCR\n"));
  643. }
  644. //
  645. // Get device's input plug handles and states
  646. //
  647. if(pDevExt->pDevInPlugs->NumPlugs) {
  648. NTSTATUS StatusPlug;
  649. TRACE(TL_61883_WARNING,("%d iPCR(s); MaxDataRate:%d (%s)\n",
  650. pDevExt->pDevInPlugs->NumPlugs,
  651. pDevExt->pDevInPlugs->MaxDataRate,
  652. (pDevExt->pDevInPlugs->MaxDataRate == CMP_SPEED_S100) ? "S100" :
  653. (pDevExt->pDevInPlugs->MaxDataRate == CMP_SPEED_S200) ? "S200" :
  654. (pDevExt->pDevInPlugs->MaxDataRate == CMP_SPEED_S400) ? "S400" : "Sxxx"
  655. ));
  656. for (i = 0; i < pDevExt->pDevInPlugs->NumPlugs; i++) {
  657. if(NT_SUCCESS(
  658. StatusPlug = AVCDevGetDevPlug(
  659. pDevExt,
  660. CMP_PlugIn,
  661. i,
  662. &pDevExt->pDevInPlugs->DevPlug[i].hPlug
  663. ))) {
  664. if(NT_SUCCESS(
  665. AVCDevGetPlugState(
  666. pDevExt,
  667. pDevExt->pDevInPlugs->DevPlug[i].hPlug,
  668. &pDevExt->pDevInPlugs->DevPlug[i].PlugState
  669. ))) {
  670. } else {
  671. //
  672. // This is an error if we were told there are this many plugs;
  673. // Set default plug states.
  674. //
  675. pDevExt->pDevInPlugs->DevPlug[i].PlugState.DataRate = CMP_SPEED_S200;
  676. pDevExt->pDevInPlugs->DevPlug[i].PlugState.Payload = PCR_PAYLOAD_MPEG2TS_DEF;
  677. pDevExt->pDevInPlugs->DevPlug[i].PlugState.BC_Connections = 0;
  678. pDevExt->pDevInPlugs->DevPlug[i].PlugState.PP_Connections = 0;
  679. }
  680. }
  681. else {
  682. //
  683. // If there is a plug, we should be able to get its handle!
  684. //
  685. TRACE(TL_61883_ERROR,("GetDevPlug iPlug[%d] failed %x\n", i, StatusPlug));
  686. ASSERT(NT_SUCCESS(StatusPlug) && "Failed to get iPCR handle from 61883!");
  687. break;
  688. }
  689. }
  690. }
  691. else {
  692. TRACE(TL_61883_WARNING,("Has no iPCR\n"));
  693. }
  694. #ifdef SUPPORT_LOCAL_PLUGS
  695. // Create a local output plug.
  696. pDevExt->OPCR.oPCR.OnLine = 0; // We are not online so we cannot be programmed.
  697. pDevExt->OPCR.oPCR.BCCCounter = 0;
  698. pDevExt->OPCR.oPCR.PPCCounter = 0;
  699. pDevExt->OPCR.oPCR.Channel = 0;
  700. // Default to MPEG2TS data since an MPEG2TS device, like D-VHS, can initialize the connection.
  701. if(pDevExt->pDevOutPlugs->NumPlugs) {
  702. //
  703. // Set PC's oPCR to match device's oPCR[0]
  704. //
  705. pDevExt->OPCR.oPCR.DataRate =
  706. #if 0
  707. // Be conservative and use this to match its oPCR[0]'s setting.
  708. pDevExt->pDevOutPlugs->DevPlug[0].PlugState.DataRate; // oPCR's data rate <= MPR's MaxDataRate
  709. #else
  710. // Be aggressive in conserving BWU; use MaxDataRate.
  711. pDevExt->pDevOutPlugs->MaxDataRate; // Use MPR's MaxDataRate?
  712. #endif
  713. pDevExt->OPCR.oPCR.OverheadID = PCR_OVERHEAD_ID_MPEG2TS_DEF; // Default since we do not get this as a plug state
  714. pDevExt->OPCR.oPCR.Payload = pDevExt->pDevOutPlugs->DevPlug[0].PlugState.Payload;
  715. } else {
  716. pDevExt->OPCR.oPCR.DataRate = CMP_SPEED_S200; // Default of D-VHS
  717. pDevExt->OPCR.oPCR.OverheadID = PCR_OVERHEAD_ID_MPEG2TS_DEF; // This is just default
  718. pDevExt->OPCR.oPCR.Payload = PCR_PAYLOAD_MPEG2TS_DEF; // Default
  719. }
  720. if(!AVCTapeCreateLocalPlug(
  721. pDevExt,
  722. &pDevExt->AVReq,
  723. CMP_PlugOut,
  724. &pDevExt->OPCR,
  725. &pDevExt->OutputPCRLocalNum,
  726. &pDevExt->hOutputPCRLocal)) {
  727. TRACE(TL_PNP_ERROR,("Create PC oPCR failed!\n"));
  728. Status = STATUS_INSUFFICIENT_RESOURCES;
  729. goto AbortLoading;
  730. }
  731. // Create a local input plug.
  732. pDevExt->IPCR.iPCR.OnLine = 0; // We are not online so we cannot be programmed.
  733. pDevExt->IPCR.iPCR.BCCCounter = 0;
  734. pDevExt->IPCR.iPCR.PPCCounter = 0;
  735. pDevExt->IPCR.iPCR.Channel = 0;
  736. if(!AVCTapeCreateLocalPlug(
  737. pDevExt,
  738. &pDevExt->AVReq,
  739. CMP_PlugIn,
  740. &pDevExt->IPCR,
  741. &pDevExt->InputPCRLocalNum,
  742. &pDevExt->hInputPCRLocal)) {
  743. TRACE(TL_PNP_ERROR,("Create PC iPCR failed!\n"));
  744. // Delete oPCR created
  745. if(!AVCTapeDeleteLocalPlug(
  746. pDevExt,
  747. &pDevExt->AVReq,
  748. &pDevExt->OutputPCRLocalNum,
  749. &pDevExt->hOutputPCRLocal)) {
  750. TRACE(TL_PNP_ERROR,("Delete PC oPCR failed!\n"));
  751. }
  752. Status = STATUS_INSUFFICIENT_RESOURCES;
  753. goto AbortLoading;
  754. }
  755. #endif
  756. #ifdef SUPPORT_NEW_AVC // Initialize device
  757. //
  758. // Get plug handle of this device;
  759. // BUGBUG: For now, assume there is one pair of input and output plugs
  760. //
  761. pDevExt->hPlugLocalIn = AVCTapeGetPlugHandle(pDevExt, 0, KSPIN_DATAFLOW_IN);
  762. pDevExt->hPlugLocalOut = AVCTapeGetPlugHandle(pDevExt, 0, KSPIN_DATAFLOW_OUT);
  763. //
  764. // Get Pin information for connection purpose
  765. //
  766. Status = AVCTapeGetPinInfo(pDevExt);
  767. if(!NT_SUCCESS(Status)) {
  768. TRACE(TL_PNP_ERROR,("GetPinInfo failed %x\n", Status));
  769. ASSERT(NT_SUCCESS(Status) && "AVCTapeGetPinInfo failed");
  770. goto AbortLoading;
  771. }
  772. #endif
  773. //
  774. // Can customize the FormatInfoTable here!
  775. //
  776. switch(pDevExt->VideoFormatIndex) {
  777. case AVCSTRM_FORMAT_SDDV_NTSC:
  778. case AVCSTRM_FORMAT_SDDV_PAL:
  779. case AVCSTRM_FORMAT_HDDV_NTSC:
  780. case AVCSTRM_FORMAT_HDDV_PAL:
  781. case AVCSTRM_FORMAT_SDLDV_NTSC:
  782. case AVCSTRM_FORMAT_SDLDV_PAL:
  783. pDevExt->NumOfPins = DV_STREAM_COUNT;
  784. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.DBS = CIP_DBS_SDDV;
  785. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.FN = CIP_FN_DV;
  786. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.QPC = CIP_QPC_DV;
  787. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.SPH = CIP_SPH_DV;
  788. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.FMT = CIP_FMT_DV;
  789. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.STYPE = CIP_STYPE_DV;
  790. break;
  791. case AVCSTRM_FORMAT_MPEG2TS:
  792. pDevExt->NumOfPins = MPEG_STREAM_COUNT;
  793. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.DBS = CIP_DBS_MPEG;
  794. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.FN = CIP_FN_MPEG;
  795. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.QPC = CIP_QPC_MPEG;
  796. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr1.SPH = CIP_SPH_MPEG;
  797. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.FMT = CIP_FMT_MPEG;
  798. // AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.F5060_OR_TSF = CIP_60_FIELDS;
  799. break;
  800. default:
  801. Status = STATUS_NOT_SUPPORTED;
  802. goto AbortLoading;
  803. break;
  804. }
  805. switch(pDevExt->VideoFormatIndex) {
  806. case AVCSTRM_FORMAT_SDDV_NTSC:
  807. case AVCSTRM_FORMAT_HDDV_NTSC:
  808. case AVCSTRM_FORMAT_SDLDV_NTSC:
  809. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.F5060_OR_TSF = CIP_60_FIELDS;
  810. break;
  811. case AVCSTRM_FORMAT_SDDV_PAL:
  812. case AVCSTRM_FORMAT_HDDV_PAL:
  813. case AVCSTRM_FORMAT_SDLDV_PAL:
  814. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].cipHdr2.F5060_OR_TSF = CIP_50_FIELDS;
  815. break;
  816. }
  817. //
  818. // Note: ExAllocatePool must be done after DVIniDevExtStruct() since ->pStreamInfoObject is initialized (zeroed) there.
  819. // Since the formats that this driver supports are known only once the device is identified,
  820. // the stream information table needs to be customized. Make a copy and customize it.
  821. //
  822. //
  823. // Set the size of the stream information structure that we return in SRB_GET_STREAM_INFO
  824. //
  825. pDevExt->pStreamInfoObject = (STREAM_INFO_AND_OBJ *)
  826. ExAllocatePool(NonPagedPool, sizeof(STREAM_INFO_AND_OBJ) * pDevExt->NumOfPins);
  827. if(!pDevExt->pStreamInfoObject) {
  828. ASSERT(pDevExt->pStreamInfoObject && "STATUS_INSUFFICIENT_RESOURCES");
  829. Status = STATUS_INSUFFICIENT_RESOURCES;
  830. goto AbortLoading;
  831. }
  832. pConfigInfo->StreamDescriptorSize =
  833. (pDevExt->NumOfPins * sizeof(HW_STREAM_INFORMATION)) + // number of stream descriptors
  834. sizeof(HW_STREAM_HEADER); // and 1 stream header
  835. TRACE(TL_PNP_TRACE,("pStreamInfoObject:%x; StreamDescriptorSize:%d\n", pDevExt->pStreamInfoObject, pConfigInfo->StreamDescriptorSize ));
  836. // Make a copy of the default stream information
  837. for(i = 0; i < pDevExt->NumOfPins; i++ ) {
  838. if(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_MPEG2TS)
  839. pDevExt->pStreamInfoObject[i] = MPEGStreams[i];
  840. else
  841. pDevExt->pStreamInfoObject[i] = DVStreams[i];
  842. }
  843. switch(pDevExt->VideoFormatIndex) {
  844. case AVCSTRM_FORMAT_SDDV_NTSC:
  845. case AVCSTRM_FORMAT_SDDV_PAL:
  846. // Set AUDIO AUX to reflect: NTSC/PAL, consumer DV or DVCPRO
  847. if(pDevExt->bDVCPro) {
  848. // Note: there is no DVInfo in VideoInfoHeader but there is for the iAV streams.
  849. DvcrPALiavStream.DVVideoInfo.dwDVAAuxSrc = PAL_DVAAuxSrc_DVCPRO;
  850. DvcrNTSCiavStream.DVVideoInfo.dwDVAAuxSrc = NTSC_DVAAuxSrc_DVCPRO;
  851. } else {
  852. DvcrPALiavStream.DVVideoInfo.dwDVAAuxSrc = PAL_DVAAuxSrc;
  853. DvcrNTSCiavStream.DVVideoInfo.dwDVAAuxSrc = NTSC_DVAAuxSrc;
  854. }
  855. }
  856. TRACE(TL_PNP_WARNING,("#### %s:%s:%s PhyDO %x, BusDO %x, DevExt %x, FrmSz %d; StrmIf %d\n",
  857. pDevExt->ulDevType == ED_DEVTYPE_VCR ? "DVCR" : pDevExt->ulDevType == ED_DEVTYPE_CAMERA ? "Camera" : "Tuner?",
  858. pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_SDDV_NTSC ? "SD:NTSC" : pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_SDDV_PAL ? "PAL" : "MPEG_TS?",
  859. (pDevExt->ulDevType == ED_DEVTYPE_VCR && pDevExt->pDevInPlugs->NumPlugs > 0) ? "CanRec" : "NotRec",
  860. pDevExt->pPhysicalDeviceObject,
  861. pDevExt->pBusDeviceObject,
  862. pDevExt,
  863. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize,
  864. pConfigInfo->StreamDescriptorSize
  865. ));
  866. return STATUS_SUCCESS;
  867. AbortLoading:
  868. DvFreeTextualString(pDevExt, &pDevExt->UnitIDs);
  869. return Status;
  870. }
  871. NTSTATUS
  872. AVCTapeInitializeCompleted(
  873. IN PDVCR_EXTENSION pDevExt
  874. )
  875. /*++
  876. Routine Description:
  877. This is where we perform the necessary initialization tasks.
  878. --*/
  879. {
  880. PAGED_CODE();
  881. #ifdef SUPPORT_ACCESS_DEVICE_INTERFACE
  882. //
  883. // Access to the device's interface section
  884. //
  885. DVAccessDeviceInterface(pDevExt, NUMBER_OF_DV_CATEGORIES, DVCategories);
  886. #endif
  887. return STATUS_SUCCESS;
  888. }
  889. NTSTATUS
  890. AVCTapeGetStreamInfo(
  891. IN PDVCR_EXTENSION pDevExt,
  892. IN ULONG ulBytesToTransfer,
  893. IN PHW_STREAM_HEADER pStreamHeader,
  894. IN PHW_STREAM_INFORMATION pStreamInfo
  895. )
  896. /*++
  897. Routine Description:
  898. Returns the information of all streams that are supported by the driver
  899. --*/
  900. {
  901. ULONG i;
  902. PAGED_CODE();
  903. //
  904. // Make sure we have enough space to return our stream information
  905. //
  906. if(ulBytesToTransfer < sizeof (HW_STREAM_HEADER) + sizeof(HW_STREAM_INFORMATION) * pDevExt->NumOfPins ) {
  907. TRACE(TL_PNP_ERROR,("GetStrmInfo: ulBytesToTransfer %d ?= %d\n",
  908. ulBytesToTransfer, sizeof(HW_STREAM_HEADER) + sizeof(HW_STREAM_INFORMATION) * pDevExt->NumOfPins ));
  909. ASSERT(ulBytesToTransfer >= sizeof(HW_STREAM_HEADER) + sizeof(HW_STREAM_INFORMATION) * pDevExt->NumOfPins );
  910. return STATUS_INVALID_PARAMETER;
  911. }
  912. //
  913. // Initialize stream header:
  914. // Device properties
  915. // Streams
  916. //
  917. RtlZeroMemory(pStreamHeader, sizeof(HW_STREAM_HEADER));
  918. pStreamHeader->NumberOfStreams = pDevExt->NumOfPins;
  919. pStreamHeader->SizeOfHwStreamInformation = sizeof(HW_STREAM_INFORMATION);
  920. pStreamHeader->NumDevPropArrayEntries = NUMBER_VIDEO_DEVICE_PROPERTIES;
  921. pStreamHeader->DevicePropertiesArray = (PKSPROPERTY_SET) VideoDeviceProperties;
  922. pStreamHeader->NumDevEventArrayEntries = NUMBER_VIDEO_DEVICE_EVENTS;
  923. pStreamHeader->DeviceEventsArray = (PKSEVENT_SET) VideoDeviceEvents;
  924. TRACE(TL_PNP_TRACE,("GetStreamInfo: StreamPropEntries %d, DevicePropEntries %d\n",
  925. pStreamHeader->NumberOfStreams, pStreamHeader->NumDevPropArrayEntries));
  926. //
  927. // Initialize the stream structure.
  928. //
  929. ASSERT(pDevExt->pStreamInfoObject);
  930. for( i = 0; i < pDevExt->NumOfPins; i++ )
  931. *pStreamInfo++ = pDevExt->pStreamInfoObject[i].hwStreamInfo;
  932. //
  933. //
  934. // store a pointer to the topology for the device
  935. //
  936. if(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_MPEG2TS)
  937. pStreamHeader->Topology = &MPEG2TSTopology;
  938. else
  939. pStreamHeader->Topology = &DVTopology;
  940. return STATUS_SUCCESS;
  941. }
  942. BOOL
  943. AVCTapeVerifyDataFormat(
  944. IN ULONG NumOfPins,
  945. PKSDATAFORMAT pKSDataFormatToVerify,
  946. ULONG StreamNumber,
  947. ULONG ulSupportedFrameSize,
  948. STREAM_INFO_AND_OBJ * pStreamInfoObject
  949. )
  950. /*++
  951. Routine Description:
  952. Checks the validity of a format request by walking through the array of
  953. supported KSDATA_RANGEs for a given stream.
  954. Arguments:
  955. pKSDataFormatToVerify - pointer to a KS_DATAFORMAT_VIDEOINFOHEADER structure.
  956. StreamNumber - index of the stream being queried / opened.
  957. Return Value:
  958. TRUE if the format is supported
  959. FALSE if the format cannot be supported
  960. --*/
  961. {
  962. PKSDATAFORMAT *pAvailableFormats;
  963. int NumberOfFormatArrayEntries;
  964. int j;
  965. PAGED_CODE();
  966. //
  967. // Make sure the stream index is valid
  968. //
  969. if(StreamNumber >= NumOfPins) {
  970. return FALSE;
  971. }
  972. //
  973. // How many formats does this data range support?
  974. //
  975. NumberOfFormatArrayEntries = pStreamInfoObject[StreamNumber].hwStreamInfo.NumberOfFormatArrayEntries;
  976. //
  977. // Get the pointer to the array of available formats
  978. //
  979. pAvailableFormats = pStreamInfoObject[StreamNumber].hwStreamInfo.StreamFormatsArray;
  980. //
  981. // Walk the array, searching for a match
  982. //
  983. for (j = 0; j < NumberOfFormatArrayEntries; j++, pAvailableFormats++) {
  984. if (!DVCmpGUIDsAndFormatSize(
  985. pKSDataFormatToVerify,
  986. *pAvailableFormats,
  987. FALSE /* CompareFormatSize */ )) {
  988. continue;
  989. }
  990. //
  991. // Additional verification test
  992. //
  993. if(IsEqualGUID (&pKSDataFormatToVerify->Specifier, &KSDATAFORMAT_SPECIFIER_VIDEOINFO)) {
  994. // Make sure the image size matches the supported frame size
  995. if( ((PKS_DATAFORMAT_VIDEOINFOHEADER)pKSDataFormatToVerify)->VideoInfoHeader.bmiHeader.biSizeImage !=
  996. ulSupportedFrameSize) {
  997. TRACE(TL_STRM_TRACE,("VIDEOINFO: biSizeToVerify %d != Supported %d\n",
  998. ((PKS_DATAFORMAT_VIDEOINFOHEADER)pKSDataFormatToVerify)->VideoInfoHeader.bmiHeader.biSizeImage,
  999. ulSupportedFrameSize
  1000. ));
  1001. continue;
  1002. } else {
  1003. TRACE(TL_STRM_TRACE,("VIDEOINFO: **** biSizeToVerify %d == Supported %d\n",
  1004. ((PKS_DATAFORMAT_VIDEOINFOHEADER)pKSDataFormatToVerify)->VideoInfoHeader.bmiHeader.biSizeImage,
  1005. ulSupportedFrameSize
  1006. ));
  1007. }
  1008. } else if (IsEqualGUID (&pKSDataFormatToVerify->Specifier, &KSDATAFORMAT_SPECIFIER_DVINFO)) {
  1009. // Test 50/60 bit
  1010. if((((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVAAuxSrc & MASK_AUX_50_60_BIT) !=
  1011. (((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVAAuxSrc & MASK_AUX_50_60_BIT) ||
  1012. (((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVVAuxSrc & MASK_AUX_50_60_BIT) !=
  1013. (((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVVAuxSrc & MASK_AUX_50_60_BIT) ) {
  1014. TRACE(TL_STRM_TRACE,("DVINFO VerifyFormat failed: ASrc: %x!=%x (MSDV);or VSrc: %x!=%x\n",
  1015. ((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVAAuxSrc,
  1016. ((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVAAuxSrc,
  1017. ((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVVAuxSrc,
  1018. ((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVVAuxSrc
  1019. ));
  1020. continue;
  1021. }
  1022. TRACE(TL_STRM_TRACE,("DVINFO: dwDVAAuxCtl %x, Supported %x\n",
  1023. ((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVAAuxSrc,
  1024. ((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVAAuxSrc
  1025. ));
  1026. TRACE(TL_STRM_TRACE,("DVINFO: dwDVVAuxSrc %x, Supported %x\n",
  1027. ((PKS_DATARANGE_DVVIDEO) pKSDataFormatToVerify)->DVVideoInfo.dwDVVAuxSrc,
  1028. ((PKS_DATARANGE_DVVIDEO) *pAvailableFormats)->DVVideoInfo.dwDVVAuxSrc
  1029. ));
  1030. }
  1031. else if (IsEqualGUID (&pKSDataFormatToVerify->SubFormat, &KSDATAFORMAT_TYPE_MPEG2_TRANSPORT) ) {
  1032. TRACE(TL_STRM_TRACE,("VerifyFormat: MPEG2 subformat\n"));
  1033. }
  1034. else if (IsEqualGUID (&pKSDataFormatToVerify->SubFormat, &KSDATAFORMAT_TYPE_MPEG2_TRANSPORT_STRIDE)
  1035. && pKSDataFormatToVerify->FormatSize >= (sizeof(KSDATARANGE)+sizeof(MPEG2_TRANSPORT_STRIDE)) ) {
  1036. //
  1037. // Verify the STRIDE structure
  1038. //
  1039. if( ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwOffset != MPEG2TS_STRIDE_OFFSET
  1040. || ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwPacketLength != MPEG2TS_STRIDE_PACKET_LEN
  1041. || ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwStride != MPEG2TS_STRIDE_STRIDE_LEN
  1042. ) {
  1043. TRACE(TL_STRM_ERROR,("VerifyDataFormat: Invalid STRIDE parameters: dwOffset:%d; dwPacketLength:%d; dwStride:%d\n",
  1044. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwOffset,
  1045. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwPacketLength,
  1046. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pKSDataFormatToVerify)->Stride.dwStride
  1047. ));
  1048. continue;
  1049. }
  1050. TRACE(TL_STRM_TRACE,("VerifyFormat: MPEG2 stride subformat\n"));
  1051. }
  1052. else {
  1053. continue;
  1054. }
  1055. return TRUE;
  1056. }
  1057. return FALSE;
  1058. }
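/*
 * Editor's sketch (not part of the original source): how the verification walk
 * above would be invoked when validating a proposed open format. The pDevExt
 * fields and the AVCStrmFormatInfoTable FrameSize lookup are the ones used
 * elsewhere in this file; the wrapper and its name are hypothetical.
 */
#if 0 // Illustrative only; not compiled with the driver.
static BOOL
AVCTapeIsOpenFormatSupported(
    IN PDVCR_EXTENSION pDevExt,
    IN PKSDATAFORMAT pOpenFormat,
    IN ULONG StreamNumber
    )
{
    // Reject the open if the proposed format does not match any KSDATARANGE
    // advertised for this stream.
    return AVCTapeVerifyDataFormat(
        pDevExt->NumOfPins,
        pOpenFormat,
        StreamNumber,
        AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize,
        pDevExt->pStreamInfoObject
        );
}
#endif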
  1059. NTSTATUS
  1060. AVCTapeGetDataIntersection(
  1061. IN ULONG NumOfPins,
  1062. IN ULONG ulStreamNumber,
  1063. IN PKSDATARANGE pDataRange,
  1064. OUT PVOID pDataFormatBuffer,
  1065. IN ULONG ulSizeOfDataFormatBuffer,
  1066. IN ULONG ulSupportedFrameSize,
  1067. OUT ULONG *pulActualBytesTransferred,
  1068. STREAM_INFO_AND_OBJ * pStreamInfoObject
  1069. #ifdef SUPPORT_NEW_AVC
  1070. ,
  1071. HANDLE hPlugLocalOut,
  1072. HANDLE hPlugLocalIn
  1073. #endif
  1074. )
  1075. /*++
  1076. Routine Description:
  1077. Called to get a DATAFORMAT from a DATARANGE.
  1078. --*/
  1079. {
  1080. BOOL bMatchFound = FALSE;
  1081. ULONG ulFormatSize;
  1082. ULONG j;
  1083. ULONG ulNumberOfFormatArrayEntries;
  1084. PKSDATAFORMAT *pAvailableFormats;
  1085. #ifdef SUPPORT_NEW_AVC
  1086. AVCPRECONNECTINFO * pPreConnectInfo;
  1087. AVCCONNECTINFO * pConnectInfo;
  1088. #endif
  1089. PAGED_CODE();
  1090. //
  1091. // Check that the stream number is valid
  1092. //
  1093. if(ulStreamNumber >= NumOfPins) {
  1094. TRACE(TL_STRM_ERROR,("FormatFromRange: ulStreamNumber %d >= NumOfPins %d\n", ulStreamNumber, NumOfPins));
  1095. ASSERT(ulStreamNumber < NumOfPins && "Invalid stream index");
  1096. return STATUS_NOT_SUPPORTED;
  1097. }
  1098. // Number of formats this stream supports
  1099. ulNumberOfFormatArrayEntries = pStreamInfoObject[ulStreamNumber].hwStreamInfo.NumberOfFormatArrayEntries;
  1100. //
  1101. // Get the pointer to the array of available formats
  1102. //
  1103. pAvailableFormats = pStreamInfoObject[ulStreamNumber].hwStreamInfo.StreamFormatsArray;
  1104. //
  1105. // Walk the formats supported by the stream searching for a match
  1106. // Note: DataIntersection is really enumerating supported MediaTypes only!
  1107. // So whether the format being compared is NTSC or PAL, we need to succeed for both;
  1108. // however, we will copy back only the format that is currently supported (NTSC or PAL).
  1109. //
  1110. for(j = 0; j < ulNumberOfFormatArrayEntries; j++, pAvailableFormats++) {
  1111. if(!DVCmpGUIDsAndFormatSize(pDataRange, *pAvailableFormats, TRUE)) {
  1112. TRACE(TL_STRM_TRACE,("CmpGUIDsAndFormatSize failed! FormatSize:%d?=%d\n", pDataRange->FormatSize, (*pAvailableFormats)->FormatSize));
  1113. continue;
  1114. }
  1115. //
  1116. // SUBTYPE_DVSD has a fixed sample size;
  1117. //
  1118. if( IsEqualGUID (&pDataRange->SubFormat, &KSDATAFORMAT_SUBTYPE_DVSD)
  1119. && (*pAvailableFormats)->SampleSize != ulSupportedFrameSize) {
  1120. TRACE(TL_STRM_TRACE,("_SUBTYPE_DVSD: StrmNum %d, %d of %d formats, SizeToVerify %d *!=* SupportedSampleSize %d\n",
  1121. ulStreamNumber,
  1122. j+1, ulNumberOfFormatArrayEntries,
  1123. (*pAvailableFormats)->SampleSize,
  1124. ulSupportedFrameSize));
  1125. continue;
  1126. }
  1127. // -------------------------------------------------------------------
  1128. // Specifier FORMAT_VideoInfo for VIDEOINFOHEADER
  1129. // -------------------------------------------------------------------
  1130. if(IsEqualGUID (&pDataRange->Specifier, &KSDATAFORMAT_SPECIFIER_VIDEOINFO)) {
  1131. PKS_DATARANGE_VIDEO pDataRangeVideoToVerify = (PKS_DATARANGE_VIDEO) pDataRange;
  1132. PKS_DATARANGE_VIDEO pDataRangeVideo = (PKS_DATARANGE_VIDEO) *pAvailableFormats;
  1133. #if 0
  1134. //
  1135. // Check that the other fields match
  1136. //
  1137. if ((pDataRangeVideoToVerify->bFixedSizeSamples != pDataRangeVideo->bFixedSizeSamples)
  1138. || (pDataRangeVideoToVerify->bTemporalCompression != pDataRangeVideo->bTemporalCompression)
  1139. || (pDataRangeVideoToVerify->StreamDescriptionFlags != pDataRangeVideo->StreamDescriptionFlags)
  1140. || (pDataRangeVideoToVerify->MemoryAllocationFlags != pDataRangeVideo->MemoryAllocationFlags)
  1141. #ifdef COMPARE_CONFIG_CAP
  1142. || (RtlCompareMemory (&pDataRangeVideoToVerify->ConfigCaps,
  1143. &pDataRangeVideo->ConfigCaps,
  1144. sizeof (KS_VIDEO_STREAM_CONFIG_CAPS)) !=
  1145. sizeof (KS_VIDEO_STREAM_CONFIG_CAPS))
  1146. #endif
  1147. ) {
  1148. TRACE(TL_STRM_TRACE,("FormatFromRange: *!=* bFixSizeSample (%d %d) (%d %d) (%d %d) (%x %x)\n",
  1149. pDataRangeVideoToVerify->bFixedSizeSamples, pDataRangeVideo->bFixedSizeSamples,
  1150. pDataRangeVideoToVerify->bTemporalCompression , pDataRangeVideo->bTemporalCompression,
  1151. pDataRangeVideoToVerify->StreamDescriptionFlags, pDataRangeVideo->StreamDescriptionFlags,
  1152. pDataRangeVideoToVerify->ConfigCaps.VideoStandard, pDataRangeVideo->ConfigCaps.VideoStandard
  1153. ));
  1154. continue;
  1155. } else {
  1156. TRACE(TL_STRM_TRACE,("FormatFromRange: == bFixSizeSample (%d %d) (%d %d) (%d %d) (%x %x)\n",
  1157. pDataRangeVideoToVerify->bFixedSizeSamples, pDataRangeVideo->bFixedSizeSamples,
  1158. pDataRangeVideoToVerify->bTemporalCompression , pDataRangeVideo->bTemporalCompression,
  1159. pDataRangeVideoToVerify->StreamDescriptionFlags, pDataRangeVideo->StreamDescriptionFlags,
  1160. pDataRangeVideoToVerify->ConfigCaps.VideoStandard, pDataRangeVideo->ConfigCaps.VideoStandard
  1161. ));
  1162. }
  1163. #endif
  1164. bMatchFound = TRUE;
  1165. ulFormatSize = sizeof (KSDATAFORMAT) +
  1166. KS_SIZE_VIDEOHEADER (&pDataRangeVideo->VideoInfoHeader);
  1167. if(ulSizeOfDataFormatBuffer == 0) {
  1168. // We have not actually returned this much data;
  1169. // this "size" will be used by KsProxy to send down
  1170. // a buffer of that size in the next query.
  1171. *pulActualBytesTransferred = ulFormatSize;
  1172. return STATUS_BUFFER_OVERFLOW;
  1173. }
  1174. // Caller wants the full data format
  1175. if(ulSizeOfDataFormatBuffer < ulFormatSize) {
  1176. TRACE(TL_STRM_TRACE,("VIDEOINFO: StreamNum %d, SizeOfDataFormatBuffer %d < ulFormatSize %d\n",ulStreamNumber, ulSizeOfDataFormatBuffer, ulFormatSize));
  1177. return STATUS_BUFFER_TOO_SMALL;
  1178. }
  1179. // KS_DATAFORMAT_VIDEOINFOHEADER
  1180. // KSDATAFORMAT DataFormat;
  1181. // KS_VIDEOINFOHEADER VideoInfoHeader;
  1182. RtlCopyMemory(
  1183. &((PKS_DATAFORMAT_VIDEOINFOHEADER)pDataFormatBuffer)->DataFormat,
  1184. &pDataRangeVideo->DataRange,
  1185. sizeof (KSDATAFORMAT));
  1186. // This size is different from our data range size, which also contains the ConfigCaps
  1187. ((PKSDATAFORMAT)pDataFormatBuffer)->FormatSize = ulFormatSize;
  1188. *pulActualBytesTransferred = ulFormatSize;
  1189. RtlCopyMemory(
  1190. &((PKS_DATAFORMAT_VIDEOINFOHEADER) pDataFormatBuffer)->VideoInfoHeader,
  1191. &pDataRangeVideo->VideoInfoHeader,
  1192. KS_SIZE_VIDEOHEADER (&pDataRangeVideo->VideoInfoHeader));
  1193. TRACE(TL_STRM_TRACE,("FormatFromRange: Matched, StrmNum %d, FormatSize %d, CopySize %d; FormatBufferSize %d, biSizeImage.\n",
  1194. ulStreamNumber, (*pAvailableFormats)->FormatSize, ulFormatSize, ulSizeOfDataFormatBuffer,
  1195. ((PKS_DATAFORMAT_VIDEOINFOHEADER) pDataFormatBuffer)->VideoInfoHeader.bmiHeader.biSizeImage));
  1196. return STATUS_SUCCESS;
  1197. } else if (IsEqualGUID (&pDataRange->Specifier, &KSDATAFORMAT_SPECIFIER_DVINFO)) {
  1198. // -------------------------------------------------------------------
  1199. // Specifier FORMAT_DVInfo for KS_DATARANGE_DVVIDEO
  1200. // -------------------------------------------------------------------
  1201. // MATCH FOUND!
  1202. bMatchFound = TRUE;
  1203. ulFormatSize = sizeof(KS_DATARANGE_DVVIDEO);
  1204. if(ulSizeOfDataFormatBuffer == 0) {
  1205. // We have not actually returned this much data;
  1206. // this "size" will be used by KsProxy to send down
  1207. // a buffer of that size in the next query.
  1208. *pulActualBytesTransferred = ulFormatSize;
  1209. return STATUS_BUFFER_OVERFLOW;
  1210. }
  1211. // Caller wants the full data format
  1212. if (ulSizeOfDataFormatBuffer < ulFormatSize) {
  1213. TRACE(TL_STRM_ERROR,("DVINFO: StreamNum %d, SizeOfDataFormatBuffer %d < ulFormatSize %d\n", ulStreamNumber, ulSizeOfDataFormatBuffer, ulFormatSize));
  1214. return STATUS_BUFFER_TOO_SMALL;
  1215. }
  1216. RtlCopyMemory(
  1217. pDataFormatBuffer,
  1218. *pAvailableFormats,
  1219. (*pAvailableFormats)->FormatSize);
  1220. ((PKSDATAFORMAT)pDataFormatBuffer)->FormatSize = ulFormatSize;
  1221. *pulActualBytesTransferred = ulFormatSize;
  1222. #ifdef SUPPORT_NEW_AVC // Data intersection; return hPlug if flag is set
  1223. pPreConnectInfo = &(((KS_DATARANGE_DV_AVC *) *pAvailableFormats)->ConnectInfo);
  1224. pConnectInfo = &(((KS_DATAFORMAT_DV_AVC *) pDataFormatBuffer)->ConnectInfo);
  1225. if(pPreConnectInfo->Flags & (KSPIN_FLAG_AVC_PCRONLY | KSPIN_FLAG_AVC_FIXEDPCR)) {
  1226. // Need to return the plug handle
  1227. pConnectInfo->hPlug = \
  1228. (pPreConnectInfo->DataFlow == KSPIN_DATAFLOW_OUT) ? hPlugLocalOut : hPlugLocalIn;
  1229. } else {
  1230. // Choose any that is available
  1231. // Set to 0 for now.
  1232. pConnectInfo->hPlug = NULL;
  1233. }
  1234. #if DBG
  1235. TRACE(TL_STRM_TRACE,("DVINFO: pPreConnectInfo:%x; pConnectInfo:%x\n", pPreConnectInfo, pConnectInfo));
  1236. if(TapeDebugLevel >= 2) {
  1237. ASSERT(FALSE && "Check ConnectInfo!");
  1238. }
  1239. #endif
  1240. #endif
  1241. TRACE(TL_STRM_TRACE,("FormatFromRange: Matched, StrmNum %d, FormatSize %d, CopySize %d; FormatBufferSize %d.\n",
  1242. ulStreamNumber, (*pAvailableFormats)->FormatSize, ulFormatSize, ulSizeOfDataFormatBuffer));
  1243. return STATUS_SUCCESS;
  1244. } else if (IsEqualGUID (&pDataRange->SubFormat, &KSDATAFORMAT_TYPE_MPEG2_TRANSPORT_STRIDE) ){
  1245. // -------------------------------------------------------------------
  1246. // Compare subformat since it is unique
  1247. // Subformat STATIC_KSDATAFORMAT_TYPE_MPEG2_TRANSPORT_STRIDE
  1248. // -------------------------------------------------------------------
  1249. #if 0 // Not enforced.
  1250. // Only for a certain specifier
  1251. if(!IsEqualGUID (&pDataRange->Specifier, &KSDATAFORMAT_SPECIFIER_61883_4)) {
  1252. TRACE(TL_STRM_TRACE,("SubFormat KSDATAFORMAT_TYPE_MPEG2_TRANSPORT_STRIDE but Specifier is not STATIC_KSDATAFORMAT_SPECIFIER_61883_4\n"));
  1253. continue;
  1254. }
  1255. #endif
  1256. // Sample size must match!
  1257. if((*pAvailableFormats)->SampleSize != pDataRange->SampleSize) {
  1258. TRACE(TL_STRM_TRACE,("SampleSize(MPEG2_TRANSPORT_STRIDE): Available:%d != Range:%d\n", (*pAvailableFormats)->SampleSize, pDataRange->SampleSize));
  1259. continue;
  1260. }
  1261. // MATCH FOUND!
  1262. bMatchFound = TRUE;
  1263. #ifdef SUPPORT_NEW_AVC
  1264. ulFormatSize = sizeof(KS_DATARANGE_MPEG2TS_STRIDE_AVC);
  1265. #else
  1266. ulFormatSize = sizeof(KS_DATARANGE_MPEG2TS_STRIDE_AVC) - sizeof(AVCPRECONNECTINFO); // FormatSize; exclude AVCPRECONNECTINFO
  1267. #endif
  1268. if(ulSizeOfDataFormatBuffer == 0) {
  1269. // We have not actually returned this much data;
  1270. // this "size" will be used by KsProxy to send down
  1271. // a buffer of that size in the next query.
  1272. *pulActualBytesTransferred = ulFormatSize;
  1273. return STATUS_BUFFER_OVERFLOW;
  1274. }
  1275. // Caller wants the full data format
  1276. if (ulSizeOfDataFormatBuffer < ulFormatSize) {
  1277. TRACE(TL_STRM_ERROR,("MPEG2_TRANSPORT_STRIDE: StreamNum %d, SizeOfDataFormatBuffer %d < ulFormatSize %d\n", ulStreamNumber, ulSizeOfDataFormatBuffer, ulFormatSize));
  1278. return STATUS_BUFFER_TOO_SMALL;
  1279. }
  1280. //
  1281. // Verify the STRIDE structure
  1282. //
  1283. if( ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwOffset != MPEG2TS_STRIDE_OFFSET
  1284. || ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwPacketLength != MPEG2TS_STRIDE_PACKET_LEN
  1285. || ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwStride != MPEG2TS_STRIDE_STRIDE_LEN
  1286. ) {
  1287. TRACE(TL_PNP_ERROR,("AVCTapeGetDataIntersection:Invalid STRIDE parameters: dwOffset:%d; dwPacketLength:%d; dwStride:%d\n",
  1288. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwOffset,
  1289. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwPacketLength,
  1290. ((KS_DATARANGE_MPEG2TS_STRIDE_AVC *) pDataRange)->Stride.dwStride
  1291. ));
  1292. return STATUS_INVALID_PARAMETER;
  1293. }
  1294. RtlCopyMemory(pDataFormatBuffer, *pAvailableFormats, (*pAvailableFormats)->FormatSize);
  1295. ((PKSDATAFORMAT)pDataFormatBuffer)->FormatSize = ulFormatSize;
  1296. *pulActualBytesTransferred = ulFormatSize;
  1297. #ifdef SUPPORT_NEW_AVC // Data intersection; return hPlug if flag is set
  1298. pPreConnectInfo = &(((KS_DATARANGE_MPEG2TS_AVC *) *pAvailableFormats)->ConnectInfo);
  1299. pConnectInfo = &(((KS_DATAFORMAT_MPEG2TS_AVC *) pDataFormatBuffer)->ConnectInfo);
  1300. if(pPreConnectInfo->Flags & (KSPIN_FLAG_AVC_PCRONLY | KSPIN_FLAG_AVC_FIXEDPCR)) {
  1301. // Need to return the plug handle
  1302. pConnectInfo->hPlug = \
  1303. (pPreConnectInfo->DataFlow == KSPIN_DATAFLOW_OUT) ? hPlugLocalOut : hPlugLocalIn;
  1304. } else {
  1305. // Choose any that is available
  1306. // Set to 0 for now.
  1307. pConnectInfo->hPlug = NULL;
  1308. }
  1309. #if DBG
  1310. TRACE(TL_STRM_TRACE,("MPEG2TS: pPreConnectInfo:%x; pConnectInfo:%x\n", pPreConnectInfo, pConnectInfo));
  1311. ASSERT(FALSE && "Check ConnectInfo!");
  1312. #endif
  1313. #endif
  1314. TRACE(TL_STRM_TRACE,("FormatFromRange:(MPEG2TS_STRIDE) Matched, StrmNum %d, FormatSize %d, CopySize %d; FormatBufferSize %d.\n",
  1315. ulStreamNumber, (*pAvailableFormats)->FormatSize, ulFormatSize, ulSizeOfDataFormatBuffer));
  1316. return STATUS_SUCCESS;
  1317. } else if (IsEqualGUID (&pDataRange->SubFormat, &KSDATAFORMAT_TYPE_MPEG2_TRANSPORT)) {
  1318. // -------------------------------------------------------------------
  1319. // Compare subformat since it is unique
  1320. // Subformat STATIC_KSDATAFORMAT_TYPE_MPEG2_TRANSPORT
  1321. // -------------------------------------------------------------------
  1322. // Sample size must match!
  1323. if((*pAvailableFormats)->SampleSize != pDataRange->SampleSize) {
  1324. TRACE(TL_STRM_TRACE,("SampleSize(MPEG2_TRANSPORT): Available:%d != Range:%d\n", (*pAvailableFormats)->SampleSize, pDataRange->SampleSize));
  1325. continue;
  1326. }
  1327. // MATCH FOUND!
  1328. bMatchFound = TRUE;
  1329. #ifdef SUPPORT_NEW_AVC
  1330. ulFormatSize = sizeof(KS_DATARANGE_MPEG2TS_AVC);
  1331. #else
  1332. ulFormatSize = sizeof(KS_DATARANGE_MPEG2TS_AVC) - sizeof(AVCPRECONNECTINFO); // FormatSize; exclude AVCPRECONNECTINFO
  1333. #endif
  1334. if(ulSizeOfDataFormatBuffer == 0) {
  1335. // We have not actually returned this much data;
  1336. // this "size" will be used by KsProxy to send down
  1337. // a buffer of that size in the next query.
  1338. *pulActualBytesTransferred = ulFormatSize;
  1339. return STATUS_BUFFER_OVERFLOW;
  1340. }
  1341. // Caller wants the full data format
  1342. if (ulSizeOfDataFormatBuffer < ulFormatSize) {
  1343. TRACE(TL_STRM_ERROR,("MPEG2_TRANSPORT: StreamNum %d, SizeOfDataFormatBuffer %d < ulFormatSize %d\n", ulStreamNumber, ulSizeOfDataFormatBuffer, ulFormatSize));
  1344. return STATUS_BUFFER_TOO_SMALL;
  1345. }
  1346. RtlCopyMemory(pDataFormatBuffer, *pAvailableFormats, (*pAvailableFormats)->FormatSize);
  1347. ((PKSDATAFORMAT)pDataFormatBuffer)->FormatSize = ulFormatSize;
  1348. *pulActualBytesTransferred = ulFormatSize;
  1349. #ifdef SUPPORT_NEW_AVC // Data intersection; return hPlug if flag is set
  1350. pPreConnectInfo = &(((KS_DATARANGE_MPEG2TS_AVC *) *pAvailableFormats)->ConnectInfo);
  1351. pConnectInfo = &(((KS_DATAFORMAT_MPEG2TS_AVC *) pDataFormatBuffer)->ConnectInfo);
  1352. if(pPreConnectInfo->Flags & (KSPIN_FLAG_AVC_PCRONLY | KSPIN_FLAG_AVC_FIXEDPCR)) {
  1353. // Need to return the plug handle
  1354. pConnectInfo->hPlug = \
  1355. (pPreConnectInfo->DataFlow == KSPIN_DATAFLOW_OUT) ? hPlugLocalOut : hPlugLocalIn;
  1356. } else {
  1357. // Choose any that is available
  1358. // Set to 0 for now.
  1359. pConnectInfo->hPlug = NULL;
  1360. }
  1361. #if DBG
  1362. TRACE(TL_STRM_TRACE,("MPEG2TS: pPreConnectInfo:%x; pConnectInfo:%x\n", pPreConnectInfo, pConnectInfo));
  1363. ASSERT(FALSE && "Check ConnectInfo!");
  1364. #endif
  1365. #endif
  1366. TRACE(TL_STRM_TRACE,("FormatFromRange: (MPEG2TS) Matched, StrmNum %d, FormatSize %d, CopySize %d; FormatBufferSize %d.\n",
  1367. ulStreamNumber, (*pAvailableFormats)->FormatSize, ulFormatSize, ulSizeOfDataFormatBuffer));
  1368. return STATUS_SUCCESS;
  1369. }
  1370. } // End of loop on all formats for this stream
  1371. if(!bMatchFound) {
  1372. TRACE(TL_STRM_TRACE,("FormatFromRange: No Match! StrmNum %d, pDataRange %x\n", ulStreamNumber, pDataRange));
  1373. }
  1374. return STATUS_NO_MATCH;
  1375. }
  1376. VOID
  1377. AVCTapeIniStrmExt(
  1378. PHW_STREAM_OBJECT pStrmObject,
  1379. PSTREAMEX pStrmExt,
  1380. PDVCR_EXTENSION pDevExt,
  1381. PSTREAM_INFO_AND_OBJ pStream
  1382. )
  1383. /*++
  1384. Routine Description:
1385. Initialize the stream extension structure.
  1386. --*/
  1387. {
  1388. PAGED_CODE();
  1389. RtlZeroMemory( pStrmExt, sizeof(STREAMEX) );
  1390. pStrmExt->bEOStream = TRUE; // Stream has not started yet!
  1391. pStrmExt->pStrmObject = pStrmObject;
  1392. pStrmExt->StreamState = KSSTATE_STOP;
  1393. pStrmExt->pDevExt = pDevExt;
  1394. pStrmExt->hMyClock = 0;
  1395. pStrmExt->hMasterClock = 0;
  1396. pStrmExt->hClock = 0;
  1397. //
1398. // Apply to both IN/OUT data flow
  1399. //
  1400. //
  1401. // Init isoch resources
  1402. //
  1403. pStrmExt->CurrentStreamTime = 0;
  1404. pStrmExt->cntSRBReceived = 0; // Total number of SRB_READ/WRITE_DATA
  1405. pStrmExt->cntDataSubmitted = 0; // Number of pending data buffer
  1406. pStrmExt->cntSRBCancelled = 0; // number of SRB_READ/WRITE_DATA cancelled
  1407. pStrmExt->FramesProcessed = 0;
  1408. pStrmExt->PictureNumber = 0;
  1409. pStrmExt->FramesDropped = 0;
  1410. //
1411. // Subcode data that can be extracted from a DV frame
  1412. //
  1413. pStrmExt->AbsTrackNumber = 0;
  1414. pStrmExt->bATNUpdated = FALSE;
  1415. pStrmExt->Timecode[0] = 0;
  1416. pStrmExt->Timecode[1] = 0;
  1417. pStrmExt->Timecode[2] = 0;
  1418. pStrmExt->Timecode[3] = 0;
  1419. pStrmExt->bTimecodeUpdated = FALSE;
  1420. //
1421. // Work item variables used to cancel all SRBs
  1422. //
  1423. pStrmExt->lCancelStateWorkItem = 0;
  1424. pStrmExt->AbortInProgress = FALSE;
  1425. #ifdef USE_WDM110
  1426. pStrmExt->pIoWorkItem = NULL;
  1427. #endif
  1428. //
  1429. // Cache the pointer
1430. // The entries in DVStreams[] are READONLY
  1431. //
  1432. pStrmExt->pStrmInfo = &pStream->hwStreamInfo;
  1433. pStrmObject->ReceiveDataPacket = (PVOID) pStream->hwStreamObject.ReceiveDataPacket;
  1434. pStrmObject->ReceiveControlPacket = (PVOID) pStream->hwStreamObject.ReceiveControlPacket;
  1435. pStrmObject->Dma = pStream->hwStreamObject.Dma;
  1436. pStrmObject->Pio = pStream->hwStreamObject.Pio;
  1437. pStrmObject->StreamHeaderWorkspace = pStream->hwStreamObject.StreamHeaderWorkspace;
  1438. pStrmObject->StreamHeaderMediaSpecific = pStream->hwStreamObject.StreamHeaderMediaSpecific;
  1439. pStrmObject->HwClockObject = pStream->hwStreamObject.HwClockObject;
  1440. pStrmObject->Allocator = pStream->hwStreamObject.Allocator;
  1441. pStrmObject->HwEventRoutine = pStream->hwStreamObject.HwEventRoutine;
  1442. }
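//
// A sketch of the open sequence implemented below: verify the requested format,
// allocate the synchronization objects (flow mutex, request mutex, data list lock),
// preallocate the request/abort IRPs and the pool of data-request IRPs, and then
// submit a synchronous AVCSTRM_OPEN to the AVCStrm filter driver. The isoch
// resources themselves are not allocated here; they are acquired later at the
// PAUSE transition, as noted in the routine description.
//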
  1443. NTSTATUS
  1444. AVCTapeOpenStream(
  1445. IN PHW_STREAM_OBJECT pStrmObject,
  1446. IN PKSDATAFORMAT pOpenFormat,
  1447. IN PAV_61883_REQUEST pAVReq
  1448. )
  1449. /*++
  1450. Routine Description:
1451. Verify the OpenFormat and then allocate the PC resources needed for this stream.
1452. The isoch resources, if needed, are allocated when the stream transitions to the PAUSE state.
  1453. --*/
  1454. {
  1455. NTSTATUS Status = STATUS_SUCCESS;
  1456. PSTREAMEX pStrmExt;
  1457. PDVCR_EXTENSION pDevExt;
  1458. ULONG idxStreamNumber;
  1459. KSPIN_DATAFLOW DataFlow;
  1460. PIRP pIrp = NULL;
  1461. FMT_INDEX VideoFormatIndexLast; // Last format index; used to detect change.
  1462. PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq;
  1463. ULONG i, j;
  1464. PAGED_CODE();
  1465. pDevExt = (PDVCR_EXTENSION) pStrmObject->HwDeviceExtension;
  1466. pStrmExt = (PSTREAMEX) pStrmObject->HwStreamExtension;
  1467. idxStreamNumber = pStrmObject->StreamNumber;
  1468. TRACE(TL_STRM_TRACE,("OpenStream: pStrmObject %x, pOpenFormat %x, cntOpen %d, idxStream %d\n", pStrmObject, pOpenFormat, pDevExt->cndStrmOpen, idxStreamNumber));
  1469. //
1470. // Proceed only when no one else has a stream open (or is opening one)
  1471. //
  1472. if(pDevExt->cndStrmOpen > 0) {
  1473. Status = STATUS_UNSUCCESSFUL;
  1474. TRACE(TL_STRM_WARNING,("OpenStream: %d stream open already; failed hr %x\n", pDevExt->cndStrmOpen, Status));
  1475. return Status;
  1476. }
  1477. pIrp = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  1478. if(!pIrp) {
  1479. ASSERT(pIrp && "IoAllocateIrp() failed!");
  1480. return STATUS_INSUFFICIENT_RESOURCES;
  1481. }
  1482. //
1483. // If a user switches from Camera to VCR mode very quickly (passing the OFF position),
1484. // the driver may not be reloaded to detect the correct mode of operation.
1485. // It is safe to redetect here.
1486. // Note: MSDV does return all the stream info for both input and output pin formats.
  1487. //
  1488. DVGetDevModeOfOperation(pDevExt);
  1489. //
1490. // WARNING: !! We advertise both input and output pins regardless of the mode of operation,
1491. // but a Camera does not support the input pin, so the open should fail!
1492. // If a VCR does not have an input pin, the open should fail as well.
1493. //
1494. // Skip this check for ED_DEVTYPE_UNKNOWN (most likely a hardware decoder box)
  1495. //
  1496. if((pDevExt->ulDevType == ED_DEVTYPE_CAMERA ||
  1497. (pDevExt->ulDevType == ED_DEVTYPE_VCR && pDevExt->pDevInPlugs->NumPlugs == 0))
  1498. && idxStreamNumber == 2) {
  1499. IoFreeIrp(pIrp);
  1500. TRACE(TL_STRM_WARNING,("OpenStream:Camera mode or VCR with 0 input pin cannot take external in.\n"));
  1501. return STATUS_UNSUCCESSFUL;
  1502. }
  1503. ASSERT(idxStreamNumber < pDevExt->NumOfPins);
  1504. ASSERT(pDevExt->paStrmExt[idxStreamNumber] == NULL); // Not yet open!
  1505. //
  1506. // Data flow
  1507. //
  1508. DataFlow= pDevExt->pStreamInfoObject[idxStreamNumber].hwStreamInfo.DataFlow;
  1509. //
  1510. // Initialize the stream extension structure
  1511. //
  1512. AVCTapeIniStrmExt(
  1513. pStrmObject,
  1514. pStrmExt,
  1515. pDevExt,
  1516. &pDevExt->pStreamInfoObject[idxStreamNumber]
  1517. );
  1518. //
1519. // A Sony NTSC device can play a PAL tape, and its plug will change its supported format accordingly.
1520. //
1521. // Query the video format (NTSC/PAL) supported.
1522. // Compare it with the default (set at load time or at the last OpenStream);
1523. // if it differs, change our internal video format table.
  1524. //
  1525. if(pDevExt->ulDevType != ED_DEVTYPE_CAMERA) {
  1526. VideoFormatIndexLast = pDevExt->VideoFormatIndex;
  1527. if(!DVGetDevSignalFormat(
  1528. pDevExt,
  1529. DataFlow,
  1530. pStrmExt
  1531. )) {
  1532. IoFreeIrp(pIrp);
  1533. // If querying its format has failed, we cannot open this stream.
  1534. TRACE(TL_STRM_WARNING,("OpenStream:Camera mode cannot take external in.\n"));
  1535. Status = STATUS_UNSUCCESSFUL;
  1536. goto AbortOpenStream;
  1537. }
  1538. }
  1539. //
  1540. // Check the video data format is okay.
  1541. //
  1542. if(!AVCTapeVerifyDataFormat(
  1543. pDevExt->NumOfPins,
  1544. pOpenFormat,
  1545. idxStreamNumber,
  1546. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize,
  1547. pDevExt->pStreamInfoObject
  1548. ) ) {
  1549. IoFreeIrp(pIrp);
  1550. TRACE(TL_STRM_ERROR,("OpenStream: AdapterVerifyFormat failed.\n"));
  1551. return STATUS_INVALID_PARAMETER;
  1552. }
  1553. //
1554. // This event guards against work item completion
  1555. //
  1556. KeInitializeEvent(&pStrmExt->hCancelDoneEvent, NotificationEvent, TRUE);
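// Note: the event is created in the signalled state (initial state TRUE) so that
// waiters such as CloseStream and SurpriseRemoval do not block when no cancel work
// item has ever been queued; AVCTapeCreateAbortWorkItem clears it before queuing
// the work item, and DVCancelSrbWorkItemRoutine sets it again when it finishes.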
  1557. //
1558. // Allocate synchronization structures for flow control and queue management
  1559. //
  1560. pStrmExt->hMutexFlow = (KMUTEX *) ExAllocatePool(NonPagedPool, sizeof(KMUTEX));
  1561. if(!pStrmExt->hMutexFlow) {
  1562. Status = STATUS_INSUFFICIENT_RESOURCES;
  1563. goto AbortOpenStream;
  1564. }
  1565. KeInitializeMutex( pStrmExt->hMutexFlow, 0); // Level 0 and in Signal state
  1566. pStrmExt->hMutexReq = (KMUTEX *) ExAllocatePool(NonPagedPool, sizeof(KMUTEX));
  1567. if(!pStrmExt->hMutexReq) {
  1568. Status = STATUS_INSUFFICIENT_RESOURCES;
  1569. goto AbortOpenStream;
  1570. }
  1571. KeInitializeMutex(pStrmExt->hMutexReq, 0);
  1572. pStrmExt->DataListLock = (KSPIN_LOCK *) ExAllocatePool(NonPagedPool, sizeof(KSPIN_LOCK));
  1573. if(!pStrmExt->DataListLock) {
  1574. Status = STATUS_INSUFFICIENT_RESOURCES;
  1575. goto AbortOpenStream;
  1576. }
  1577. KeInitializeSpinLock(pStrmExt->DataListLock);
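// Two IRPs are preallocated for talking to the AVCStrm filter: pIrpReq carries the
// synchronous requests issued under hMutexReq (open/close/get/set state), while
// pIrpAbort is kept separate so AVCSTRM_ABORT_STREAMING can be issued from the
// cancel work item, presumably even while pIrpReq is busy with another request.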
  1578. //
  1579. // Request AVCStrm to open a stream
  1580. //
  1581. pStrmExt->pIrpReq = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  1582. if(!pStrmExt->pIrpReq) {
  1583. Status = STATUS_INSUFFICIENT_RESOURCES;
  1584. goto AbortOpenStream;
  1585. }
  1586. pStrmExt->pIrpAbort = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  1587. if(!pStrmExt->pIrpAbort) {
  1588. IoFreeIrp(pStrmExt->pIrpReq); pStrmExt->pIrpReq = NULL;
  1589. Status = STATUS_INSUFFICIENT_RESOURCES;
  1590. goto AbortOpenStream;
  1591. }
  1592. //
1593. // Pre-allocate the detached (free) and attached (busy) lists for tracking
1594. // data requests sent down to the lower driver for processing.
  1595. //
  1596. InitializeListHead(&pStrmExt->DataDetachedListHead); pStrmExt->cntDataDetached = 0;
  1597. InitializeListHead(&pStrmExt->DataAttachedListHead); pStrmExt->cntDataAttached = 0;
  1598. for (i=0; i < MAX_DATA_REQUESTS; i++) {
  1599. pStrmExt->AsyncReq[i].pIrp = IoAllocateIrp(pDevExt->pBusDeviceObject->StackSize, FALSE);
  1600. if(!pStrmExt->AsyncReq[i].pIrp) {
  1601. // Free resource allocated so far.
  1602. for (j=0; j < i; j++) {
  1603. if(pStrmExt->AsyncReq[j].pIrp) {
  1604. IoFreeIrp(pStrmExt->AsyncReq[j].pIrp); pStrmExt->AsyncReq[j].pIrp = NULL;
  1605. }
  1606. RemoveEntryList(&pStrmExt->AsyncReq[j].ListEntry); pStrmExt->cntDataDetached--;
  1607. }
  1608. IoFreeIrp(pStrmExt->pIrpAbort); pStrmExt->pIrpAbort = NULL;
  1609. IoFreeIrp(pStrmExt->pIrpReq); pStrmExt->pIrpReq = NULL;
  1610. Status = STATUS_INSUFFICIENT_RESOURCES;
  1611. goto AbortOpenStream;
  1612. }
  1613. InsertTailList(&pStrmExt->DataDetachedListHead, &pStrmExt->AsyncReq[i].ListEntry); pStrmExt->cntDataDetached++;
  1614. }
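// Each DRIVER_REQUEST starts on the detached (free) list; when a data SRB is
// submitted it moves to the attached (busy) list, and AVCTapeReqReadDataCR moves it
// back to the detached list when the lower driver completes the IRP, so the pool of
// MAX_DATA_REQUESTS entries is recycled for the life of the stream.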
1615. // Synchronous calls share the same AV request packet in the stream extension.
  1616. EnterAVCStrm(pStrmExt->hMutexReq);
  1617. pAVCStrmReq = &pStrmExt->AVCStrmReq;
  1618. RtlZeroMemory(pAVCStrmReq, sizeof(AVC_STREAM_REQUEST_BLOCK));
  1619. INIT_AVCSTRM_HEADER(pAVCStrmReq, AVCSTRM_OPEN);
  1620. #if 1
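// For MPEG2TS the format table entry is adjusted per connection: the transmit (IN)
// direction appears to always keep the 61883 source packet header (SPH), while for
// capture (OUT) the SPH is kept only when the client asked for the
// MPEG2_TRANSPORT_STRIDE subformat and is stripped (AVCSTRM_FORMAT_OPTION_STRIP_SPH)
// for plain MPEG2_TRANSPORT.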
  1621. if(pDevExt->VideoFormatIndex == AVCSTRM_FORMAT_MPEG2TS) {
  1622. // Data Rate
  1623. // AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].AvgTimePerFrame = ?
  1624. if(DataFlow == KSPIN_DATAFLOW_IN) {
  1625. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].OptionFlags = 0;
  1626. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize = BUFFER_SIZE_MPEG2TS_SPH;
  1627. } else {
  1628. if(IsEqualGUID (&pOpenFormat->SubFormat, &KSDATAFORMAT_TYPE_MPEG2_TRANSPORT_STRIDE)) {
  1629. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].OptionFlags = 0;
  1630. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize = BUFFER_SIZE_MPEG2TS_SPH;
  1631. } else {
  1632. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].OptionFlags = AVCSTRM_FORMAT_OPTION_STRIP_SPH;
  1633. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize = BUFFER_SIZE_MPEG2TS;
  1634. }
  1635. }
  1636. }
  1637. #endif
  1638. pAVCStrmReq->CommandData.OpenStruct.AVCFormatInfo = &AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex];
  1639. pAVCStrmReq->CommandData.OpenStruct.AVCStreamContext = 0; // will return the AV stream context
  1640. pAVCStrmReq->CommandData.OpenStruct.DataFlow = DataFlow;
  1641. #ifdef SUPPORT_LOCAL_PLUGS
  1642. if(DataFlow == KSPIN_DATAFLOW_OUT)
  1643. pAVCStrmReq->CommandData.OpenStruct.hPlugLocal = pDevExt->hInputPCRLocal; // Remote(oPCR)->Local(iPCR)
  1644. else
  1645. pAVCStrmReq->CommandData.OpenStruct.hPlugLocal = pDevExt->hOutputPCRLocal; // Remote(iPCR)<-Local(oPCR)
  1646. #else
  1647. pAVCStrmReq->CommandData.OpenStruct.hPlugLocal = 0; // Not supported; use whatever 61883 supply.
  1648. #endif
  1649. Status =
  1650. AVCStrmReqSubmitIrpSynch(
  1651. pDevExt->pBusDeviceObject,
  1652. pStrmExt->pIrpReq,
  1653. pAVCStrmReq
  1654. );
1655. // Expect SUCCESS; anything else (including STATUS_PENDING) is a failure since this is a synchronous call.
  1656. if(STATUS_SUCCESS != Status) {
  1657. TRACE(TL_STRM_ERROR,("AVCSTRM_OPEN: failed %x; pAVCStrmReq:%x\n", Status, pAVCStrmReq));
1658. ASSERT(NT_SUCCESS(Status) && "AVCSTRM_OPEN failed!\n");
  1659. IoFreeIrp(pStrmExt->pIrpReq); pStrmExt->pIrpReq = NULL;
  1660. LeaveAVCStrm(pStrmExt->hMutexReq);
  1661. goto OpenStreamDone; // Failed to open!
  1662. }
  1663. //
  1664. // Save the context, which is used for subsequent call to AVCStrm filter driver
  1665. //
  1666. pStrmExt->AVCStreamContext = pAVCStrmReq->CommandData.OpenStruct.AVCStreamContext;
  1667. TRACE(TL_STRM_TRACE,("AVCSTRM_OPEN: suceeded %x; pAVCStrmReq:%x; AVCStreamContext:%x\n", Status, pAVCStrmReq, pStrmExt->AVCStreamContext));
  1668. //
  1669. // Format specific tasks
  1670. //
  1671. switch(pDevExt->VideoFormatIndex) {
1672. // For the DV formats, set up a timer DPC that fires periodically to signal clock events.
  1673. case AVCSTRM_FORMAT_MPEG2TS:
  1674. break;
  1675. case AVCSTRM_FORMAT_SDDV_NTSC: // 61883-2
  1676. case AVCSTRM_FORMAT_SDDV_PAL: // 61883-2
  1677. case AVCSTRM_FORMAT_HDDV_NTSC: // 61883-3
  1678. case AVCSTRM_FORMAT_HDDV_PAL: // 61883-3
  1679. case AVCSTRM_FORMAT_SDLDV_NTSC: // 61883-5
  1680. case AVCSTRM_FORMAT_SDLDV_PAL: // 61883-5
  1681. #ifdef SUPPORT_LOCAL_PLUGS
  1682. if(DataFlow == KSPIN_DATAFLOW_IN) {
  1683. // Remote(iPCR)<-Local(oPCR)
  1684. // The default was S200 for MPEG2TS data; set it to DV.
  1685. pDevExt->OPCR.oPCR.DataRate = CMP_SPEED_S100;
  1686. pDevExt->OPCR.oPCR.OverheadID = PCR_OVERHEAD_ID_SDDV_DEF;
  1687. pDevExt->OPCR.oPCR.Payload = PCR_PAYLOAD_SDDV_DEF;
  1688. if(AVCTapeSetLocalPlug(
  1689. pDevExt,
  1690. &pDevExt->AVReq,
  1691. &pDevExt->hOutputPCRLocal,
  1692. &pDevExt->OPCR)) {
  1693. TRACE(TL_STRM_ERROR|TL_61883_ERROR,("Failed to set oPCR\n"));
  1694. }
  1695. }
  1696. #endif
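// Initialize (but do not yet start) the clock DPC and timer; the timer is actually
// scheduled in AVCTapeSetStreamState when the stream enters KSSTATE_RUN and this
// pin is the master clock, and it is cancelled again on the RUN -> PAUSE transition.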
  1697. KeInitializeDpc(
  1698. &pStrmExt->DPCTimer,
  1699. AVCTapeSignalClockEvent,
  1700. pStrmExt
  1701. );
  1702. KeInitializeTimer(
  1703. &pStrmExt->Timer
  1704. );
  1705. break;
  1706. default:
  1707. // Not supported!
  1708. break;
  1709. }
  1710. LeaveAVCStrm(pStrmExt->hMutexReq);
  1711. //
1712. // Cache it for reference when pDevExt is all we have, such as during BusReset and SurpriseRemoval
  1713. //
  1714. pDevExt->idxStreamNumber = idxStreamNumber; // index of current active stream; work only if there is only one active stream at any time.
  1715. pDevExt->paStrmExt[idxStreamNumber] = pStrmExt;
  1716. //
1717. // In the future, a DV device could be unplugged and plugged back in,
1718. // and its state restored if the application has not yet closed.
  1719. //
  1720. pDevExt->bDevRemoved = FALSE;
  1721. //
1722. // No one else can open another stream (input or output) until this one is released.
1723. // This is done to avoid a cyclic graph.
  1724. //
  1725. pDevExt->cndStrmOpen++;
  1726. ASSERT(pDevExt->cndStrmOpen == 1);
  1727. OpenStreamDone:
  1728. TRACE(TL_STRM_WARNING,("OpenStream: %d stream open, idx %d, Status %x, pStrmExt %x, Context:%x; pDevExt %x\n",
  1729. pDevExt->cndStrmOpen, pDevExt->idxStreamNumber, Status, pStrmExt, pStrmExt->AVCStreamContext, pDevExt));
  1730. TRACE(TL_STRM_TRACE,("OpenStream: Status %x, idxStream %d, pDevExt %x, pStrmExt %x, Contextg:%x\n",
  1731. Status, idxStreamNumber, pDevExt, pStrmExt, pStrmExt->AVCStreamContext));
  1732. return Status;
  1733. AbortOpenStream:
  1734. if(pStrmExt->DataListLock) {
  1735. ExFreePool(pStrmExt->DataListLock); pStrmExt->DataListLock = NULL;
  1736. }
  1737. if(pStrmExt->hMutexFlow) {
  1738. ExFreePool(pStrmExt->hMutexFlow); pStrmExt->hMutexFlow = NULL;
  1739. }
  1740. if(pStrmExt->hMutexReq) {
  1741. ExFreePool(pStrmExt->hMutexReq); pStrmExt->hMutexReq = NULL;
  1742. }
  1743. TRACE(TL_STRM_ERROR,("OpenStream failed %x, idxStream %d, pDevExt %x, pStrmExt %x\n",
  1744. Status, idxStreamNumber, pDevExt, pStrmExt));
  1745. return Status;
  1746. }
  1747. NTSTATUS
  1748. AVCTapeCloseStream(
  1749. IN PHW_STREAM_OBJECT pStrmObject,
  1750. IN PKSDATAFORMAT pOpenFormat,
  1751. IN PAV_61883_REQUEST pAVReq
  1752. )
  1753. /*++
  1754. Routine Description:
1755. Called when a CloseStream SRB request is received.
  1756. --*/
  1757. {
  1758. PSTREAMEX pStrmExt;
  1759. PDVCR_EXTENSION pDevExt;
  1760. ULONG idxStreamNumber;
  1761. NTSTATUS Status;
  1762. PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq;
  1763. ULONG i;
  1764. PDRIVER_REQUEST pDriverReq;
  1765. PAGED_CODE();
  1766. pDevExt = (PDVCR_EXTENSION) pStrmObject->HwDeviceExtension;
  1767. pStrmExt = (PSTREAMEX) pStrmObject->HwStreamExtension;
  1768. idxStreamNumber = pStrmObject->StreamNumber;
  1769. TRACE(TL_STRM_TRACE,("CloseStream: >> pStrmExt %x, pDevExt %x\n", pStrmExt, pDevExt));
  1770. //
  1771. // If the stream isn't open, just return
  1772. //
  1773. if(pStrmExt == NULL) {
  1774. ASSERT(pStrmExt && "CloseStream but pStrmExt is NULL!");
  1775. return STATUS_SUCCESS; // ????
  1776. }
  1777. //
  1778. // Wait until the pending work item is completed.
  1779. //
  1780. KeWaitForSingleObject( &pStrmExt->hCancelDoneEvent, Executive, KernelMode, FALSE, 0 );
  1781. //
  1782. // Request AVCStrm to close a stream
  1783. //
  1784. EnterAVCStrm(pStrmExt->hMutexReq);
  1785. #if 0
1786. // For the DV input pins, cancel the periodic clock-event timer DPC.
  1787. if(pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_MPEG2TS) {
  1788. // Cancel timer
  1789. TRACE(TL_STRM_TRACE,("*** CancelTimer *********************************************...\n"));
  1790. KeCancelTimer(
  1791. &pStrmExt->Timer
  1792. );
  1793. }
  1794. #endif
  1795. pAVCStrmReq = &pStrmExt->AVCStrmReq;
  1796. RtlZeroMemory(pAVCStrmReq, sizeof(AVC_STREAM_REQUEST_BLOCK));
  1797. INIT_AVCSTRM_HEADER(pAVCStrmReq, AVCSTRM_CLOSE);
  1798. pAVCStrmReq->AVCStreamContext = pStrmExt->AVCStreamContext;
  1799. Status =
  1800. AVCStrmReqSubmitIrpSynch(
  1801. pDevExt->pBusDeviceObject,
  1802. pStrmExt->pIrpReq,
  1803. pAVCStrmReq
  1804. );
  1805. if(!NT_SUCCESS(Status)) {
  1806. TRACE(TL_STRM_ERROR,("AVCSTRM_CLOSE: failed %x; pAVCStrmReq:%x\n", Status, pAVCStrmReq));
1807. ASSERT(NT_SUCCESS(Status) && "AVCSTRM_CLOSE failed!\n");
  1808. }
  1809. else {
  1810. // Save the context, which is used for subsequent call to AVCStrm.sys
1811. TRACE(TL_STRM_TRACE,("AVCSTRM_CLOSE: succeeded %x; pAVCStrmReq:%x\n", Status, pAVCStrmReq));
  1812. pStrmExt->AVCStreamContext = 0;
  1813. }
  1814. // Free system resources
  1815. if(pStrmExt->pIrpReq) {
  1816. IoFreeIrp(pStrmExt->pIrpReq); pStrmExt->pIrpReq = NULL;
  1817. }
  1818. if(pStrmExt->pIrpAbort) {
  1819. IoFreeIrp(pStrmExt->pIrpAbort); pStrmExt->pIrpAbort = NULL;
  1820. }
  1821. #if 0
  1822. for (i=0; i < MAX_DATA_REQUESTS; i++) {
  1823. if(pStrmExt->AsyncReq[i].pIrp) {
  1824. IoFreeIrp(pStrmExt->AsyncReq[i].pIrp); pStrmExt->AsyncReq[i].pIrp = NULL;
  1825. }
  1826. }
  1827. #else
  1828. //
1829. // Free the preallocated IRPs. The list data structure itself is part of the stream extension, so
1830. // it will be freed by the StreamClass.
  1831. //
  1832. ASSERT(pStrmExt->cntDataAttached == 0);
  1833. ASSERT(pStrmExt->cntDataDetached >= MAX_DATA_REQUESTS);
  1834. while (!IsListEmpty(&pStrmExt->DataDetachedListHead)) {
  1835. pDriverReq = (PDRIVER_REQUEST) RemoveHeadList(&pStrmExt->DataDetachedListHead); pStrmExt->cntDataDetached--;
  1836. IoFreeIrp(pDriverReq->pIrp); pDriverReq->pIrp = NULL;
  1837. }
  1838. #endif
  1839. LeaveAVCStrm(pStrmExt->hMutexReq);
  1840. //
  1841. // Not valid after this call.
  1842. //
  1843. for (i=0; i<pDevExt->NumOfPins; i++) {
  1844. //
  1845. // Find what we cache and remove it.
  1846. //
  1847. if(pStrmExt == pDevExt->paStrmExt[i]) {
  1848. pDevExt->paStrmExt[i] = NULL;
  1849. break;
  1850. }
  1851. }
  1852. //
  1853. // Free synchronization structures
  1854. //
  1855. if(pStrmExt->DataListLock) {
  1856. ExFreePool(pStrmExt->DataListLock); pStrmExt->DataListLock = NULL;
  1857. }
  1858. if(pStrmExt->hMutexFlow) {
  1859. ExFreePool(pStrmExt->hMutexFlow); pStrmExt->hMutexFlow = NULL;
  1860. }
  1861. if(pStrmExt->hMutexReq) {
  1862. ExFreePool(pStrmExt->hMutexReq); pStrmExt->hMutexReq = NULL;
  1863. }
  1864. // Release this count so other can open.
  1865. pDevExt->cndStrmOpen--;
  1866. ASSERT(pDevExt->cndStrmOpen == 0);
  1867. TRACE(TL_STRM_TRACE,("CloseStream: completed; %d stream;\n", pDevExt->cndStrmOpen));
  1868. return STATUS_SUCCESS;
  1869. }
  1870. NTSTATUS
  1871. DVChangePower(
  1872. PDVCR_EXTENSION pDevExt,
  1873. PAV_61883_REQUEST pAVReq,
  1874. DEVICE_POWER_STATE NewPowerState
  1875. )
  1876. /*++
  1877. Routine Description:
  1878. Process changing this device's power state.
  1879. --*/
  1880. {
  1881. ULONG i;
  1882. NTSTATUS Status;
  1883. PAGED_CODE();
  1884. //
  1885. // D0: Device is on and can be streaming.
  1886. // D1,D2: not supported.
1887. // D3: Device is off and cannot stream. The context is lost.
  1888. // Power can be removed from the device.
  1889. // When power is back on, we will get a bus reset.
  1890. //
  1891. TRACE(TL_PNP_TRACE,("ChangePower: PowrSt: %d->%d; (d0:[1:On],D3[4:off])\n", pDevExt->PowerState, NewPowerState));
  1892. Status = STATUS_SUCCESS;
  1893. if(pDevExt->PowerState == NewPowerState) {
  1894. TRACE(TL_STRM_WARNING,("ChangePower: no change; do nothing!\n"));
  1895. return STATUS_SUCCESS;
  1896. }
  1897. switch (NewPowerState) {
  1898. case PowerDeviceD3: // Power OFF
  1899. // We are at D0 and ask to go to D3: save state, stop streaming and Sleep
  1900. if( pDevExt->PowerState == PowerDeviceD0) {
  1901. // For a supported power state change
  1902. for (i=0; i<pDevExt->NumOfPins; i++) {
  1903. if(pDevExt->paStrmExt[i]) {
  1904. if(pDevExt->paStrmExt[i]->bIsochIsActive) {
  1905. // Stop isoch but do not change the streaming state
  1906. TRACE(TL_PNP_WARNING,("ChangePower: Stop isoch but not change stream state:%d\n", pDevExt->paStrmExt[i]->StreamState));
  1907. }
  1908. }
  1909. }
  1910. }
  1911. else {
  1912. TRACE(TL_PNP_WARNING,("pDevExt->paStrmExt[i].StreamState:Intermieate power state; do nothing;\n"));
  1913. }
  1914. break;
  1915. case PowerDeviceD0: // Powering ON (waking up)
  1916. if( pDevExt->PowerState == PowerDeviceD3) {
  1917. // For a supported power state change
  1918. for (i=0; i<pDevExt->NumOfPins; i++) {
  1919. if(pDevExt->paStrmExt[i]) {
  1920. if(!pDevExt->paStrmExt[i]->bIsochIsActive) {
  1921. TRACE(TL_PNP_ERROR,("ChangePower: StrmSt:%d; Start isoch\n", pDevExt->paStrmExt[i]->StreamState));
  1922. // Start isoch depending on streaming state for DATAFLOW_IN/OUT
  1923. if(pDevExt->paStrmExt[i]->pStrmInfo->DataFlow == KSPIN_DATAFLOW_IN) {
  1924. if(pDevExt->paStrmExt[i]->StreamState == KSSTATE_PAUSE ||
  1925. pDevExt->paStrmExt[i]->StreamState == KSSTATE_RUN) {
  1926. }
  1927. }
  1928. else if(pDevExt->paStrmExt[i]->pStrmInfo->DataFlow == KSPIN_DATAFLOW_OUT) {
  1929. if(pDevExt->paStrmExt[i]->StreamState == KSSTATE_RUN) {
  1930. }
  1931. }
  1932. } // IsochActive
  1933. }
  1934. }
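// Note: the stream-state checks above currently have empty bodies; restarting isoch
// after wake-up appears to be deferred to the bus reset that follows power-on, as
// noted in the comment at the top of this routine.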
  1935. }
  1936. else {
1937. TRACE(TL_PNP_WARNING,("Intermediate power state; do nothing.\n"));
  1938. }
  1939. break;
1940. // These states are not supported.
  1941. case PowerDeviceD1:
  1942. case PowerDeviceD2:
  1943. default:
1944. TRACE(TL_PNP_WARNING,("ChangePower: Unsupported PowerState %d\n", NewPowerState));
  1945. Status = STATUS_SUCCESS; // STATUS_INVALID_PARAMETER;
  1946. break;
  1947. }
  1948. if(Status == STATUS_SUCCESS)
  1949. pDevExt->PowerState = NewPowerState;
  1950. else
  1951. Status = STATUS_NOT_IMPLEMENTED;
  1952. return STATUS_SUCCESS;
  1953. }
  1954. NTSTATUS
  1955. AVCTapeSurpriseRemoval(
  1956. PDVCR_EXTENSION pDevExt,
  1957. PAV_61883_REQUEST pAVReq
  1958. )
  1959. /*++
  1960. Routine Description:
  1961. Response to SRB_SURPRISE_REMOVAL.
  1962. --*/
  1963. {
  1964. ULONG i;
  1965. PKSEVENT_ENTRY pEvent = NULL;
  1966. PAGED_CODE();
  1967. //
1968. // This is the ONLY place this flag is set to TRUE.
1969. // Block incoming reads, although some may still be in the process of being attached.
  1970. //
  1971. pDevExt->bDevRemoved = TRUE;
  1972. // Signal
  1973. if(pDevExt->PowerState != PowerDeviceD3) {
  1974. pDevExt->PowerState = PowerDeviceD3; // It is as good as power is off.
  1975. }
  1976. //
  1977. // Now Stop the stream and clean up
  1978. //
  1979. for(i=0; i < pDevExt->NumOfPins; i++) {
  1980. if(pDevExt->paStrmExt[i] != NULL) {
  1981. TRACE(TL_PNP_WARNING,("#SURPRISE_REMOVAL# StrmNum %d, pStrmExt %x\n", i, pDevExt->paStrmExt[i]));
  1982. // Signal this event so SRB can complete.
  1983. if(pDevExt->paStrmExt[i]->pStrmInfo->DataFlow == KSPIN_DATAFLOW_IN ) {
  1984. //
  1985. // Imply EOStream!
  1986. //
  1987. if(!pDevExt->paStrmExt[i]->bEOStream)
  1988. pDevExt->paStrmExt[i]->bEOStream = TRUE;
  1989. //
  1990. // Signal EOStream
  1991. //
  1992. StreamClassStreamNotification(
  1993. SignalMultipleStreamEvents,
  1994. pDevExt->paStrmExt[i]->pStrmObject,
  1995. (GUID *)&KSEVENTSETID_Connection_Local,
  1996. KSEVENT_CONNECTION_ENDOFSTREAM
  1997. );
  1998. }
  1999. //
  2000. // Start a work item to abort streaming
  2001. //
  2002. AVCTapeCreateAbortWorkItem(pDevExt, pDevExt->paStrmExt[i]);
  2003. //
  2004. // Wait until the pending work item is completed.
  2005. //
  2006. TRACE(TL_PNP_WARNING,("SupriseRemoval: Wait for CancelDoneEvent <entering>; lCancelStateWorkItem:%d\n", pDevExt->paStrmExt[i]->lCancelStateWorkItem));
  2007. KeWaitForSingleObject( &pDevExt->paStrmExt[i]->hCancelDoneEvent, Executive, KernelMode, FALSE, 0 );
  2008. TRACE(TL_PNP_WARNING,("SupriseRemoval: Wait for CancelDoneEvent; Attached:%d <exited>...\n", pDevExt->paStrmExt[i]->cntDataAttached));
  2009. ASSERT(pDevExt->paStrmExt[i]->cntDataAttached == 0); // No more attach after abort stream!
  2010. }
  2011. }
2012. // Signal the KSEvent that the device is removed.
2013. // After this SRB, there will be no more Set/Get property SRBs into this driver.
2014. // By notifying the COM I/F, it will either signal the application that the device is removed or
2015. // return the ERROR_DEVICE_REMOVED error code for subsequent calls.
  2016. pEvent =
  2017. StreamClassGetNextEvent(
  2018. (PVOID) pDevExt,
  2019. 0,
  2020. (GUID *)&KSEVENTSETID_EXTDEV_Command,
  2021. KSEVENT_EXTDEV_NOTIFY_REMOVAL,
  2022. pEvent);
  2023. if(pEvent) {
  2024. //
  2025. // signal the event here
  2026. //
  2027. if(pEvent->EventItem->EventId == KSEVENT_EXTDEV_NOTIFY_REMOVAL) {
  2028. StreamClassDeviceNotification(
  2029. SignalDeviceEvent,
  2030. pDevExt,
  2031. pEvent
  2032. );
  2033. TRACE(TL_PNP_WARNING,("SurpriseRemoval: signal KSEVENT_EXTDEV_NOTIFY_REMOVAL, id %x.\n", pEvent->EventItem->EventId));
  2034. } else {
  2035. TRACE(TL_PNP_TRACE,("SurpriseRemoval: pEvent:%x; Id:%d not matched!\n", pEvent, pEvent->EventItem->EventId));
  2036. }
  2037. } else {
  2038. TRACE(TL_PNP_TRACE,("SurpriseRemoval: KSEVENT_EXTDEV_NOTIFY_REMOVAL event not enabled\n"));
  2039. }
  2040. return STATUS_SUCCESS;
  2041. }
  2042. // Return code is basically return in pSrb->Status.
  2043. NTSTATUS
  2044. AVCTapeProcessPnPBusReset(
  2045. PDVCR_EXTENSION pDevExt
  2046. )
  2047. /*++
  2048. Routine Description:
  2049. Process a bus reset.
  2050. Arguments:
  2051. Srb - Pointer to stream request block
  2052. Return Value:
  2053. Nothing
  2054. --*/
  2055. {
  2056. #ifdef MSDVDV_SUPPORT_BUSRESET_EVENT
  2057. PKSEVENT_ENTRY pEvent;
  2058. #endif
  2059. PAGED_CODE();
  2060. TRACE(TL_PNP_TRACE,("ProcessPnPBusReset: >>\n"));
  2061. #ifdef MSDVDV_SUPPORT_BUSRESET_EVENT
  2062. //
  2063. // Signal (if enabled) busreset event to let upper layer know that a busreset has occurred.
  2064. //
  2065. pEvent = NULL;
  2066. pEvent =
  2067. StreamClassGetNextEvent(
  2068. (PVOID) pDevExt,
  2069. 0,
  2070. (GUID *)&KSEVENTSETID_EXTDEV_Command,
  2071. KSEVENT_EXTDEV_COMMAND_BUSRESET,
  2072. pEvent
  2073. );
  2074. if(pEvent) {
  2075. //
  2076. // signal the event here
  2077. //
  2078. if(pEvent->EventItem->EventId == KSEVENT_EXTDEV_COMMAND_BUSRESET) {
  2079. StreamClassDeviceNotification(
  2080. SignalDeviceEvent,
  2081. pDevExt,
  2082. pEvent
  2083. );
  2084. TRACE(TL_PNP_TRACE,("ProcessPnPBusReset: Signal BUSRESET; EventId %d.\n", pEvent->EventItem->EventId));
  2085. }
  2086. }
  2087. #endif
  2088. //
  2089. // Reset pending count and AVC command that is in Interim
  2090. //
  2091. DVAVCCmdResetAfterBusReset(pDevExt);
  2092. //
  2093. // Can we return anything other than SUCCESS ?
  2094. //
  2095. return STATUS_SUCCESS;
  2096. }
  2097. NTSTATUS
  2098. AVCTapeUninitialize(
  2099. IN PDVCR_EXTENSION pDevExt
  2100. )
  2101. /*++
  2102. Routine Description:
2103. This is where we perform the necessary cleanup (uninitialization) tasks.
  2104. Arguments:
  2105. Srb - Pointer to stream request block
  2106. Return Value:
  2107. Nothing
  2108. --*/
  2109. {
  2110. PAGED_CODE();
  2111. TRACE(TL_PNP_TRACE,("UnInitialize: pDevExt=%x\n", pDevExt));
  2112. //
  2113. // Clear all pending AVC command entries.
  2114. //
  2115. DVAVCCmdResetAfterBusReset(pDevExt);
  2116. //
  2117. // Free textual string
  2118. //
  2119. DvFreeTextualString(pDevExt, &pDevExt->UnitIDs);
  2120. #ifdef SUPPORT_LOCAL_PLUGS
  2121. // Delete the local output plug.
  2122. if(pDevExt->hOutputPCRLocal) {
  2123. if(!AVCTapeDeleteLocalPlug(
  2124. pDevExt,
  2125. &pDevExt->AVReq,
  2126. &pDevExt->OutputPCRLocalNum,
  2127. &pDevExt->hOutputPCRLocal)) {
  2128. TRACE(TL_PNP_ERROR,("Failed to delete a local oPCR!\n"));
  2129. }
  2130. }
  2131. // Delete the local input plug.
  2132. if(pDevExt->hInputPCRLocal) {
  2133. if(!AVCTapeDeleteLocalPlug(
  2134. pDevExt,
  2135. &pDevExt->AVReq,
  2136. &pDevExt->InputPCRLocalNum,
  2137. &pDevExt->hInputPCRLocal)) {
  2138. TRACE(TL_PNP_ERROR,("Failed to delete a local iPCR!\n"));
  2139. }
  2140. }
  2141. #endif
2142. // Free preallocated resources
  2143. if(pDevExt->pIrpSyncCall) {
  2144. IoFreeIrp(pDevExt->pIrpSyncCall); pDevExt->pIrpSyncCall = NULL;
  2145. }
  2146. // Free stream information allocated
  2147. if(pDevExt->pStreamInfoObject) {
  2148. ExFreePool(pDevExt->pStreamInfoObject);
  2149. pDevExt->pStreamInfoObject = NULL;
  2150. }
  2151. TRACE(TL_PNP_TRACE,("UnInitialize: done!\n"));
  2152. return STATUS_SUCCESS;
  2153. }
  2154. //*****************************************************************************
  2155. //*****************************************************************************
  2156. // S T R E A M S R B
  2157. //*****************************************************************************
  2158. //*****************************************************************************
  2159. #if DBG
  2160. ULONG DbgLastIdx = 0;
  2161. #endif
  2162. NTSTATUS
  2163. AVCTapeReqReadDataCR(
  2164. IN PDEVICE_OBJECT DeviceObject,
  2165. IN PIRP pIrpReq,
  2166. IN PDRIVER_REQUEST pDriverReq
  2167. )
  2168. {
  2169. PHW_STREAM_REQUEST_BLOCK pSrb;
  2170. PSTREAMEX pStrmExt;
  2171. KIRQL oldIrql;
  2172. ASSERT(pDriverReq);
  2173. pSrb = pDriverReq->Context1;
  2174. pStrmExt = pDriverReq->Context2;
  2175. if(pSrb == NULL || pStrmExt == NULL) {
  2176. TRACE(TL_STRM_ERROR|TL_CIP_ERROR,("ReqReadDataCR: Context are all NULL!\n"));
  2177. return STATUS_MORE_PROCESSING_REQUIRED; // Will reuse this irp
  2178. }
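// DataListLock guards the attached/detached request lists and the frame counters;
// this completion routine can run at DISPATCH_LEVEL, which is presumably why a spin
// lock is used here rather than the mutexes that protect the synchronous request path.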
  2179. KeAcquireSpinLock(pStrmExt->DataListLock, &oldIrql);
2180. // Count frames processed
  2181. pStrmExt->FramesProcessed++;
  2182. pStrmExt->cntDataSubmitted--;
  2183. #if 1
  2184. // Retrieve current stream time
  2185. if(pStrmExt->hMasterClock) {
  2186. pStrmExt->CurrentStreamTime = pSrb->CommandData.DataBufferArray->PresentationTime.Time;
  2187. #if 0
  2188. AVCTapeSignalClockEvent(pStrmExt);
  2189. #endif
  2190. }
  2191. #endif
  2192. #if DBG
  2193. //
2194. // Check that data request completions arrive in sequence
  2195. //
  2196. if(pStrmExt->FramesProcessed != pDriverReq->cntDataRequestReceived) {
  2197. TRACE(TL_STRM_WARNING,("** OOSeq: Next:%d != Actual:%d **\n",
  2198. (DWORD) pStrmExt->FramesProcessed, (DWORD) pDriverReq->cntDataRequestReceived));
  2199. // ASSERT(pStrmExt->FramesProcessed == pDriverReq->cntDataRequestReceived);
  2200. }
  2201. #endif
  2202. if(!NT_SUCCESS(pIrpReq->IoStatus.Status)) {
  2203. TRACE(TL_STRM_TRACE|TL_CIP_TRACE,("ReadDataReq failed; St:%x; DataUsed:%d\n", pIrpReq->IoStatus.Status,
  2204. pSrb->CommandData.DataBufferArray->DataUsed));
  2205. // Only acceptable status is cancel.
  2206. ASSERT(pIrpReq->IoStatus.Status == STATUS_CANCELLED && "ReadDataReq failed\n");
  2207. } else {
  2208. TRACE(TL_STRM_INFO,("ReadDataReq pSrb:%x; St:%x; DataUsed:%d; Flag:%x\n", pIrpReq->IoStatus.Status,
  2209. pSrb->CommandData.DataBufferArray->DataUsed, pSrb->CommandData.DataBufferArray->OptionsFlags));
  2210. }
  2211. ASSERT(pIrpReq->IoStatus.Status != STATUS_PENDING);
  2212. pSrb->Status = pIrpReq->IoStatus.Status;
  2213. // Reset them so if this is completed here before the IRP's IoCallDriver is returned,
  2214. // it will not try to complete again.
  2215. pDriverReq->Context1 = NULL;
  2216. pDriverReq->Context2 = NULL;
  2217. // Done; recycle.
  2218. RemoveEntryList(&pDriverReq->ListEntry); pStrmExt->cntDataAttached--;
  2219. InsertTailList(&pStrmExt->DataDetachedListHead, &pDriverReq->ListEntry); pStrmExt->cntDataDetached++;
  2220. KeReleaseSpinLock(pStrmExt->DataListLock, oldIrql);
  2221. //
  2222. // Signal the graph manager that we are completed.
  2223. //
  2224. if(pSrb->CommandData.DataBufferArray->OptionsFlags & KSSTREAM_HEADER_OPTIONSF_ENDOFSTREAM) {
  2225. StreamClassStreamNotification(
  2226. SignalMultipleStreamEvents,
  2227. pStrmExt->pStrmObject,
  2228. &KSEVENTSETID_Connection,
  2229. KSEVENT_CONNECTION_ENDOFSTREAM
  2230. );
  2231. }
  2232. // Finally, send the srb back up ...
  2233. StreamClassStreamNotification(
  2234. StreamRequestComplete,
  2235. pSrb->StreamObject,
  2236. pSrb
  2237. );
  2238. return STATUS_MORE_PROCESSING_REQUIRED; // Will reuse this irp
2239. } // AVCTapeReqReadDataCR
  2240. NTSTATUS
  2241. AVCTapeGetStreamState(
  2242. PSTREAMEX pStrmExt,
  2243. IN PDEVICE_OBJECT DeviceObject,
  2244. PKSSTATE pStreamState,
  2245. PULONG pulActualBytesTransferred
  2246. )
  2247. /*++
  2248. Routine Description:
  2249. Gets the current state of the requested stream
  2250. --*/
  2251. {
  2252. NTSTATUS Status;
  2253. PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq;
  2254. PAGED_CODE();
  2255. if(!pStrmExt) {
  2256. TRACE(TL_STRM_ERROR,("GetStreamState: pStrmExt:%x; STATUS_UNSUCCESSFUL\n", pStrmExt));
  2257. return STATUS_UNSUCCESSFUL;
  2258. }
  2259. //
  2260. // Request AVCStrm to get current stream state
  2261. //
  2262. EnterAVCStrm(pStrmExt->hMutexReq);
  2263. pAVCStrmReq = &pStrmExt->AVCStrmReq;
  2264. RtlZeroMemory(pAVCStrmReq, sizeof(AVC_STREAM_REQUEST_BLOCK));
  2265. INIT_AVCSTRM_HEADER(pAVCStrmReq, AVCSTRM_GET_STATE);
  2266. pAVCStrmReq->AVCStreamContext = pStrmExt->AVCStreamContext;
  2267. Status =
  2268. AVCStrmReqSubmitIrpSynch(
  2269. DeviceObject,
  2270. pStrmExt->pIrpReq,
  2271. pAVCStrmReq
  2272. );
  2273. if(!NT_SUCCESS(Status)) {
  2274. TRACE(TL_STRM_ERROR,("AVCSTRM_GET_STATE: failed %x; pAVCStrmReq:%x\n", Status, pAVCStrmReq));
  2275. ASSERT(NT_SUCCESS(Status) && "AVCSTRM_GET_STATE failed!\n");
  2276. }
  2277. else {
  2278. // Save the context, which is used for subsequent call to AVCStrm.sys
  2279. TRACE(TL_STRM_WARNING,("AVCSTRM_GET_STATE: Status:%x; pAVCStrmReq:%x; KSSTATE:%d\n", Status, pAVCStrmReq, pAVCStrmReq->CommandData.StreamState));
  2280. *pStreamState = pAVCStrmReq->CommandData.StreamState;
  2281. *pulActualBytesTransferred = sizeof (KSSTATE);
  2282. // A very odd rule:
  2283. // When transitioning from stop to pause, DShow tries to preroll
  2284. // the graph. Capture sources can't preroll, and indicate this
  2285. // by returning VFW_S_CANT_CUE in user mode. To indicate this
  2286. // condition from drivers, they must return ERROR_NO_DATA_DETECTED
  2287. if( *pStreamState == KSSTATE_PAUSE
  2288. && pStrmExt->pStrmInfo->DataFlow == KSPIN_DATAFLOW_OUT
  2289. )
  2290. Status = STATUS_NO_DATA_DETECTED;
  2291. else
  2292. Status = STATUS_SUCCESS;
  2293. }
  2294. LeaveAVCStrm(pStrmExt->hMutexReq);
  2295. return Status;
  2296. }
  2297. NTSTATUS
  2298. AVCTapeSetStreamState(
  2299. PSTREAMEX pStrmExt,
  2300. PDVCR_EXTENSION pDevExt,
  2301. PAV_61883_REQUEST pAVReq,
  2302. KSSTATE StreamState
  2303. )
  2304. /*++
  2305. Routine Description:
  2306. Sets the stream state via the SRB.
  2307. --*/
  2308. {
  2309. PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq;
  2310. NTSTATUS Status;
  2311. PAGED_CODE();
  2312. ASSERT(pStrmExt);
  2313. if(pStrmExt == NULL) {
  2314. return STATUS_UNSUCCESSFUL;
  2315. }
  2316. Status = STATUS_SUCCESS;
  2317. TRACE(TL_STRM_TRACE,("Set State %d -> %d; PowerSt:%d (1/On;4/Off]); AD [%d,%d]\n", \
  2318. pStrmExt->StreamState, StreamState, pDevExt->PowerState,
  2319. pStrmExt->cntDataAttached,
  2320. pStrmExt->cntDataDetached
  2321. ));
  2322. #if DBG
  2323. if(StreamState == KSSTATE_RUN) {
  2324. ASSERT(pDevExt->PowerState == PowerDeviceD0 && "Cannot set to RUN while power is off!");
  2325. }
  2326. #endif
  2327. //
  2328. // Request AVCStrm to set to a new stream state
  2329. //
  2330. EnterAVCStrm(pStrmExt->hMutexReq);
  2331. pAVCStrmReq = &pStrmExt->AVCStrmReq;
  2332. RtlZeroMemory(pAVCStrmReq, sizeof(AVC_STREAM_REQUEST_BLOCK));
  2333. INIT_AVCSTRM_HEADER(pAVCStrmReq, AVCSTRM_SET_STATE);
  2334. pAVCStrmReq->AVCStreamContext = pStrmExt->AVCStreamContext;
  2335. pAVCStrmReq->CommandData.StreamState = StreamState;
  2336. Status =
  2337. AVCStrmReqSubmitIrpSynch(
  2338. pDevExt->pBusDeviceObject,
  2339. pStrmExt->pIrpReq,
  2340. pAVCStrmReq
  2341. );
  2342. if(!NT_SUCCESS(Status)) {
  2343. TRACE(TL_STRM_ERROR,("AVCSTRM_SET_STATE: failed %x; pAVCStrmReq:%x\n", Status, pAVCStrmReq));
  2344. ASSERT(NT_SUCCESS(Status) && "AVCSTRM_SET_STATE failed!\n");
  2345. }
  2346. else {
  2347. // Save the context, which is used for subsequent call to AVCStrm.sys
  2348. TRACE(TL_STRM_TRACE,("AVCSTRM_SET_STATE: Status:%x; pAVCStrmReq:%x, new KSSTATE:%d\n", Status, pAVCStrmReq, pAVCStrmReq->CommandData.StreamState));
  2349. // Reset the abort state
  2350. if(pStrmExt->StreamState == KSSTATE_STOP && StreamState == KSSTATE_ACQUIRE)
  2351. pStrmExt->AbortInProgress = FALSE;
  2352. // Reaction due to state change
  2353. switch(StreamState) {
  2354. case KSSTATE_STOP:
  2355. TRACE(TL_STRM_TRACE,("SrbRcv:%d, Processed:%d; Pending:%d\n", (DWORD) pStrmExt->cntSRBReceived, (DWORD) pStrmExt->FramesProcessed, (DWORD) pStrmExt->cntDataSubmitted));
  2356. // Reset it
  2357. pStrmExt->cntSRBReceived = pStrmExt->FramesProcessed = pStrmExt->cntDataSubmitted = 0;
  2358. pStrmExt->CurrentStreamTime = 0;
  2359. break;
  2360. case KSSTATE_PAUSE:
2361. // For the DV input pins, cancel the periodic clock-event timer when transitioning from RUN to PAUSE.
  2362. if(pStrmExt->hMasterClock && pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_MPEG2TS && pStrmExt->StreamState == KSSTATE_RUN) {
  2363. // Cancel timer
  2364. #if 1
  2365. TRACE(TL_STRM_TRACE,("*** (RUN->PAUSE) CancelTimer *********************************************...\n"));
  2366. KeCancelTimer(
  2367. &pStrmExt->Timer
  2368. );
  2369. #endif
  2370. }
  2371. break;
  2372. case KSSTATE_RUN:
2373. // For the DV input pins, schedule a timer DPC that fires periodically to signal clock events.
  2374. if(pStrmExt->hMasterClock &&
  2375. pDevExt->VideoFormatIndex != AVCSTRM_FORMAT_MPEG2TS) {
  2376. LARGE_INTEGER DueTime;
  2377. #define CLOCK_INTERVAL 20 // Unit=MilliSeconds
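// A 20 ms period is shorter than one video frame time (about 33 ms for NTSC, 40 ms
// for PAL), so pending clock-position events are presumably checked at least once
// per frame while this clock-providing pin is running.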
  2378. #if 0
2379. // For the DV input pins, set up a timer DPC that fires periodically to signal clock events.
  2380. KeInitializeDpc(
  2381. &pStrmExt->DPCTimer,
  2382. AVCTapeSignalClockEvent,
  2383. pStrmExt
  2384. );
  2385. KeInitializeTimer(
  2386. &pStrmExt->Timer
  2387. );
  2388. #endif
  2389. DueTime = RtlConvertLongToLargeInteger(-CLOCK_INTERVAL * 10000);
  2390. TRACE(TL_STRM_TRACE,("*** ScheduleTimer (RUN) *****************************************...\n"));
  2391. KeSetTimerEx(
  2392. &pStrmExt->Timer,
  2393. DueTime,
2394. CLOCK_INTERVAL, // Repeat every CLOCK_INTERVAL (20) milliseconds
  2395. &pStrmExt->DPCTimer
  2396. );
  2397. }
  2398. break;
  2399. default:
  2400. break;
  2401. }
  2402. // Cache the current state
  2403. pStrmExt->StreamState = StreamState;
  2404. }
  2405. LeaveAVCStrm(pStrmExt->hMutexReq);
  2406. return Status;
  2407. }
  2408. NTSTATUS
  2409. DVStreamGetConnectionProperty (
  2410. PDVCR_EXTENSION pDevExt,
  2411. PSTREAM_PROPERTY_DESCRIPTOR pSPD,
  2412. PULONG pulActualBytesTransferred
  2413. )
  2414. /*++
  2415. Routine Description:
2416. Handles KSPROPERTY_CONNECTION* requests. For now, only
2417. KSPROPERTY_CONNECTION_ALLOCATORFRAMING is supported.
  2418. --*/
  2419. {
  2420. NTSTATUS Status = STATUS_SUCCESS;
  2421. PAGED_CODE();
  2422. switch (pSPD->Property->Id) {
  2423. case KSPROPERTY_CONNECTION_ALLOCATORFRAMING:
  2424. if (pDevExt != NULL && pDevExt->cndStrmOpen) {
  2425. PKSALLOCATOR_FRAMING pFraming = (PKSALLOCATOR_FRAMING) pSPD->PropertyInfo;
  2426. pFraming->RequirementsFlags =
  2427. KSALLOCATOR_REQUIREMENTF_SYSTEM_MEMORY |
  2428. KSALLOCATOR_REQUIREMENTF_INPLACE_MODIFIER |
  2429. KSALLOCATOR_REQUIREMENTF_PREFERENCES_ONLY;
  2430. pFraming->PoolType = NonPagedPool;
  2431. pFraming->Frames = \
  2432. pDevExt->paStrmExt[pDevExt->idxStreamNumber]->pStrmInfo->DataFlow == KSPIN_DATAFLOW_OUT ? \
  2433. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].NumOfRcvBuffers : \
  2434. AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].NumOfXmtBuffers;
  2435. // Note: we'll allocate the biggest frame. We need to make sure when we're
  2436. // passing the frame back up we also set the number of bytes in the frame.
  2437. pFraming->FrameSize = AVCStrmFormatInfoTable[pDevExt->VideoFormatIndex].FrameSize;
  2438. pFraming->FileAlignment = 0; // FILE_LONG_ALIGNMENT;
  2439. pFraming->Reserved = 0;
  2440. *pulActualBytesTransferred = sizeof (KSALLOCATOR_FRAMING);
  2441. TRACE(TL_STRM_TRACE,("*** AllocFraming: cntStrmOpen:%d; VdoFmtIdx:%d; Frames %d; size:%d\n", \
  2442. pDevExt->cndStrmOpen, pDevExt->VideoFormatIndex, pFraming->Frames, pFraming->FrameSize));
  2443. } else {
  2444. Status = STATUS_INVALID_PARAMETER;
  2445. }
  2446. break;
  2447. default:
  2448. *pulActualBytesTransferred = 0;
  2449. Status = STATUS_NOT_SUPPORTED;
  2450. ASSERT(pSPD->Property->Id == KSPROPERTY_CONNECTION_ALLOCATORFRAMING);
  2451. break;
  2452. }
  2453. return Status;
  2454. }
  2455. NTSTATUS
  2456. DVGetDroppedFramesProperty(
  2457. PDVCR_EXTENSION pDevExt,
  2458. PSTREAMEX pStrmExt,
  2459. PSTREAM_PROPERTY_DESCRIPTOR pSPD,
  2460. PULONG pulBytesTransferred
  2461. )
  2462. /*++
  2463. Routine Description:
2464. Return the dropped-frame information while capturing.
  2465. --*/
  2466. {
  2467. NTSTATUS Status = STATUS_SUCCESS;
  2468. PAGED_CODE();
  2469. switch (pSPD->Property->Id) {
  2470. case KSPROPERTY_DROPPEDFRAMES_CURRENT:
  2471. {
  2472. PKSPROPERTY_DROPPEDFRAMES_CURRENT_S pDroppedFrames =
  2473. (PKSPROPERTY_DROPPEDFRAMES_CURRENT_S) pSPD->PropertyInfo;
  2474. pDroppedFrames->AverageFrameSize = AVCStrmFormatInfoTable[pStrmExt->pDevExt->VideoFormatIndex].FrameSize;
  2475. if(pStrmExt->pStrmInfo->DataFlow == KSPIN_DATAFLOW_IN) {
  2476. #if 0
2477. // pStrmExt->PictureNumber is not returned since it might be greater than the number of SRBs returned.
2478. // pStrmExt->CurrentStreamTime >= pDroppedFrames->PictureNumber * (ulAvgTimePerFrame)
2479. // CurrentStreamTime will be ahead if there are repeated frames and the data source
2480. // cannot keep up with the constant transfer rate of 29.97 (or 25) FPS; in that case,
2481. // repeated frames may have been inserted while the data of the last SRB is transferred.
2482. // To resolve this, an application can query PictureNumber and CurrentStreamTime and
2483. // read ahead by their delta to "catch up".
  2484. pDroppedFrames->PictureNumber = pStrmExt->FramesProcessed + pStrmExt->FramesDropped;
  2485. #else
2486. // This is the picture number that MSDV is actually sending; in the slow-hard-disk case,
2487. // it will be greater than (FramesProcessed + FramesDropped) because of repeated frames.
  2488. pDroppedFrames->PictureNumber = pStrmExt->PictureNumber;
  2489. #endif
  2490. } else {
  2491. pDroppedFrames->PictureNumber = pStrmExt->PictureNumber;
  2492. }
  2493. pDroppedFrames->DropCount = pStrmExt->FramesDropped; // For transmit, this value includes both dropped and repeated.
  2494. TRACE(TL_STRM_TRACE,("*DroppedFP: Pic#(%d), Drp(%d)\n", (LONG) pDroppedFrames->PictureNumber, (LONG) pDroppedFrames->DropCount));
  2495. *pulBytesTransferred = sizeof (KSPROPERTY_DROPPEDFRAMES_CURRENT_S);
  2496. Status = STATUS_SUCCESS;
  2497. }
  2498. break;
  2499. default:
  2500. *pulBytesTransferred = 0;
  2501. Status = STATUS_NOT_SUPPORTED;
  2502. ASSERT(pSPD->Property->Id == KSPROPERTY_DROPPEDFRAMES_CURRENT);
  2503. break;
  2504. }
  2505. return Status;
  2506. }
  2507. NTSTATUS
  2508. DVGetStreamProperty(
  2509. PHW_STREAM_REQUEST_BLOCK pSrb
  2510. )
  2511. /*++
  2512. Routine Description:
  2513. Routine to process property request
  2514. --*/
  2515. {
  2516. NTSTATUS Status = STATUS_SUCCESS;
  2517. PSTREAM_PROPERTY_DESCRIPTOR pSPD = pSrb->CommandData.PropertyInfo;
  2518. PAGED_CODE();
  2519. if(IsEqualGUID (&KSPROPSETID_Connection, &pSPD->Property->Set)) {
  2520. Status =
  2521. DVStreamGetConnectionProperty (
  2522. pSrb->HwDeviceExtension,
  2523. pSrb->CommandData.PropertyInfo,
  2524. &pSrb->ActualBytesTransferred
  2525. );
  2526. }
  2527. else if (IsEqualGUID (&PROPSETID_VIDCAP_DROPPEDFRAMES, &pSPD->Property->Set)) {
  2528. Status =
  2529. DVGetDroppedFramesProperty (
  2530. pSrb->HwDeviceExtension,
  2531. (PSTREAMEX) pSrb->StreamObject->HwStreamExtension,
  2532. pSrb->CommandData.PropertyInfo,
  2533. &pSrb->ActualBytesTransferred
  2534. );
  2535. }
  2536. else {
  2537. Status = STATUS_NOT_SUPPORTED;
  2538. }
  2539. return Status;
  2540. }
  2541. NTSTATUS
  2542. DVSetStreamProperty(
  2543. PHW_STREAM_REQUEST_BLOCK pSrb
  2544. )
  2545. /*++
  2546. Routine Description:
  2547. Routine to process set property request
  2548. --*/
  2549. {
  2550. PSTREAM_PROPERTY_DESCRIPTOR pSPD = pSrb->CommandData.PropertyInfo;
  2551. PAGED_CODE();
  2552. TRACE(TL_STRM_TRACE,("SetStreamProperty: entered ...\n"));
  2553. return STATUS_NOT_SUPPORTED;
  2554. }
  2555. void
  2556. DVCancelSrbWorkItemRoutine(
  2557. #ifdef USE_WDM110 // Win2000 code base
  2558. // Extra parameter if using WDM10
  2559. PDEVICE_OBJECT DeviceObject,
  2560. #endif
  2561. PSTREAMEX pStrmExt
  2562. )
  2563. /*++
  2564. Routine Description:
  2565. This work item routine will stop streaming and cancel all SRBs.
  2566. --*/
  2567. {
  2568. PAVC_STREAM_REQUEST_BLOCK pAVCStrmReq;
  2569. NTSTATUS Status;
  2570. NTSTATUS StatusWait;
  2571. PAGED_CODE();
  2572. TRACE(TL_STRM_WARNING,("CancelWorkItem: StreamState:%d; lCancel:%d\n", pStrmExt->StreamState, pStrmExt->lCancelStateWorkItem));
  2573. ASSERT(pStrmExt->lCancelStateWorkItem == 1);
  2574. #ifdef USE_WDM110 // Win2000 code base
  2575. ASSERT(pStrmExt->pIoWorkItem);
  2576. #endif
  2577. // Synchronize
  2578. // streaming state, and
  2579. // incoming streaming data SRBs
  2580. StatusWait =
  2581. KeWaitForMutexObject(pStrmExt->hMutexFlow, Executive, KernelMode, FALSE, NULL);
  2582. ASSERT(StatusWait == STATUS_SUCCESS);
  2583. //
2584. // We usually get here because a thread was terminated and its IRPs need to be cancelled.
  2585. // We therefore abort streaming.
  2586. //
  2587. pAVCStrmReq = &pStrmExt->AVCStrmReqAbort;
  2588. RtlZeroMemory(pAVCStrmReq, sizeof(AVC_STREAM_REQUEST_BLOCK));
  2589. INIT_AVCSTRM_HEADER(pAVCStrmReq, AVCSTRM_ABORT_STREAMING);
  2590. pAVCStrmReq->AVCStreamContext = pStrmExt->AVCStreamContext;
  2591. Status =
  2592. AVCStrmReqSubmitIrpSynch(
  2593. pStrmExt->pDevExt->pBusDeviceObject,
  2594. pStrmExt->pIrpAbort,
  2595. pAVCStrmReq
  2596. );
  2597. #if DBG
  2598. if(Status != STATUS_SUCCESS) {
  2599. TRACE(TL_STRM_ERROR,("Abort streaming status:%x\n", Status));
  2600. ASSERT(Status == STATUS_SUCCESS && "Abort streaming failed\n");
  2601. }
  2602. #endif
  2603. KeReleaseMutex(pStrmExt->hMutexFlow, FALSE);
  2604. #ifdef USE_WDM110 // Win2000 code base
  2605. // Release work item and release the cancel token
  2606. IoFreeWorkItem(pStrmExt->pIoWorkItem); pStrmExt->pIoWorkItem = NULL;
  2607. #endif
  2608. pStrmExt->AbortInProgress = TRUE;
  2609. InterlockedExchange(&pStrmExt->lCancelStateWorkItem, 0);
  2610. KeSetEvent(&pStrmExt->hCancelDoneEvent, 0, FALSE);
  2611. }
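//
// The cancel/abort path is serialized with a simple token-and-event protocol:
// lCancelStateWorkItem is claimed with InterlockedExchange so only one work item is
// ever outstanding, hCancelDoneEvent is cleared before the work item is queued and
// set again when DVCancelSrbWorkItemRoutine finishes, and CloseStream and
// SurpriseRemoval wait on that event before tearing the stream down.
//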
  2612. VOID
  2613. AVCTapeCreateAbortWorkItem(
  2614. PDVCR_EXTENSION pDevExt,
  2615. PSTREAMEX pStrmExt
  2616. )
  2617. {
  2618. // Claim this token
  2619. if(InterlockedExchange(&pStrmExt->lCancelStateWorkItem, 1) == 1) {
  2620. TRACE(TL_STRM_WARNING,("Cancel work item is already issued.\n"));
  2621. return;
  2622. }
  2623. // Cancel is already in progress
  2624. if(pStrmExt->AbortInProgress) {
  2625. TRACE(TL_STRM_WARNING,("Cancel work item is already in progress.\n"));
  2626. return;
  2627. }
  2628. #ifdef USE_WDM110 // Win2000 code base
  2629. ASSERT(pStrmExt->pIoWorkItem == NULL); // Have not yet queued work item.
  2630. // We will queue work item to stop and cancel all SRBs
  2631. if(pStrmExt->pIoWorkItem = IoAllocateWorkItem(pDevExt->pBusDeviceObject)) {
  2632. // Set to non-signal
2633. KeClearEvent(&pStrmExt->hCancelDoneEvent); // Clear before queuing, in case the work item completes and signals it immediately.
  2634. IoQueueWorkItem(
  2635. pStrmExt->pIoWorkItem,
  2636. DVCancelSrbWorkItemRoutine,
  2637. DelayedWorkQueue, // CriticalWorkQueue
  2638. pStrmExt
  2639. );
  2640. #else // Win9x code base
  2641. ExInitializeWorkItem( &pStrmExt->IoWorkItem, DVCancelSrbWorkItemRoutine, pStrmExt);
  2642. if(TRUE) {
  2643. // Set to non-signal
2644. KeClearEvent(&pStrmExt->hCancelDoneEvent); // Clear before queuing, in case the work item completes and signals it immediately.
  2645. ExQueueWorkItem(
  2646. &pStrmExt->IoWorkItem,
  2647. DelayedWorkQueue // CriticalWorkQueue
  2648. );
  2649. #endif
  2650. TRACE(TL_STRM_WARNING,("CancelWorkItm queued\n"));
  2651. }
  2652. #ifdef USE_WDM110 // Win2000 code base
  2653. else {
  2654. InterlockedExchange(&pStrmExt->lCancelStateWorkItem, 0);
  2655. ASSERT(pStrmExt->pIoWorkItem && "IoAllocateWorkItem failed.\n");
  2656. }
  2657. #endif
  2658. }
  2659. VOID
  2660. DVCRCancelOnePacket(
  2661. IN PHW_STREAM_REQUEST_BLOCK pSrbToCancel
  2662. )
  2663. /*++
  2664. Routine Description:
2665. Search the pending read lists for the SRB to be cancelled. If found, cancel it.
  2666. --*/
  2667. {
  2668. PDVCR_EXTENSION pDevExt;
  2669. PSTREAMEX pStrmExt;
  2670. pDevExt = (PDVCR_EXTENSION) pSrbToCancel->HwDeviceExtension;
  2671. // Cannot cancel device Srb.
  2672. if ((pSrbToCancel->Flags & SRB_HW_FLAGS_STREAM_REQUEST) != SRB_HW_FLAGS_STREAM_REQUEST) {
  2673. TRACE(TL_PNP_WARNING,("CancelOnePacket: Device SRB %x; cannot cancel!\n", pSrbToCancel));
  2674. ASSERT((pSrbToCancel->Flags & SRB_HW_FLAGS_STREAM_REQUEST) == SRB_HW_FLAGS_STREAM_REQUEST );
  2675. return;
  2676. }
  2677. // Can try to cancel a stream Srb and only if the stream extension still around.
  2678. pStrmExt = (PSTREAMEX) pSrbToCancel->StreamObject->HwStreamExtension;
  2679. if(pStrmExt == NULL) {
  2680. TRACE(TL_PNP_ERROR,("CancelOnePacket: pSrbTocancel %x but pStrmExt %x\n", pSrbToCancel, pStrmExt));
  2681. ASSERT(pStrmExt && "Stream SRB but stream extension is NULL\n");
  2682. return;
  2683. }
  2684. // We can only cancel SRB_READ/WRITE_DATA SRB
  2685. if((pSrbToCancel->Command != SRB_READ_DATA) && (pSrbToCancel->Command != SRB_WRITE_DATA)) {
  2686. TRACE(TL_PNP_ERROR,("CancelOnePacket: pSrbTocancel %x; Command:%d not SRB_READ,WRITE_DATA\n", pSrbToCancel, pSrbToCancel->Command));
  2687. ASSERT(pSrbToCancel->Command == SRB_READ_DATA || pSrbToCancel->Command == SRB_WRITE_DATA);
  2688. return;
  2689. }
  2690. TRACE(TL_STRM_TRACE,("CancelOnePacket: KSSt %d; Srb:%x;\n", pStrmExt->StreamState, pSrbToCancel));
  2691. // This is called at DispatchLevel.
  2692. // We will create a work item to do the cancelling (detaching buffers) at the passive level.
  2693. AVCTapeCreateAbortWorkItem(pDevExt, pStrmExt);
  2694. }
  2695. VOID
  2696. DVTimeoutHandler(
  2697. IN PHW_STREAM_REQUEST_BLOCK pSrb
  2698. )
  2699. /*++
  2700. Routine Description:
  2701. This routine is called when a packet has been in the minidriver too long.
2702. This is only valid if it was a streaming packet and we are in the PAUSE state;
2703. otherwise we have a problem!
  2704. Arguments:
  2705. pSrb - Pointer to Stream request block
  2706. Return Value:
  2707. Nothing
  2708. --*/
  2709. {
  2710. //
  2711. // Note:
2712. // Called from StreamClass at DISPATCH_LEVEL
  2713. //
  2714. //
  2715. // We only expect stream SRB, but not device SRB.
  2716. //
  2717. if ( (pSrb->Flags & SRB_HW_FLAGS_STREAM_REQUEST) != SRB_HW_FLAGS_STREAM_REQUEST) {
  2718. TRACE(TL_PNP_WARNING,("TimeoutHandler: Device SRB %x timed out!\n", pSrb));
  2719. ASSERT((pSrb->Flags & SRB_HW_FLAGS_STREAM_REQUEST) == SRB_HW_FLAGS_STREAM_REQUEST );
  2720. return;
  2721. } else {
  2722. //
  2723. // pSrb->StreamObject (and pStrmExt) only valid if it is a stream SRB
  2724. //
  2725. PSTREAMEX pStrmExt;
  2726. pStrmExt = (PSTREAMEX) pSrb->StreamObject->HwStreamExtension;
  2727. ASSERT(pStrmExt);
  2728. if(!pStrmExt) {
  2729. TRACE(TL_PNP_ERROR,("TimeoutHandler: Stream SRB %x timeout with ppStrmExt %x\n", pSrb, pStrmExt));
  2730. ASSERT(pStrmExt);
  2731. return;
  2732. }
  2733. //
2734. // Reset the timeout counter, or we are going to get this call again immediately.
  2735. //
  2736. pSrb->TimeoutCounter = pSrb->TimeoutOriginal;
  2737. }
  2738. }
  2739. NTSTATUS
  2740. AVCTapeEventHandler(
  2741. IN PHW_EVENT_DESCRIPTOR pEventDescriptor
  2742. )
  2743. /*++
  2744. Routine Description:
  2745. This routine is called to enable/disable and possibly process events.
  2746. --*/
  2747. {
  2748. PKSEVENT_TIME_MARK pEventTime;
  2749. PSTREAMEX pStrmExt;
  2750. if(IsEqualGUID (&KSEVENTSETID_Clock, pEventDescriptor->EventEntry->EventSet->Set)) {
  2751. if(pEventDescriptor->EventEntry->EventItem->EventId == KSEVENT_CLOCK_POSITION_MARK) {
  2752. if(pEventDescriptor->Enable) {
2753. // Note: According to the DDK, StreamClass queues pEventDescriptor->EventEntry and deallocates
2754. // every other structure, including pEventDescriptor->EventData.
  2755. if(pEventDescriptor->StreamObject) {
  2756. pStrmExt = (PSTREAMEX) pEventDescriptor->StreamObject->HwStreamExtension;
  2757. pEventTime = (PKSEVENT_TIME_MARK) pEventDescriptor->EventData;
  2758. // Cache the event data (Specified in the ExtraEntryData of KSEVENT_ITEM)
  2759. RtlCopyMemory((pEventDescriptor->EventEntry+1), pEventDescriptor->EventData, sizeof(KSEVENT_TIME_MARK));
  2760. TRACE(TL_CLK_TRACE,("CurrentStreamTime:%d, MarkTime:%d\n", (DWORD) pStrmExt->CurrentStreamTime, (DWORD) pEventTime->MarkTime));
  2761. }
  2762. } else {
  2763. // Disabled!
  2764. TRACE(TL_CLK_TRACE,("KSEVENT_CLOCK_POSITION_MARK disabled!\n"));
  2765. }
  2766. return STATUS_SUCCESS;
  2767. }
  2768. } else if(IsEqualGUID (&KSEVENTSETID_Connection, pEventDescriptor->EventEntry->EventSet->Set)) {
  2769. TRACE(TL_STRM_TRACE,("Connecytion event: pEventDescriptor:%x; id:%d\n", pEventDescriptor, pEventDescriptor->EventEntry->EventItem->EventId));
  2770. if(pEventDescriptor->EventEntry->EventItem->EventId == KSEVENT_CONNECTION_ENDOFSTREAM) {
  2771. if(pEventDescriptor->Enable) {
  2772. TRACE(TL_STRM_TRACE,("KSEVENT_CONNECTION_ENDOFSTREAM enabled!\n"));
  2773. } else {
  2774. TRACE(TL_STRM_TRACE,("KSEVENT_CONNECTION_ENDOFSTREAM disabled!\n"));
  2775. }
  2776. return STATUS_SUCCESS;
  2777. }
  2778. }
  2779. TRACE(TL_PNP_ERROR|TL_CLK_ERROR,("NOT_SUPPORTED event: pEventDescriptor:%x\n", pEventDescriptor));
  2780. ASSERT(FALSE);
  2781. return STATUS_NOT_SUPPORTED;
  2782. }
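
//
// Illustrative sketch only (kept out of the build): the RtlCopyMemory() into
// (EventEntry + 1) above depends on the clock event's KSEVENT_ITEM reserving
// sizeof(KSEVENT_TIME_MARK) bytes of ExtraEntryData directly behind each
// KSEVENT_ENTRY. The driver's real event table is declared elsewhere; an entry
// of roughly that shape could look like this:
//
#if 0
KSEVENT_ITEM ClockEventItems[] = {
    {
        KSEVENT_CLOCK_POSITION_MARK,    // EventId
        sizeof(KSEVENT_TIME_MARK),      // DataInput expected from the client
        sizeof(KSEVENT_TIME_MARK),      // ExtraEntryData cached after KSEVENT_ENTRY
        NULL,                           // AddHandler   (stream class minidrivers get
        NULL,                           // RemoveHandler enable/disable via the HW event routine)
        NULL                            // SupportHandler
    }
};
#endif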
VOID
AVCTapeSignalClockEvent(
    IN PKDPC Dpc,
    IN PSTREAMEX pStrmExt,
    IN PVOID SystemArgument1,
    IN PVOID SystemArgument2
    )
/*++
Routine Description:
    This routine is called when we are the clock provider and our clock "ticks".
    Find the pending clock events and signal each one that has expired.
--*/
{
    PKSEVENT_ENTRY pEvent, pLast;

    pEvent = NULL;
    pLast = NULL;

    while((
        pEvent = StreamClassGetNextEvent(
            pStrmExt->pDevExt,
            pStrmExt->pStrmObject,
            (GUID *)&KSEVENTSETID_Clock,
            KSEVENT_CLOCK_POSITION_MARK,
            pLast ))
        != NULL ) {

#if 1
#define CLOCK_ADJUSTMENT 400000
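        // CLOCK_ADJUSTMENT is in 100 ns units, so 400000 corresponds to 40 ms:
        // an event whose mark time falls within the next 40 ms is signalled on
        // this tick instead of waiting for the next one.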
        if (((PKSEVENT_TIME_MARK)(pEvent +1))->MarkTime <= pStrmExt->CurrentStreamTime + CLOCK_ADJUSTMENT) {
#else
        if (((PKSEVENT_TIME_MARK)(pEvent +1))->MarkTime <= pStrmExt->CurrentStreamTime) {
#endif
            TRACE(TL_CLK_TRACE,("Clock event %x with id %d; Data:%x; tmMark:%d; tmCurrentStream:%d; Notify!\n",
                pEvent, KSEVENT_CLOCK_POSITION_MARK, (PKSEVENT_TIME_MARK)(pEvent +1),
                (DWORD) (((PKSEVENT_TIME_MARK)(pEvent +1))->MarkTime), (DWORD) pStrmExt->CurrentStreamTime));
            ASSERT( ((PKSEVENT_TIME_MARK)(pEvent +1))->MarkTime != 0 );
            //
            // Signal the event here
            //
            StreamClassStreamNotification(
                SignalStreamEvent,
                pStrmExt->pStrmObject,
                pEvent
                );
        } else {
            TRACE(TL_CLK_WARNING,("Still early! ClockEvent: MarkTime:%d, tmStream:%d\n",
                (DWORD) (((PKSEVENT_TIME_MARK)(pEvent +1))->MarkTime), (DWORD) pStrmExt->CurrentStreamTime));
        }

        pLast = pEvent;
    }

#if DBG
    if(pLast == NULL) {
        TRACE(TL_CLK_WARNING,("No clock event in the queue! State:%d; tmCurrentStream:%d\n", pStrmExt->StreamState, (DWORD) pStrmExt->CurrentStreamTime));
    }
#endif
}
VOID
AVCTapeStreamClockRtn(
    IN PHW_TIME_CONTEXT TimeContext
    )
/*++
Routine Description:
    This routine is called whenever someone in the graph wants to know what time it is, and we are the master clock.
--*/
{
    PDVCR_EXTENSION pDevExt;
    PHW_STREAM_OBJECT pStrmObj;
    PSTREAMEX pStrmExt;

    // Called at dispatch level
    pDevExt = (PDVCR_EXTENSION) TimeContext->HwDeviceExtension;
    pStrmObj = TimeContext->HwStreamObject;
    if(pStrmObj)
        pStrmExt = pStrmObj->HwStreamExtension;
    else
        pStrmExt = 0;

    if(!pDevExt || !pStrmExt) {
        ASSERT(pDevExt && pStrmExt);
        return;
    }

    switch (TimeContext->Function) {
    case TIME_GET_STREAM_TIME:
        //
        // How long since the stream was first set into the run state?
        //
        ASSERT(pStrmExt->hMasterClock && "We are not the master clock but we were queried?");
        TimeContext->Time = pStrmExt->CurrentStreamTime;
        TimeContext->SystemTime = GetSystemTime();
        TRACE(TL_STRM_WARNING|TL_CLK_TRACE,("State:%d; tmStream:%d tmSys:%d\n", pStrmExt->StreamState, (DWORD) TimeContext->Time, (DWORD) TimeContext->SystemTime ));
        break;

    default:
        ASSERT(TimeContext->Function == TIME_GET_STREAM_TIME && "Unsupported clock function");
        break;
    } // switch TimeContext->Function
}
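
//
// Note: for TIME_GET_STREAM_TIME the caller gets back a (stream time, system time)
// pair sampled together, so the clock client can extrapolate the stream position
// between queries. CurrentStreamTime is advanced elsewhere in this driver as data
// is streamed; GetSystemTime() is a driver-local time helper (defined elsewhere),
// not the Win32 API of the same name.
//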
NTSTATUS
AVCTapeOpenCloseMasterClock (
    PSTREAMEX pStrmExt,
    HANDLE hMasterClockHandle
    )
/*++
Routine Description:
    We can be a clock provider.
--*/
{
    PAGED_CODE();

    // Make sure the stream exists.
    if(pStrmExt == NULL) {
        TRACE(TL_STRM_ERROR|TL_CLK_ERROR,("OpenCloseMasterClock: stream is not yet running.\n"));
        ASSERT(pStrmExt);
        return STATUS_UNSUCCESSFUL;
    }

    TRACE(TL_CLK_WARNING,("OpenCloseMasterClock: pStrmExt %x; hMyClock:%x->%x\n",
        pStrmExt, pStrmExt->hMyClock, hMasterClockHandle));

    if(hMasterClockHandle) {
        // Open master clock
        ASSERT(pStrmExt->hMyClock == NULL && "OpenMasterClk while hMyClock is not NULL!");
        pStrmExt->hMyClock = hMasterClockHandle;
    } else {
        // Close master clock
        ASSERT(pStrmExt->hMyClock && "CloseMasterClk while hMyClock is NULL!");
        pStrmExt->hMyClock = NULL;
    }

    return STATUS_SUCCESS;
}
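
//
// Note: hMyClock caches the clock handle handed to this stream when the graph opens
// our clock (typically via SRB_OPEN_MASTER_CLOCK) and is cleared again on close.
// It only records that our clock object exists; whether this stream actually serves
// as the graph's master clock is decided in AVCTapeIndicateMasterClock() below.
//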
NTSTATUS
AVCTapeIndicateMasterClock (
    PSTREAMEX pStrmExt,
    HANDLE hIndicateClockHandle
    )
/*++
Routine Description:
    Compare the indicated clock handle with my clock handle.
    If they are the same, we are the master clock; otherwise, another
    device is the master clock.
    Note: at most one of hMasterClock and hClock is set.
--*/
{
    PAGED_CODE();

    // Make sure the stream exists.
    if (pStrmExt == NULL) {
        TRACE(TL_STRM_ERROR|TL_CLK_ERROR,("AVCTapeIndicateMasterClock: stream is not yet running.\n"));
        ASSERT(pStrmExt);
        return STATUS_UNSUCCESSFUL;
    }

    TRACE(TL_STRM_TRACE|TL_CLK_WARNING,("IndicateMasterClock[Enter]: pStrmExt:%x; hMyClk:%x; IndMClk:%x; pClk:%x, pMClk:%x\n",
        pStrmExt, pStrmExt->hMyClock, hIndicateClockHandle, pStrmExt->hClock, pStrmExt->hMasterClock));

    // If the indicated handle is our own clock handle, we are the master clock.
    if(hIndicateClockHandle == pStrmExt->hMyClock) {
        pStrmExt->hMasterClock = hIndicateClockHandle;
        pStrmExt->hClock = NULL;
    } else {
        pStrmExt->hMasterClock = NULL;
        pStrmExt->hClock = hIndicateClockHandle;
    }

    TRACE(TL_STRM_TRACE|TL_CLK_TRACE,("IndicateMasterClk[Exit]: hMyClk:%x; IndMClk:%x; pClk:%x; pMClk:%x\n",
        pStrmExt->hMyClock, hIndicateClockHandle, pStrmExt->hClock, pStrmExt->hMasterClock));

    return STATUS_SUCCESS;
}
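
//
// Clock bookkeeping after SRB_INDICATE_MASTER_CLOCK, as implemented above:
//
//   hMasterClock != NULL, hClock == NULL   -> this stream is the graph's master
//                                             clock (AVCTapeStreamClockRtn answers
//                                             time queries).
//   hMasterClock == NULL, hClock != NULL   -> another filter provides the master
//                                             clock; we only follow it.
//   hMasterClock == NULL, hClock == NULL   -> no master clock has been indicated.
//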