Source code of Windows XP (NT5)
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

2527 lines
77 KiB

  1. //----------------------------------------------------------------------------
  2. //
  3. // Register portions of AMD64 machine implementation.
  4. //
  5. // Copyright (C) Microsoft Corporation, 2000-2001.
  6. //
  7. //----------------------------------------------------------------------------
  8. #include "ntsdp.hpp"
  9. // See Get/SetRegVal comments in machine.hpp.
  10. #define RegValError Do_not_use_GetSetRegVal_in_machine_implementations
  11. #define GetRegVal(index, val) RegValError
  12. #define GetRegVal32(index) RegValError
  13. #define GetRegVal64(index) RegValError
  14. #define SetRegVal(index, val) RegValError
  15. #define SetRegVal32(index, val) RegValError
  16. #define SetRegVal64(index, val) RegValError
  17. #define REGALL_SEGREG REGALL_EXTRA0
  18. #define REGALL_MMXREG REGALL_EXTRA1
  19. #define REGALL_DREG REGALL_EXTRA2
// User-visible descriptions of the optional register groups available
// on AMD64 for all targets.  Each entry pairs a REGALL_* mask bit with
// its display string; the list is terminated by a { 0, NULL } entry.
REGALLDESC g_Amd64AllExtraDesc[] =
{
    REGALL_SEGREG, "Segment registers",
    REGALL_MMXREG, "MMX registers",
    REGALL_DREG, "Debug registers and, in kernel, CR4",
    REGALL_XMMREG, "SSE XMM registers",
    0, NULL,                            // Terminator.
};
  28. #define REGALL_CREG REGALL_EXTRA4
  29. #define REGALL_DESC REGALL_EXTRA5
// Additional register-group descriptions that only apply to kernel
// targets (attached via g_Amd64KernelGroup).  { 0, NULL } terminated.
REGALLDESC g_Amd64KernelExtraDesc[] =
{
    REGALL_CREG, "CR0, CR2 and CR3",
    REGALL_DESC, "Descriptor and task state",
    0, NULL,                            // Terminator.
};
// Register-name strings for registers specific to AMD64.  Names shared
// with the x86 implementation (g_Eax, g_R8, g_Dr0, g_St0, ...) are
// defined elsewhere and only referenced from the tables below.
char g_Rax[] = "rax";
char g_Rcx[] = "rcx";
char g_Rdx[] = "rdx";
char g_Rbx[] = "rbx";
char g_Rsp[] = "rsp";
char g_Rbp[] = "rbp";
char g_Rsi[] = "rsi";
char g_Rdi[] = "rdi";
char g_Rip[] = "rip";
// Upper eight SSE registers (xmm0-7 names come from the x86 tables).
char g_Xmm8[] = "xmm8";
char g_Xmm9[] = "xmm9";
char g_Xmm10[] = "xmm10";
char g_Xmm11[] = "xmm11";
char g_Xmm12[] = "xmm12";
char g_Xmm13[] = "xmm13";
char g_Xmm14[] = "xmm14";
char g_Xmm15[] = "xmm15";
char g_Cr8[] = "cr8";
// Byte-wide views of rsp/rbp/rsi/rdi, new with the REX prefix.
char g_Spl[] = "spl";
char g_Bpl[] = "bpl";
char g_Sil[] = "sil";
char g_Dil[] = "dil";
// 32-, 16- and 8-bit views of the extended registers r8-r15.
char g_R8d[] = "r8d";
char g_R9d[] = "r9d";
char g_R10d[] = "r10d";
char g_R11d[] = "r11d";
char g_R12d[] = "r12d";
char g_R13d[] = "r13d";
char g_R14d[] = "r14d";
char g_R15d[] = "r15d";
char g_R8w[] = "r8w";
char g_R9w[] = "r9w";
char g_R10w[] = "r10w";
char g_R11w[] = "r11w";
char g_R12w[] = "r12w";
char g_R13w[] = "r13w";
char g_R14w[] = "r14w";
char g_R15w[] = "r15w";
char g_R8b[] = "r8b";
char g_R9b[] = "r9b";
char g_R10b[] = "r10b";
char g_R11b[] = "r11b";
char g_R12b[] = "r12b";
char g_R13b[] = "r13b";
char g_R14b[] = "r14b";
char g_R15b[] = "r15b";
// Master register-definition table for AMD64: maps each register name
// string to its AMD64_* register index.  Terminated by { NULL, REG_ERROR }.
// Sub-register entries (eax, ax, al, flag bits, ...) are resolved through
// g_Amd64SubDefs below.
REGDEF g_Amd64Defs[] =
{
    // 64-bit general-purpose registers.
    { g_Rax, AMD64_RAX },
    { g_Rcx, AMD64_RCX },
    { g_Rdx, AMD64_RDX },
    { g_Rbx, AMD64_RBX },
    { g_Rsp, AMD64_RSP },
    { g_Rbp, AMD64_RBP },
    { g_Rsi, AMD64_RSI },
    { g_Rdi, AMD64_RDI },
    { g_R8, AMD64_R8 },
    { g_R9, AMD64_R9 },
    { g_R10, AMD64_R10 },
    { g_R11, AMD64_R11 },
    { g_R12, AMD64_R12 },
    { g_R13, AMD64_R13 },
    { g_R14, AMD64_R14 },
    { g_R15, AMD64_R15 },
    { g_Rip, AMD64_RIP },
    { g_Efl, AMD64_EFL },
    // Segment registers.
    { g_Cs, AMD64_CS },
    { g_Ds, AMD64_DS },
    { g_Es, AMD64_ES },
    { g_Fs, AMD64_FS },
    { g_Gs, AMD64_GS },
    { g_Ss, AMD64_SS },
    // Debug registers.
    { g_Dr0, AMD64_DR0 },
    { g_Dr1, AMD64_DR1 },
    { g_Dr2, AMD64_DR2 },
    { g_Dr3, AMD64_DR3 },
    { g_Dr6, AMD64_DR6 },
    { g_Dr7, AMD64_DR7 },
    // x87 floating-point state.
    { g_Fpcw, AMD64_FPCW },
    { g_Fpsw, AMD64_FPSW },
    { g_Fptw, AMD64_FPTW },
    { g_St0, AMD64_ST0 },
    { g_St1, AMD64_ST1 },
    { g_St2, AMD64_ST2 },
    { g_St3, AMD64_ST3 },
    { g_St4, AMD64_ST4 },
    { g_St5, AMD64_ST5 },
    { g_St6, AMD64_ST6 },
    { g_St7, AMD64_ST7 },
    // MMX aliases of the x87 registers.
    { g_Mm0, AMD64_MM0 },
    { g_Mm1, AMD64_MM1 },
    { g_Mm2, AMD64_MM2 },
    { g_Mm3, AMD64_MM3 },
    { g_Mm4, AMD64_MM4 },
    { g_Mm5, AMD64_MM5 },
    { g_Mm6, AMD64_MM6 },
    { g_Mm7, AMD64_MM7 },
    // SSE state.
    { g_Mxcsr, AMD64_MXCSR },
    { g_Xmm0, AMD64_XMM0 },
    { g_Xmm1, AMD64_XMM1 },
    { g_Xmm2, AMD64_XMM2 },
    { g_Xmm3, AMD64_XMM3 },
    { g_Xmm4, AMD64_XMM4 },
    { g_Xmm5, AMD64_XMM5 },
    { g_Xmm6, AMD64_XMM6 },
    { g_Xmm7, AMD64_XMM7 },
    { g_Xmm8, AMD64_XMM8 },
    { g_Xmm9, AMD64_XMM9 },
    { g_Xmm10, AMD64_XMM10 },
    { g_Xmm11, AMD64_XMM11 },
    { g_Xmm12, AMD64_XMM12 },
    { g_Xmm13, AMD64_XMM13 },
    { g_Xmm14, AMD64_XMM14 },
    { g_Xmm15, AMD64_XMM15 },
    // 32-bit sub-registers.
    { g_Eax, AMD64_EAX },
    { g_Ecx, AMD64_ECX },
    { g_Edx, AMD64_EDX },
    { g_Ebx, AMD64_EBX },
    { g_Esp, AMD64_ESP },
    { g_Ebp, AMD64_EBP },
    { g_Esi, AMD64_ESI },
    { g_Edi, AMD64_EDI },
    { g_R8d, AMD64_R8D },
    { g_R9d, AMD64_R9D },
    { g_R10d, AMD64_R10D },
    { g_R11d, AMD64_R11D },
    { g_R12d, AMD64_R12D },
    { g_R13d, AMD64_R13D },
    { g_R14d, AMD64_R14D },
    { g_R15d, AMD64_R15D },
    { g_Eip, AMD64_EIP },
    // 16-bit sub-registers.
    { g_Ax, AMD64_AX },
    { g_Cx, AMD64_CX },
    { g_Dx, AMD64_DX },
    { g_Bx, AMD64_BX },
    { g_Sp, AMD64_SP },
    { g_Bp, AMD64_BP },
    { g_Si, AMD64_SI },
    { g_Di, AMD64_DI },
    { g_R8w, AMD64_R8W },
    { g_R9w, AMD64_R9W },
    { g_R10w, AMD64_R10W },
    { g_R11w, AMD64_R11W },
    { g_R12w, AMD64_R12W },
    { g_R13w, AMD64_R13W },
    { g_R14w, AMD64_R14W },
    { g_R15w, AMD64_R15W },
    { g_Ip, AMD64_IP },
    { g_Fl, AMD64_FL },
    // 8-bit sub-registers.
    { g_Al, AMD64_AL },
    { g_Cl, AMD64_CL },
    { g_Dl, AMD64_DL },
    { g_Bl, AMD64_BL },
    { g_Spl, AMD64_SPL },
    { g_Bpl, AMD64_BPL },
    { g_Sil, AMD64_SIL },
    { g_Dil, AMD64_DIL },
    { g_R8b, AMD64_R8B },
    { g_R9b, AMD64_R9B },
    { g_R10b, AMD64_R10B },
    { g_R11b, AMD64_R11B },
    { g_R12b, AMD64_R12B },
    { g_R13b, AMD64_R13B },
    { g_R14b, AMD64_R14B },
    { g_R15b, AMD64_R15B },
    { g_Ah, AMD64_AH },
    { g_Ch, AMD64_CH },
    { g_Dh, AMD64_DH },
    { g_Bh, AMD64_BH },
    // Individual EFLAGS fields.
    { g_Iopl, AMD64_IOPL },
    { g_Of, AMD64_OF },
    { g_Df, AMD64_DF },
    { g_If, AMD64_IF },
    { g_Tf, AMD64_TF },
    { g_Sf, AMD64_SF },
    { g_Zf, AMD64_ZF },
    { g_Af, AMD64_AF },
    { g_Pf, AMD64_PF },
    { g_Cf, AMD64_CF },
    { g_Vip, AMD64_VIP },
    { g_Vif, AMD64_VIF },
    { NULL, REG_ERROR },                // Terminator.
};
// Registers only accessible on kernel targets: control registers and
// descriptor-table/task state.  Terminated by { NULL, REG_ERROR }.
REGDEF g_Amd64KernelReg[] =
{
    { g_Cr0, AMD64_CR0 },
    { g_Cr2, AMD64_CR2 },
    { g_Cr3, AMD64_CR3 },
    { g_Cr4, AMD64_CR4 },
#ifdef HAVE_AMD64_CR8
    { g_Cr8, AMD64_CR8 },
#endif
    { g_Gdtr, AMD64_GDTR },
    { g_Gdtl, AMD64_GDTL },
    { g_Idtr, AMD64_IDTR },
    { g_Idtl, AMD64_IDTL },
    { g_Tr, AMD64_TR },
    { g_Ldtr, AMD64_LDTR },
    { NULL, REG_ERROR },                // Terminator.
};
// Sub-register definitions.  Each entry is
//   { sub-register index, full-register index, right-shift, mask }
// so that sub = (full >> shift) & mask.  Terminated by a REG_ERROR entry.
REGSUBDEF g_Amd64SubDefs[] =
{
    // Low 32 bits of the 64-bit GPRs.
    { AMD64_EAX, AMD64_RAX, 0, 0xffffffff }, // EAX register
    { AMD64_ECX, AMD64_RCX, 0, 0xffffffff }, // ECX register
    { AMD64_EDX, AMD64_RDX, 0, 0xffffffff }, // EDX register
    { AMD64_EBX, AMD64_RBX, 0, 0xffffffff }, // EBX register
    { AMD64_ESP, AMD64_RSP, 0, 0xffffffff }, // ESP register
    { AMD64_EBP, AMD64_RBP, 0, 0xffffffff }, // EBP register
    { AMD64_ESI, AMD64_RSI, 0, 0xffffffff }, // ESI register
    { AMD64_EDI, AMD64_RDI, 0, 0xffffffff }, // EDI register
    { AMD64_R8D, AMD64_R8, 0, 0xffffffff }, // R8D register
    { AMD64_R9D, AMD64_R9, 0, 0xffffffff }, // R9D register
    { AMD64_R10D, AMD64_R10, 0, 0xffffffff }, // R10D register
    { AMD64_R11D, AMD64_R11, 0, 0xffffffff }, // R11D register
    { AMD64_R12D, AMD64_R12, 0, 0xffffffff }, // R12D register
    { AMD64_R13D, AMD64_R13, 0, 0xffffffff }, // R13D register
    { AMD64_R14D, AMD64_R14, 0, 0xffffffff }, // R14D register
    { AMD64_R15D, AMD64_R15, 0, 0xffffffff }, // R15D register
    { AMD64_EIP, AMD64_RIP, 0, 0xffffffff }, // EIP register
    // Low 16 bits.
    { AMD64_AX, AMD64_RAX, 0, 0xffff }, // AX register
    { AMD64_CX, AMD64_RCX, 0, 0xffff }, // CX register
    { AMD64_DX, AMD64_RDX, 0, 0xffff }, // DX register
    { AMD64_BX, AMD64_RBX, 0, 0xffff }, // BX register
    { AMD64_SP, AMD64_RSP, 0, 0xffff }, // SP register
    { AMD64_BP, AMD64_RBP, 0, 0xffff }, // BP register
    { AMD64_SI, AMD64_RSI, 0, 0xffff }, // SI register
    { AMD64_DI, AMD64_RDI, 0, 0xffff }, // DI register
    { AMD64_R8W, AMD64_R8, 0, 0xffff }, // R8W register
    { AMD64_R9W, AMD64_R9, 0, 0xffff }, // R9W register
    { AMD64_R10W, AMD64_R10, 0, 0xffff }, // R10W register
    { AMD64_R11W, AMD64_R11, 0, 0xffff }, // R11W register
    { AMD64_R12W, AMD64_R12, 0, 0xffff }, // R12W register
    { AMD64_R13W, AMD64_R13, 0, 0xffff }, // R13W register
    { AMD64_R14W, AMD64_R14, 0, 0xffff }, // R14W register
    { AMD64_R15W, AMD64_R15, 0, 0xffff }, // R15W register
    { AMD64_IP, AMD64_RIP, 0, 0xffff }, // IP register
    { AMD64_FL, AMD64_EFL, 0, 0xffff }, // FL register
    // Low 8 bits.
    { AMD64_AL, AMD64_RAX, 0, 0xff }, // AL register
    { AMD64_CL, AMD64_RCX, 0, 0xff }, // CL register
    { AMD64_DL, AMD64_RDX, 0, 0xff }, // DL register
    { AMD64_BL, AMD64_RBX, 0, 0xff }, // BL register
    { AMD64_SPL, AMD64_RSP, 0, 0xff }, // SPL register
    { AMD64_BPL, AMD64_RBP, 0, 0xff }, // BPL register
    { AMD64_SIL, AMD64_RSI, 0, 0xff }, // SIL register
    { AMD64_DIL, AMD64_RDI, 0, 0xff }, // DIL register
    { AMD64_R8B, AMD64_R8, 0, 0xff }, // R8B register
    { AMD64_R9B, AMD64_R9, 0, 0xff }, // R9B register
    { AMD64_R10B, AMD64_R10, 0, 0xff }, // R10B register
    { AMD64_R11B, AMD64_R11, 0, 0xff }, // R11B register
    { AMD64_R12B, AMD64_R12, 0, 0xff }, // R12B register
    { AMD64_R13B, AMD64_R13, 0, 0xff }, // R13B register
    { AMD64_R14B, AMD64_R14, 0, 0xff }, // R14B register
    { AMD64_R15B, AMD64_R15, 0, 0xff }, // R15B register
    // High byte of the low word (legacy AH/CH/DH/BH).
    { AMD64_AH, AMD64_RAX, 8, 0xff }, // AH register
    { AMD64_CH, AMD64_RCX, 8, 0xff }, // CH register
    { AMD64_DH, AMD64_RDX, 8, 0xff }, // DH register
    { AMD64_BH, AMD64_RBX, 8, 0xff }, // BH register
    // EFLAGS bit fields.
    { AMD64_IOPL, AMD64_EFL, 12, 3 }, // IOPL level value
    { AMD64_OF, AMD64_EFL, 11, 1 }, // OF (overflow flag)
    { AMD64_DF, AMD64_EFL, 10, 1 }, // DF (direction flag)
    { AMD64_IF, AMD64_EFL, 9, 1 }, // IF (interrupt enable flag)
    { AMD64_TF, AMD64_EFL, 8, 1 }, // TF (trace flag)
    { AMD64_SF, AMD64_EFL, 7, 1 }, // SF (sign flag)
    { AMD64_ZF, AMD64_EFL, 6, 1 }, // ZF (zero flag)
    { AMD64_AF, AMD64_EFL, 4, 1 }, // AF (aux carry flag)
    { AMD64_PF, AMD64_EFL, 2, 1 }, // PF (parity flag)
    { AMD64_CF, AMD64_EFL, 0, 1 }, // CF (carry flag)
    { AMD64_VIP, AMD64_EFL, 20, 1 }, // VIP (virtual interrupt pending)
    { AMD64_VIF, AMD64_EFL, 19, 1 }, // VIF (virtual interrupt flag)
    { REG_ERROR, REG_ERROR, 0, 0 }      // Terminator.
};
// Base register group, always installed for AMD64 targets.
RegisterGroup g_Amd64BaseGroup =
{
    NULL, 0, g_Amd64Defs, g_Amd64SubDefs, g_Amd64AllExtraDesc
};
// Kernel-only register group; chained after the base group when the
// target is a kernel target (see Amd64MachineInfo::InitializeForTarget).
RegisterGroup g_Amd64KernelGroup =
{
    NULL, 0, g_Amd64KernelReg, NULL, g_Amd64KernelExtraDesc
};
// First ExecTypes entry must be the actual processor type.
ULONG g_Amd64ExecTypes[] =
{
    IMAGE_FILE_MACHINE_AMD64
};
// Singleton machine-description instance for AMD64.
Amd64MachineInfo g_Amd64Machine;
// Tracks whether the current code segment is executing 64-bit code;
// maintained by InitializeContext below.
BOOL g_Amd64InCode64;
//----------------------------------------------------------------------------
// One-time setup of target-independent constants for the AMD64 machine:
// display names, page geometry, executable machine types and capability
// flags.  Chains to the base-class implementation.
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::InitializeConstants(void)
{
    m_FullName = "AMD x86-64";
    m_AbbrevName = "AMD64";
    m_PageSize = AMD64_PAGE_SIZE;
    m_PageShift = AMD64_PAGE_SHIFT;
    m_NumExecTypes = 1;
    m_ExecTypes = g_Amd64ExecTypes;
    // AMD64 is a 64-bit-pointer platform.
    m_Ptr64 = TRUE;
    // Default register display: 64-bit integer registers plus segments.
    m_AllMask = REGALL_INT64 | REGALL_SEGREG;
    // Four hardware data breakpoints (DR0-DR3).
    m_MaxDataBreakpoints = 4;
    m_SymPrefix = NULL;
    return MachineInfo::InitializeConstants();
}
//----------------------------------------------------------------------------
// Per-target setup: installs the register groups (adding the kernel-only
// group for kernel targets) and records the AMD64 layout constants used
// to pick fields out of target structures (KPRCB, KTHREAD, EPROCESS,
// CONTEXT, KSPECIAL_REGISTERS).
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::InitializeForTarget(void)
{
    m_Groups = &g_Amd64BaseGroup;
    g_Amd64BaseGroup.Next = NULL;
    if (IS_KERNEL_TARGET())
    {
        // Kernel targets also expose CRs, DRs and descriptor state.
        g_Amd64BaseGroup.Next = &g_Amd64KernelGroup;
    }
    m_OffsetPrcbProcessorState =
        FIELD_OFFSET(AMD64_PARTIAL_KPRCB, ProcessorState);
    m_OffsetPrcbNumber =
        FIELD_OFFSET(AMD64_PARTIAL_KPRCB, Number);
    m_TriagePrcbOffset = AMD64_TRIAGE_PRCB_ADDRESS;
    m_SizePrcb = AMD64_KPRCB_SIZE;
    m_OffsetKThreadApcProcess =
        FIELD_OFFSET(CROSS_PLATFORM_THREAD, Amd64Thread.ApcState.Process);
    m_OffsetKThreadTeb =
        FIELD_OFFSET(CROSS_PLATFORM_THREAD, Amd64Thread.Teb);
    m_OffsetKThreadInitialStack =
        FIELD_OFFSET(CROSS_PLATFORM_THREAD, Amd64Thread.InitialStack);
    m_OffsetKThreadNextProcessor = AMD64_KTHREAD_NEXTPROCESSOR_OFFSET;
    m_OffsetEprocessPeb = AMD64_PEB_IN_EPROCESS;
    m_OffsetEprocessDirectoryTableBase =
        AMD64_DIRECTORY_TABLE_BASE_IN_EPROCESS;
    m_SizeTargetContext = sizeof(AMD64_CONTEXT);
    m_OffsetTargetContextFlags = FIELD_OFFSET(AMD64_CONTEXT, ContextFlags);
    // Canonical context is the native AMD64 context.
    m_SizeCanonicalContext = sizeof(AMD64_CONTEXT);
    m_SverCanonicalContext = NT_SVER_W2K;
    m_SizeControlReport = sizeof(AMD64_DBGKD_CONTROL_REPORT);
    m_SizeEThread = AMD64_ETHREAD_SIZE;
    m_SizeEProcess = AMD64_EPROCESS_SIZE;
    m_OffsetSpecialRegisters = AMD64_DEBUG_CONTROL_SPACE_KSPECIAL;
    m_SizeKspecialRegisters = sizeof(AMD64_KSPECIAL_REGISTERS);
    m_SizePartialKThread = sizeof(AMD64_THREAD);
    // Shared user data lives at a fixed kernel address in kernel mode
    // and at the user-mode mapping address otherwise.
    m_SharedUserDataOffset = IS_KERNEL_TARGET() ?
        AMD64_KI_USER_SHARED_DATA : MM_SHARED_USER_DATA_VA;
    return MachineInfo::InitializeForTarget();
}
//----------------------------------------------------------------------------
// Seeds the cached register context from a kernel state-change report.
//
// Pc            - Program counter from the state change (0 if unknown).
// ControlReport - Optional control report carrying DR6/DR7, segment
//                 selectors and a small instruction-stream snapshot.
//
// Also recomputes the global execution-mode flags (g_Amd64InCode64,
// g_X86InVm86, g_X86InCode16) from the CS descriptor.
//----------------------------------------------------------------------------
void
Amd64MachineInfo::
InitializeContext(ULONG64 Pc,
                  PDBGKD_ANY_CONTROL_REPORT ControlReport)
{
    m_Context.Amd64Context.Rip = Pc;
    m_ContextState = Pc ? MCTX_PC : MCTX_NONE;
    if (ControlReport != NULL)
    {
        BpOut("InitializeContext(%d) DR6 %I64X DR7 %I64X\n",
              g_RegContextProcessor, ControlReport->Amd64ControlReport.Dr6,
              ControlReport->Amd64ControlReport.Dr7);
        // The report always carries the debug registers.
        m_Context.Amd64Context.Dr6 = ControlReport->Amd64ControlReport.Dr6;
        m_Context.Amd64Context.Dr7 = ControlReport->Amd64ControlReport.Dr7;
        m_ContextState = MCTX_DR67_REPORT;
        if (ControlReport->Amd64ControlReport.ReportFlags &
            AMD64_REPORT_INCLUDES_SEGS)
        {
            // Segment selectors and EFLAGS are optional report content.
            m_Context.Amd64Context.SegCs =
                ControlReport->Amd64ControlReport.SegCs;
            m_Context.Amd64Context.SegDs =
                ControlReport->Amd64ControlReport.SegDs;
            m_Context.Amd64Context.SegEs =
                ControlReport->Amd64ControlReport.SegEs;
            m_Context.Amd64Context.SegFs =
                ControlReport->Amd64ControlReport.SegFs;
            m_Context.Amd64Context.EFlags =
                ControlReport->Amd64ControlReport.EFlags;
            m_ContextState = MCTX_REPORT;
        }
    }
    g_X86InVm86 = FALSE;
    g_X86InCode16 = FALSE;
    // In the absence of other information, assume we're
    // executing 64-bit code.
    g_Amd64InCode64 = TRUE;
    if (IS_CONTEXT_POSSIBLE())
    {
        if (ControlReport == NULL ||
            (ControlReport->Amd64ControlReport.ReportFlags &
             AMD64_REPORT_STANDARD_CS) == 0)
        {
            DESCRIPTOR64 Desc;
            // Check what kind of code segment we're in.
            if (GetSegRegDescriptor(SEGREG_CODE, &Desc) != S_OK)
            {
                WarnOut("CS descriptor lookup failed\n");
            }
            else if ((Desc.Flags & X86_DESC_LONG_MODE) == 0)
            {
                // Not a long-mode segment: fall back to legacy x86
                // semantics and derive VM86/16-bit state from the
                // descriptor and EFLAGS.
                g_Amd64InCode64 = FALSE;
                g_X86InVm86 = X86_IS_VM86(GetReg32(X86_EFL));
                g_X86InCode16 = (Desc.Flags & X86_DESC_DEFAULT_BIG) == 0;
            }
        }
        else
        {
            // We're in a standard code segment so cache
            // a default descriptor for CS to avoid further
            // CS lookups.
            EmulateNtSelDescriptor(this, m_Context.Amd64Context.SegCs,
                                   &m_SegRegDesc[SEGREG_CODE]);
        }
    }
    // Add instructions to cache only if we're in flat mode.
    if (Pc && ControlReport != NULL &&
        !g_X86InVm86 && !g_X86InCode16 && g_Amd64InCode64)
    {
        CacheReportInstructions
            (Pc, ControlReport->Amd64ControlReport.InstructionCount,
             ControlReport->Amd64ControlReport.InstructionStream);
    }
}
//----------------------------------------------------------------------------
// Promotes the cached context state to at least the requested level,
// fetching register state from the kernel target as needed.
//
// State - Minimum MCTX_* level required by the caller.
//
// Returns S_OK, or the failure from the underlying target reads.
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::KdGetContextState(ULONG State)
{
    HRESULT Status;
    // Level 1: pull the standard register context.
    if (State >= MCTX_CONTEXT && m_ContextState < MCTX_CONTEXT)
    {
        Status = g_Target->GetContext(g_RegContextThread->Handle, &m_Context);
        if (Status != S_OK)
        {
            return Status;
        }
        m_ContextState = MCTX_CONTEXT;
    }
    // Level 2: also pull the special registers and all segment-register
    // descriptors.
    if (State >= MCTX_FULL && m_ContextState < MCTX_FULL)
    {
        Status = g_Target->GetTargetSpecialRegisters
            (g_RegContextThread->Handle, (PCROSS_PLATFORM_KSPECIAL_REGISTERS)
             &m_SpecialRegContext);
        if (Status != S_OK)
        {
            return Status;
        }
        Status = g_Target->GetTargetSegRegDescriptors
            (g_RegContextThread->Handle, 0, SEGREG_COUNT, m_SegRegDesc);
        if (Status != S_OK)
        {
            return Status;
        }
        m_ContextState = MCTX_FULL;
        // Fold the freshly-read special registers into the context copy.
        KdSetSpecialRegistersInContext();
        BpOut("GetContextState(%d) DR6 %I64X DR7 %I64X\n",
              g_RegContextProcessor, m_SpecialRegContext.KernelDr6,
              m_SpecialRegContext.KernelDr7);
    }
    return S_OK;
}
  485. HRESULT
  486. Amd64MachineInfo::KdSetContext(void)
  487. {
  488. HRESULT Status;
  489. Status = g_Target->SetContext(g_RegContextThread->Handle, &m_Context);
  490. if (Status != S_OK)
  491. {
  492. return Status;
  493. }
  494. KdGetSpecialRegistersFromContext();
  495. Status = g_Target->SetTargetSpecialRegisters
  496. (g_RegContextThread->Handle, (PCROSS_PLATFORM_KSPECIAL_REGISTERS)
  497. &m_SpecialRegContext);
  498. BpOut("SetContext(%d) DR6 %I64X DR7 %I64X\n",
  499. g_RegContextProcessor, m_SpecialRegContext.KernelDr6,
  500. m_SpecialRegContext.KernelDr7);
  501. return S_OK;
  502. }
  503. HRESULT
  504. Amd64MachineInfo::ConvertContextFrom(PCROSS_PLATFORM_CONTEXT Context,
  505. ULONG FromSver, ULONG FromSize,
  506. PVOID From)
  507. {
  508. if (FromSize >= sizeof(AMD64_CONTEXT))
  509. {
  510. memcpy(Context, From, sizeof(AMD64_CONTEXT));
  511. }
  512. else
  513. {
  514. return E_INVALIDARG;
  515. }
  516. return S_OK;
  517. }
  518. HRESULT
  519. Amd64MachineInfo::ConvertContextTo(PCROSS_PLATFORM_CONTEXT Context,
  520. ULONG ToSver, ULONG ToSize, PVOID To)
  521. {
  522. if (ToSize >= sizeof(AMD64_CONTEXT))
  523. {
  524. memcpy(To, Context, sizeof(AMD64_CONTEXT));
  525. }
  526. else
  527. {
  528. return E_INVALIDARG;
  529. }
  530. return S_OK;
  531. }
  532. void
  533. Amd64MachineInfo::InitializeContextFlags(PCROSS_PLATFORM_CONTEXT Context,
  534. ULONG Version)
  535. {
  536. ULONG ContextFlags;
  537. ContextFlags = AMD64_CONTEXT_FULL | AMD64_CONTEXT_SEGMENTS;
  538. if (IS_USER_TARGET())
  539. {
  540. ContextFlags |= AMD64_CONTEXT_DEBUG_REGISTERS;
  541. }
  542. Context->Amd64Context.ContextFlags = ContextFlags;
  543. }
//----------------------------------------------------------------------------
// Recovers a context for a thread that is not the current event thread.
//
// ThreadBase    - Target address of the KTHREAD.
// Thread        - Copy of the thread structure already read from target.
// Context       - Receives the reconstructed context (Rbp/Rsp/Rip only).
// Frame         - Receives the corresponding initial stack frame.
// RunningOnProc - Receives the processor number if the thread is running.
//
// Returns S_FALSE when the thread is currently running (no stored
// context; *RunningOnProc is set instead), S_OK on success, or a read
// failure code.
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::GetContextFromThreadStack(ULONG64 ThreadBase,
                                            PCROSS_PLATFORM_THREAD Thread,
                                            PCROSS_PLATFORM_CONTEXT Context,
                                            PDEBUG_STACK_FRAME Frame,
                                            PULONG RunningOnProc)
{
    HRESULT Status;
    UCHAR Proc;
    //
    // Check to see if the thread is currently running.
    //
    // NOTE(review): State value 2 is treated as "running" here — this
    // mirrors the KTHREAD state encoding; confirm against the target's
    // KTHREAD_STATE definition.
    if (Thread->Amd64Thread.State == 2)
    {
        if ((Status = g_Target->ReadAllVirtual
             (ThreadBase + m_OffsetKThreadNextProcessor,
              &Proc, sizeof(Proc))) != S_OK)
        {
            return Status;
        }
        *RunningOnProc = Proc;
        return S_FALSE;
    }
    //
    // The thread isn't running so read its stored context information.
    //
    // The switch frame sits at the top of the thread's kernel stack.
    AMD64_KSWITCH_FRAME SwitchFrame;
    if ((Status = g_Target->ReadAllVirtual(Thread->Amd64Thread.KernelStack,
                                           &SwitchFrame,
                                           sizeof(SwitchFrame))) != S_OK)
    {
        return Status;
    }
    Context->Amd64Context.Rbp = SwitchFrame.Rbp;
    // The stack pointer at the return point is just past the switch frame.
    Context->Amd64Context.Rsp =
        Thread->Amd64Thread.KernelStack + sizeof(SwitchFrame);
    Context->Amd64Context.Rip = SwitchFrame.Return;
    Frame->StackOffset = Context->Amd64Context.Rsp;
    Frame->FrameOffset = Context->Amd64Context.Rbp;
    Frame->InstructionOffset = Context->Amd64Context.Rip;
    return S_OK;
}
//----------------------------------------------------------------------------
// Reads the full context from an eXDI target.  Exdi is assumed to
// implement IeXdiX86_64Context.
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::GetExdiContext(IUnknown* Exdi, PEXDI_CONTEXT Context)
{
    // Always ask for everything.
    Context->Amd64Context.RegGroupSelection.fSegmentRegs = TRUE;
    Context->Amd64Context.RegGroupSelection.fControlRegs = TRUE;
    Context->Amd64Context.RegGroupSelection.fIntegerRegs = TRUE;
    Context->Amd64Context.RegGroupSelection.fFloatingPointRegs = TRUE;
    Context->Amd64Context.RegGroupSelection.fDebugRegs = TRUE;
    Context->Amd64Context.RegGroupSelection.fSegmentDescriptors = TRUE;
    Context->Amd64Context.RegGroupSelection.fSSERegisters = TRUE;
    Context->Amd64Context.RegGroupSelection.fSystemRegisters = TRUE;
    return ((IeXdiX86_64Context*)Exdi)->GetContext(&Context->Amd64Context);
}
//----------------------------------------------------------------------------
// Writes the context back to an eXDI target.  Exdi is assumed to
// implement IeXdiX86_64Context.
//----------------------------------------------------------------------------
HRESULT
Amd64MachineInfo::SetExdiContext(IUnknown* Exdi, PEXDI_CONTEXT Context)
{
    // Don't change the existing group selections on the assumption
    // that there was a full get prior to any modifications so
    // all groups are valid.
    return ((IeXdiX86_64Context*)Exdi)->SetContext(Context->Amd64Context);
}
//----------------------------------------------------------------------------
// Copies fields from a canonical AMD64 context into an eXDI context.
// Only the groups selected by the source's ContextFlags are copied.
//----------------------------------------------------------------------------
void
Amd64MachineInfo::ConvertExdiContextFromContext
    (PCROSS_PLATFORM_CONTEXT Context, PEXDI_CONTEXT ExdiContext)
{
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_SEGMENTS)
    {
        ExdiContext->Amd64Context.SegDs = Context->Amd64Context.SegDs;
        ExdiContext->Amd64Context.SegEs = Context->Amd64Context.SegEs;
        ExdiContext->Amd64Context.SegFs = Context->Amd64Context.SegFs;
        ExdiContext->Amd64Context.SegGs = Context->Amd64Context.SegGs;
    }
    // Control group: CS/SS, instruction and stack pointers, flags.
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_CONTROL)
    {
        ExdiContext->Amd64Context.SegCs = Context->Amd64Context.SegCs;
        ExdiContext->Amd64Context.Rip = Context->Amd64Context.Rip;
        ExdiContext->Amd64Context.SegSs = Context->Amd64Context.SegSs;
        ExdiContext->Amd64Context.Rsp = Context->Amd64Context.Rsp;
        ExdiContext->Amd64Context.EFlags = Context->Amd64Context.EFlags;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_DEBUG_REGISTERS)
    {
        ExdiContext->Amd64Context.Dr0 = Context->Amd64Context.Dr0;
        ExdiContext->Amd64Context.Dr1 = Context->Amd64Context.Dr1;
        ExdiContext->Amd64Context.Dr2 = Context->Amd64Context.Dr2;
        ExdiContext->Amd64Context.Dr3 = Context->Amd64Context.Dr3;
        ExdiContext->Amd64Context.Dr6 = Context->Amd64Context.Dr6;
        ExdiContext->Amd64Context.Dr7 = Context->Amd64Context.Dr7;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_INTEGER)
    {
        ExdiContext->Amd64Context.Rax = Context->Amd64Context.Rax;
        ExdiContext->Amd64Context.Rcx = Context->Amd64Context.Rcx;
        ExdiContext->Amd64Context.Rdx = Context->Amd64Context.Rdx;
        ExdiContext->Amd64Context.Rbx = Context->Amd64Context.Rbx;
        ExdiContext->Amd64Context.Rbp = Context->Amd64Context.Rbp;
        ExdiContext->Amd64Context.Rsi = Context->Amd64Context.Rsi;
        ExdiContext->Amd64Context.Rdi = Context->Amd64Context.Rdi;
        ExdiContext->Amd64Context.R8 = Context->Amd64Context.R8;
        ExdiContext->Amd64Context.R9 = Context->Amd64Context.R9;
        ExdiContext->Amd64Context.R10 = Context->Amd64Context.R10;
        ExdiContext->Amd64Context.R11 = Context->Amd64Context.R11;
        ExdiContext->Amd64Context.R12 = Context->Amd64Context.R12;
        ExdiContext->Amd64Context.R13 = Context->Amd64Context.R13;
        ExdiContext->Amd64Context.R14 = Context->Amd64Context.R14;
        ExdiContext->Amd64Context.R15 = Context->Amd64Context.R15;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_FLOATING_POINT)
    {
        ExdiContext->Amd64Context.ControlWord =
            Context->Amd64Context.FltSave.ControlWord;
        ExdiContext->Amd64Context.StatusWord =
            Context->Amd64Context.FltSave.StatusWord;
        ExdiContext->Amd64Context.TagWord =
            Context->Amd64Context.FltSave.TagWord;
        ExdiContext->Amd64Context.ErrorOffset =
            Context->Amd64Context.FltSave.ErrorOffset;
        ExdiContext->Amd64Context.ErrorSelector =
            Context->Amd64Context.FltSave.ErrorSelector;
        ExdiContext->Amd64Context.DataOffset =
            Context->Amd64Context.FltSave.DataOffset;
        ExdiContext->Amd64Context.DataSelector =
            Context->Amd64Context.FltSave.DataSelector;
        ExdiContext->Amd64Context.RegMXCSR =
            Context->Amd64Context.MxCsr;
        // x87 registers are copied 10 bytes at a time (80-bit values).
        for (ULONG i = 0; i < 8; i++)
        {
            memcpy(ExdiContext->Amd64Context.RegisterArea + i * 10,
                   Context->Amd64Context.FltSave.FloatRegisters + i * 10,
                   10);
        }
        // All 16 XMM registers as one contiguous copy.
        memcpy(ExdiContext->Amd64Context.RegSSE,
               &Context->Amd64Context.Xmm0, 16 * sizeof(AMD64_M128));
    }
}
//----------------------------------------------------------------------------
// Copies all fields from an eXDI context into a canonical AMD64 context.
// Unlike the reverse conversion, everything is copied unconditionally;
// several eXDI fields are wider than the canonical ones and are narrowed
// with explicit casts.
//----------------------------------------------------------------------------
void
Amd64MachineInfo::ConvertExdiContextToContext(PEXDI_CONTEXT ExdiContext,
                                              PCROSS_PLATFORM_CONTEXT Context)
{
    Context->Amd64Context.SegCs = (USHORT)ExdiContext->Amd64Context.SegCs;
    Context->Amd64Context.SegDs = (USHORT)ExdiContext->Amd64Context.SegDs;
    Context->Amd64Context.SegEs = (USHORT)ExdiContext->Amd64Context.SegEs;
    Context->Amd64Context.SegFs = (USHORT)ExdiContext->Amd64Context.SegFs;
    Context->Amd64Context.SegGs = (USHORT)ExdiContext->Amd64Context.SegGs;
    Context->Amd64Context.SegSs = (USHORT)ExdiContext->Amd64Context.SegSs;
    Context->Amd64Context.EFlags = (ULONG)ExdiContext->Amd64Context.EFlags;
    Context->Amd64Context.Dr0 = ExdiContext->Amd64Context.Dr0;
    Context->Amd64Context.Dr1 = ExdiContext->Amd64Context.Dr1;
    Context->Amd64Context.Dr2 = ExdiContext->Amd64Context.Dr2;
    Context->Amd64Context.Dr3 = ExdiContext->Amd64Context.Dr3;
    Context->Amd64Context.Dr6 = ExdiContext->Amd64Context.Dr6;
    Context->Amd64Context.Dr7 = ExdiContext->Amd64Context.Dr7;
    Context->Amd64Context.Rax = ExdiContext->Amd64Context.Rax;
    Context->Amd64Context.Rcx = ExdiContext->Amd64Context.Rcx;
    Context->Amd64Context.Rdx = ExdiContext->Amd64Context.Rdx;
    Context->Amd64Context.Rbx = ExdiContext->Amd64Context.Rbx;
    Context->Amd64Context.Rsp = ExdiContext->Amd64Context.Rsp;
    Context->Amd64Context.Rbp = ExdiContext->Amd64Context.Rbp;
    Context->Amd64Context.Rsi = ExdiContext->Amd64Context.Rsi;
    Context->Amd64Context.Rdi = ExdiContext->Amd64Context.Rdi;
    Context->Amd64Context.R8 = ExdiContext->Amd64Context.R8;
    Context->Amd64Context.R9 = ExdiContext->Amd64Context.R9;
    Context->Amd64Context.R10 = ExdiContext->Amd64Context.R10;
    Context->Amd64Context.R11 = ExdiContext->Amd64Context.R11;
    Context->Amd64Context.R12 = ExdiContext->Amd64Context.R12;
    Context->Amd64Context.R13 = ExdiContext->Amd64Context.R13;
    Context->Amd64Context.R14 = ExdiContext->Amd64Context.R14;
    Context->Amd64Context.R15 = ExdiContext->Amd64Context.R15;
    Context->Amd64Context.Rip = ExdiContext->Amd64Context.Rip;
    // x87 control/status state.
    Context->Amd64Context.FltSave.ControlWord =
        (USHORT)ExdiContext->Amd64Context.ControlWord;
    Context->Amd64Context.FltSave.StatusWord =
        (USHORT)ExdiContext->Amd64Context.StatusWord;
    Context->Amd64Context.FltSave.TagWord =
        (USHORT)ExdiContext->Amd64Context.TagWord;
    // XXX drewb - No ErrorOpcode in x86_64.
    Context->Amd64Context.FltSave.ErrorOpcode = 0;
    Context->Amd64Context.FltSave.ErrorOffset =
        ExdiContext->Amd64Context.ErrorOffset;
    Context->Amd64Context.FltSave.ErrorSelector =
        (USHORT)ExdiContext->Amd64Context.ErrorSelector;
    Context->Amd64Context.FltSave.DataOffset =
        ExdiContext->Amd64Context.DataOffset;
    Context->Amd64Context.FltSave.DataSelector =
        (USHORT)ExdiContext->Amd64Context.DataSelector;
    Context->Amd64Context.MxCsr =
        ExdiContext->Amd64Context.RegMXCSR;
    // x87 registers are copied 10 bytes at a time (80-bit values).
    for (ULONG i = 0; i < 8; i++)
    {
        memcpy(Context->Amd64Context.FltSave.FloatRegisters + i * 10,
               ExdiContext->Amd64Context.RegisterArea + i * 10, 10);
    }
    // All 16 XMM registers as one contiguous copy.
    memcpy(&Context->Amd64Context.Xmm0, ExdiContext->Amd64Context.RegSSE,
           16 * sizeof(AMD64_M128));
}
//----------------------------------------------------------------------------
// Extracts segment descriptors from an eXDI context into DESCRIPTOR64s.
//
// ExdiContext - Source eXDI context.
// Start       - First SEGREG_* index to extract.
// Count       - Number of consecutive segment registers to extract.
// Descs       - Receives Count descriptors.
//----------------------------------------------------------------------------
void
Amd64MachineInfo::ConvertExdiContextToSegDescs(PEXDI_CONTEXT ExdiContext,
                                               ULONG Start, ULONG Count,
                                               PDESCRIPTOR64 Descs)
{
    while (Count-- > 0)
    {
        SEG64_DESC_INFO* Desc;
        switch(Start)
        {
        case SEGREG_CODE:
            Desc = &ExdiContext->Amd64Context.DescriptorCs;
            break;
        case SEGREG_DATA:
            Desc = &ExdiContext->Amd64Context.DescriptorDs;
            break;
        case SEGREG_STACK:
            Desc = &ExdiContext->Amd64Context.DescriptorSs;
            break;
        case SEGREG_ES:
            Desc = &ExdiContext->Amd64Context.DescriptorEs;
            break;
        case SEGREG_FS:
            Desc = &ExdiContext->Amd64Context.DescriptorFs;
            break;
        case SEGREG_GS:
            Desc = &ExdiContext->Amd64Context.DescriptorGs;
            break;
        case SEGREG_GDT:
            // The GDT has no SEG64_DESC_INFO; fill the descriptor
            // directly from the base/limit fields.
            Descs->Base = ExdiContext->Amd64Context.GDTBase;
            Descs->Limit = ExdiContext->Amd64Context.GDTLimit;
            Descs->Flags = X86_DESC_PRESENT;
            Desc = NULL;
            break;
        case SEGREG_LDT:
            Desc = &ExdiContext->Amd64Context.SegLDT;
            break;
        default:
            // Unknown segment register index.
            Descs->Flags = SEGDESC_INVALID;
            Desc = NULL;
            break;
        }
        if (Desc != NULL)
        {
            Descs->Base = Desc->SegBase;
            Descs->Limit = Desc->SegLimit;
            // Repack SegFlags into the X86_DESC_* flag layout:
            // high nibble group (bits 12-15 of the result come from
            // bits 16-19 of SegFlags) plus the low byte.
            Descs->Flags =
                ((Desc->SegFlags >> 4) & 0xf00) |
                (Desc->SegFlags & 0xff);
        }
        Descs++;
        Start++;
    }
}
//----------------------------------------------------------------------------
// Copies kernel special registers (control registers, kernel debug
// registers, descriptor-table registers and selectors) into an eXDI
// context.
//----------------------------------------------------------------------------
void
Amd64MachineInfo::ConvertExdiContextFromSpecial
    (PCROSS_PLATFORM_KSPECIAL_REGISTERS Special,
     PEXDI_CONTEXT ExdiContext)
{
    ExdiContext->Amd64Context.RegCr0 = Special->Amd64Special.Cr0;
    ExdiContext->Amd64Context.RegCr2 = Special->Amd64Special.Cr2;
    ExdiContext->Amd64Context.RegCr3 = Special->Amd64Special.Cr3;
    ExdiContext->Amd64Context.RegCr4 = Special->Amd64Special.Cr4;
#ifdef HAVE_AMD64_CR8
    ExdiContext->Amd64Context.RegCr8 = Special->Amd64Special.Cr8;
#endif
    ExdiContext->Amd64Context.Dr0 = Special->Amd64Special.KernelDr0;
    ExdiContext->Amd64Context.Dr1 = Special->Amd64Special.KernelDr1;
    ExdiContext->Amd64Context.Dr2 = Special->Amd64Special.KernelDr2;
    ExdiContext->Amd64Context.Dr3 = Special->Amd64Special.KernelDr3;
    ExdiContext->Amd64Context.Dr6 = Special->Amd64Special.KernelDr6;
    ExdiContext->Amd64Context.Dr7 = Special->Amd64Special.KernelDr7;
    ExdiContext->Amd64Context.GDTLimit = Special->Amd64Special.Gdtr.Limit;
    ExdiContext->Amd64Context.GDTBase = Special->Amd64Special.Gdtr.Base;
    ExdiContext->Amd64Context.IDTLimit = Special->Amd64Special.Idtr.Limit;
    ExdiContext->Amd64Context.IDTBase = Special->Amd64Special.Idtr.Base;
    ExdiContext->Amd64Context.SelTSS = Special->Amd64Special.Tr;
    ExdiContext->Amd64Context.SelLDT = Special->Amd64Special.Ldtr;
}
void
Amd64MachineInfo::ConvertExdiContextToSpecial
    (PEXDI_CONTEXT ExdiContext,
     PCROSS_PLATFORM_KSPECIAL_REGISTERS Special)
{
    // Inverse of ConvertExdiContextFromSpecial: copy the eXDI context
    // fields back into the kernel special-register layout.  The eXDI
    // side stores limits and selectors in wider fields, so they are
    // truncated to USHORT on the way back.
    Special->Amd64Special.Cr0 = ExdiContext->Amd64Context.RegCr0;
    Special->Amd64Special.Cr2 = ExdiContext->Amd64Context.RegCr2;
    Special->Amd64Special.Cr3 = ExdiContext->Amd64Context.RegCr3;
    Special->Amd64Special.Cr4 = ExdiContext->Amd64Context.RegCr4;
#ifdef HAVE_AMD64_CR8
    Special->Amd64Special.Cr8 = ExdiContext->Amd64Context.RegCr8;
#endif
    Special->Amd64Special.KernelDr0 = ExdiContext->Amd64Context.Dr0;
    Special->Amd64Special.KernelDr1 = ExdiContext->Amd64Context.Dr1;
    Special->Amd64Special.KernelDr2 = ExdiContext->Amd64Context.Dr2;
    Special->Amd64Special.KernelDr3 = ExdiContext->Amd64Context.Dr3;
    Special->Amd64Special.KernelDr6 = ExdiContext->Amd64Context.Dr6;
    Special->Amd64Special.KernelDr7 = ExdiContext->Amd64Context.Dr7;
    // Descriptor-table limits are architecturally 16 bits.
    Special->Amd64Special.Gdtr.Limit =
        (USHORT)ExdiContext->Amd64Context.GDTLimit;
    Special->Amd64Special.Gdtr.Base = ExdiContext->Amd64Context.GDTBase;
    Special->Amd64Special.Idtr.Limit =
        (USHORT)ExdiContext->Amd64Context.IDTLimit;
    Special->Amd64Special.Idtr.Base = ExdiContext->Amd64Context.IDTBase;
    // Task register and LDT selector are 16-bit selectors.
    Special->Amd64Special.Tr = (USHORT)ExdiContext->Amd64Context.SelTSS;
    Special->Amd64Special.Ldtr = (USHORT)ExdiContext->Amd64Context.SelLDT;
}
  848. int
  849. Amd64MachineInfo::GetType(ULONG RegNum)
  850. {
  851. if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
  852. {
  853. return REGVAL_VECTOR64;
  854. }
  855. else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
  856. {
  857. return REGVAL_VECTOR128;
  858. }
  859. else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
  860. {
  861. return REGVAL_FLOAT10;
  862. }
  863. else if ((RegNum >= AMD64_SEG_FIRST && RegNum <= AMD64_SEG_LAST) ||
  864. (RegNum >= AMD64_FPCTRL_FIRST && RegNum <= AMD64_FPCTRL_LAST) ||
  865. RegNum == AMD64_TR || RegNum == AMD64_LDTR ||
  866. RegNum == AMD64_GDTL || RegNum == AMD64_IDTL)
  867. {
  868. return REGVAL_INT16;
  869. }
  870. else if (RegNum == AMD64_EFL || RegNum == AMD64_MXCSR)
  871. {
  872. return REGVAL_INT32;
  873. }
  874. else if (RegNum < AMD64_SUBREG_BASE)
  875. {
  876. return REGVAL_INT64;
  877. }
  878. else
  879. {
  880. return REGVAL_SUB64;
  881. }
  882. }
  883. BOOL
  884. Amd64MachineInfo::GetVal(ULONG RegNum, REGVAL* Val)
  885. {
  886. // The majority of the registers are 64-bit so default
  887. // to that type.
  888. Val->type = REGVAL_INT64;
  889. switch(m_ContextState)
  890. {
  891. case MCTX_PC:
  892. if (RegNum == AMD64_RIP)
  893. {
  894. Val->i64 = m_Context.Amd64Context.Rip;
  895. return TRUE;
  896. }
  897. goto MctxContext;
  898. case MCTX_DR67_REPORT:
  899. switch(RegNum)
  900. {
  901. case AMD64_DR6:
  902. Val->i64 = m_Context.Amd64Context.Dr6;
  903. break;
  904. case AMD64_DR7:
  905. Val->i64 = m_Context.Amd64Context.Dr7;
  906. break;
  907. default:
  908. goto MctxContext;
  909. }
  910. return TRUE;
  911. case MCTX_REPORT:
  912. switch(RegNum)
  913. {
  914. case AMD64_RIP:
  915. Val->i64 = m_Context.Amd64Context.Rip;
  916. break;
  917. case AMD64_EFL:
  918. Val->type = REGVAL_INT32;
  919. Val->i64 = m_Context.Amd64Context.EFlags;
  920. break;
  921. case AMD64_CS:
  922. Val->type = REGVAL_INT16;
  923. Val->i64 = m_Context.Amd64Context.SegCs;
  924. break;
  925. case AMD64_DS:
  926. Val->type = REGVAL_INT16;
  927. Val->i64 = m_Context.Amd64Context.SegDs;
  928. break;
  929. case AMD64_ES:
  930. Val->type = REGVAL_INT16;
  931. Val->i64 = m_Context.Amd64Context.SegEs;
  932. break;
  933. case AMD64_FS:
  934. Val->type = REGVAL_INT16;
  935. Val->i64 = m_Context.Amd64Context.SegFs;
  936. break;
  937. case AMD64_DR6:
  938. Val->i64 = m_Context.Amd64Context.Dr6;
  939. break;
  940. case AMD64_DR7:
  941. Val->i64 = m_Context.Amd64Context.Dr7;
  942. break;
  943. default:
  944. goto MctxContext;
  945. }
  946. return TRUE;
  947. case MCTX_NONE:
  948. MctxContext:
  949. if (GetContextState(MCTX_CONTEXT) != S_OK)
  950. {
  951. return FALSE;
  952. }
  953. // Fall through.
  954. case MCTX_CONTEXT:
  955. switch(RegNum)
  956. {
  957. case AMD64_RIP:
  958. Val->i64 = m_Context.Amd64Context.Rip;
  959. return TRUE;
  960. case AMD64_EFL:
  961. Val->type = REGVAL_INT32;
  962. Val->i64 = m_Context.Amd64Context.EFlags;
  963. return TRUE;
  964. case AMD64_CS:
  965. Val->type = REGVAL_INT16;
  966. Val->i64 = m_Context.Amd64Context.SegCs;
  967. return TRUE;
  968. case AMD64_DS:
  969. Val->type = REGVAL_INT16;
  970. Val->i64 = m_Context.Amd64Context.SegDs;
  971. return TRUE;
  972. case AMD64_ES:
  973. Val->type = REGVAL_INT16;
  974. Val->i64 = m_Context.Amd64Context.SegEs;
  975. return TRUE;
  976. case AMD64_FS:
  977. Val->type = REGVAL_INT16;
  978. Val->i64 = m_Context.Amd64Context.SegFs;
  979. return TRUE;
  980. case AMD64_RAX:
  981. Val->i64 = m_Context.Amd64Context.Rax;
  982. return TRUE;
  983. case AMD64_RCX:
  984. Val->i64 = m_Context.Amd64Context.Rcx;
  985. return TRUE;
  986. case AMD64_RDX:
  987. Val->i64 = m_Context.Amd64Context.Rdx;
  988. return TRUE;
  989. case AMD64_RBX:
  990. Val->i64 = m_Context.Amd64Context.Rbx;
  991. return TRUE;
  992. case AMD64_RSP:
  993. Val->i64 = m_Context.Amd64Context.Rsp;
  994. return TRUE;
  995. case AMD64_RBP:
  996. Val->i64 = m_Context.Amd64Context.Rbp;
  997. return TRUE;
  998. case AMD64_RSI:
  999. Val->i64 = m_Context.Amd64Context.Rsi;
  1000. return TRUE;
  1001. case AMD64_RDI:
  1002. Val->i64 = m_Context.Amd64Context.Rdi;
  1003. return TRUE;
  1004. case AMD64_R8:
  1005. Val->i64 = m_Context.Amd64Context.R8;
  1006. return TRUE;
  1007. case AMD64_R9:
  1008. Val->i64 = m_Context.Amd64Context.R9;
  1009. return TRUE;
  1010. case AMD64_R10:
  1011. Val->i64 = m_Context.Amd64Context.R10;
  1012. return TRUE;
  1013. case AMD64_R11:
  1014. Val->i64 = m_Context.Amd64Context.R11;
  1015. return TRUE;
  1016. case AMD64_R12:
  1017. Val->i64 = m_Context.Amd64Context.R12;
  1018. return TRUE;
  1019. case AMD64_R13:
  1020. Val->i64 = m_Context.Amd64Context.R13;
  1021. return TRUE;
  1022. case AMD64_R14:
  1023. Val->i64 = m_Context.Amd64Context.R14;
  1024. return TRUE;
  1025. case AMD64_R15:
  1026. Val->i64 = m_Context.Amd64Context.R15;
  1027. return TRUE;
  1028. case AMD64_GS:
  1029. Val->type = REGVAL_INT16;
  1030. Val->i64 = m_Context.Amd64Context.SegGs;
  1031. return TRUE;
  1032. case AMD64_SS:
  1033. Val->type = REGVAL_INT16;
  1034. Val->i64 = m_Context.Amd64Context.SegSs;
  1035. return TRUE;
  1036. case AMD64_FPCW:
  1037. Val->type = REGVAL_INT16;
  1038. Val->i64 = m_Context.Amd64Context.FltSave.ControlWord;
  1039. return TRUE;
  1040. case AMD64_FPSW:
  1041. Val->type = REGVAL_INT16;
  1042. Val->i64 = m_Context.Amd64Context.FltSave.StatusWord;
  1043. return TRUE;
  1044. case AMD64_FPTW:
  1045. Val->type = REGVAL_INT16;
  1046. Val->i64 = m_Context.Amd64Context.FltSave.TagWord;
  1047. return TRUE;
  1048. case AMD64_MXCSR:
  1049. Val->type = REGVAL_INT32;
  1050. Val->i64 = m_Context.Amd64Context.MxCsr;
  1051. return TRUE;
  1052. }
  1053. if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
  1054. {
  1055. Val->type = REGVAL_VECTOR64;
  1056. Val->i64 = *(PULONG64)&m_Context.Amd64Context.FltSave.
  1057. FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
  1058. GetReg32(AMD64_FPSW)) * 10];
  1059. return TRUE;
  1060. }
  1061. else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
  1062. {
  1063. Val->type = REGVAL_VECTOR128;
  1064. memcpy(Val->bytes, (PUCHAR)&m_Context.Amd64Context.Xmm0 +
  1065. (RegNum - AMD64_XMM_FIRST) * 16, 16);
  1066. return TRUE;
  1067. }
  1068. else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
  1069. {
  1070. Val->type = REGVAL_FLOAT10;
  1071. memcpy(Val->f10, &m_Context.Amd64Context.FltSave.
  1072. FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
  1073. sizeof(Val->f10));
  1074. return TRUE;
  1075. }
  1076. //
  1077. // The requested register is not in our current context, load up
  1078. // a complete context
  1079. //
  1080. if (GetContextState(MCTX_FULL) != S_OK)
  1081. {
  1082. return FALSE;
  1083. }
  1084. break;
  1085. }
  1086. //
  1087. // We must have a complete context...
  1088. //
  1089. switch(RegNum)
  1090. {
  1091. case AMD64_RAX:
  1092. Val->i64 = m_Context.Amd64Context.Rax;
  1093. return TRUE;
  1094. case AMD64_RCX:
  1095. Val->i64 = m_Context.Amd64Context.Rcx;
  1096. return TRUE;
  1097. case AMD64_RDX:
  1098. Val->i64 = m_Context.Amd64Context.Rdx;
  1099. return TRUE;
  1100. case AMD64_RBX:
  1101. Val->i64 = m_Context.Amd64Context.Rbx;
  1102. return TRUE;
  1103. case AMD64_RSP:
  1104. Val->i64 = m_Context.Amd64Context.Rsp;
  1105. return TRUE;
  1106. case AMD64_RBP:
  1107. Val->i64 = m_Context.Amd64Context.Rbp;
  1108. return TRUE;
  1109. case AMD64_RSI:
  1110. Val->i64 = m_Context.Amd64Context.Rsi;
  1111. return TRUE;
  1112. case AMD64_RDI:
  1113. Val->i64 = m_Context.Amd64Context.Rdi;
  1114. return TRUE;
  1115. case AMD64_R8:
  1116. Val->i64 = m_Context.Amd64Context.R8;
  1117. return TRUE;
  1118. case AMD64_R9:
  1119. Val->i64 = m_Context.Amd64Context.R9;
  1120. return TRUE;
  1121. case AMD64_R10:
  1122. Val->i64 = m_Context.Amd64Context.R10;
  1123. return TRUE;
  1124. case AMD64_R11:
  1125. Val->i64 = m_Context.Amd64Context.R11;
  1126. return TRUE;
  1127. case AMD64_R12:
  1128. Val->i64 = m_Context.Amd64Context.R12;
  1129. return TRUE;
  1130. case AMD64_R13:
  1131. Val->i64 = m_Context.Amd64Context.R13;
  1132. return TRUE;
  1133. case AMD64_R14:
  1134. Val->i64 = m_Context.Amd64Context.R14;
  1135. return TRUE;
  1136. case AMD64_R15:
  1137. Val->i64 = m_Context.Amd64Context.R15;
  1138. return TRUE;
  1139. case AMD64_RIP:
  1140. Val->i64 = m_Context.Amd64Context.Rip;
  1141. return TRUE;
  1142. case AMD64_EFL:
  1143. Val->type = REGVAL_INT32;
  1144. Val->i64 = m_Context.Amd64Context.EFlags;
  1145. return TRUE;
  1146. case AMD64_CS:
  1147. Val->type = REGVAL_INT16;
  1148. Val->i64 = m_Context.Amd64Context.SegCs;
  1149. return TRUE;
  1150. case AMD64_DS:
  1151. Val->type = REGVAL_INT16;
  1152. Val->i64 = m_Context.Amd64Context.SegDs;
  1153. return TRUE;
  1154. case AMD64_ES:
  1155. Val->type = REGVAL_INT16;
  1156. Val->i64 = m_Context.Amd64Context.SegEs;
  1157. return TRUE;
  1158. case AMD64_FS:
  1159. Val->type = REGVAL_INT16;
  1160. Val->i64 = m_Context.Amd64Context.SegFs;
  1161. return TRUE;
  1162. case AMD64_GS:
  1163. Val->type = REGVAL_INT16;
  1164. Val->i64 = m_Context.Amd64Context.SegGs;
  1165. return TRUE;
  1166. case AMD64_SS:
  1167. Val->type = REGVAL_INT16;
  1168. Val->i64 = m_Context.Amd64Context.SegSs;
  1169. return TRUE;
  1170. case AMD64_DR0:
  1171. Val->i64 = m_Context.Amd64Context.Dr0;
  1172. return TRUE;
  1173. case AMD64_DR1:
  1174. Val->i64 = m_Context.Amd64Context.Dr1;
  1175. return TRUE;
  1176. case AMD64_DR2:
  1177. Val->i64 = m_Context.Amd64Context.Dr2;
  1178. return TRUE;
  1179. case AMD64_DR3:
  1180. Val->i64 = m_Context.Amd64Context.Dr3;
  1181. return TRUE;
  1182. case AMD64_DR6:
  1183. Val->i64 = m_Context.Amd64Context.Dr6;
  1184. return TRUE;
  1185. case AMD64_DR7:
  1186. Val->i64 = m_Context.Amd64Context.Dr7;
  1187. return TRUE;
  1188. case AMD64_FPCW:
  1189. Val->type = REGVAL_INT16;
  1190. Val->i64 = m_Context.Amd64Context.FltSave.ControlWord;
  1191. return TRUE;
  1192. case AMD64_FPSW:
  1193. Val->type = REGVAL_INT16;
  1194. Val->i64 = m_Context.Amd64Context.FltSave.StatusWord;
  1195. return TRUE;
  1196. case AMD64_FPTW:
  1197. Val->type = REGVAL_INT16;
  1198. Val->i64 = m_Context.Amd64Context.FltSave.TagWord;
  1199. return TRUE;
  1200. case AMD64_MXCSR:
  1201. Val->type = REGVAL_INT32;
  1202. Val->i64 = m_Context.Amd64Context.MxCsr;
  1203. return TRUE;
  1204. }
  1205. if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
  1206. {
  1207. Val->type = REGVAL_VECTOR64;
  1208. Val->i64 = *(PULONG64)&m_Context.Amd64Context.FltSave.
  1209. FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
  1210. GetReg32(AMD64_FPSW)) * 10];
  1211. return TRUE;
  1212. }
  1213. else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
  1214. {
  1215. Val->type = REGVAL_VECTOR128;
  1216. memcpy(Val->bytes, (PUCHAR)&m_Context.Amd64Context.Xmm0 +
  1217. (RegNum - AMD64_XMM_FIRST) * 16, 16);
  1218. return TRUE;
  1219. }
  1220. else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
  1221. {
  1222. Val->type = REGVAL_FLOAT10;
  1223. memcpy(Val->f10, &m_Context.Amd64Context.FltSave.
  1224. FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
  1225. sizeof(Val->f10));
  1226. return TRUE;
  1227. }
  1228. if (IS_KERNEL_TARGET())
  1229. {
  1230. switch(RegNum)
  1231. {
  1232. case AMD64_CR0:
  1233. Val->i64 = m_SpecialRegContext.Cr0;
  1234. return TRUE;
  1235. case AMD64_CR2:
  1236. Val->i64 = m_SpecialRegContext.Cr2;
  1237. return TRUE;
  1238. case AMD64_CR3:
  1239. Val->i64 = m_SpecialRegContext.Cr3;
  1240. return TRUE;
  1241. case AMD64_CR4:
  1242. Val->i64 = m_SpecialRegContext.Cr4;
  1243. return TRUE;
  1244. #ifdef HAVE_AMD64_CR8
  1245. case AMD64_CR8:
  1246. Val->i64 = m_SpecialRegContext.Cr8;
  1247. return TRUE;
  1248. #endif
  1249. case AMD64_GDTR:
  1250. Val->i64 = m_SpecialRegContext.Gdtr.Base;
  1251. return TRUE;
  1252. case AMD64_GDTL:
  1253. Val->type = REGVAL_INT16;
  1254. Val->i64 = m_SpecialRegContext.Gdtr.Limit;
  1255. return TRUE;
  1256. case AMD64_IDTR:
  1257. Val->i64 = m_SpecialRegContext.Idtr.Base;
  1258. return TRUE;
  1259. case AMD64_IDTL:
  1260. Val->type = REGVAL_INT16;
  1261. Val->i64 = m_SpecialRegContext.Idtr.Limit;
  1262. return TRUE;
  1263. case AMD64_TR:
  1264. Val->type = REGVAL_INT16;
  1265. Val->i64 = m_SpecialRegContext.Tr;
  1266. return TRUE;
  1267. case AMD64_LDTR:
  1268. Val->type = REGVAL_INT16;
  1269. Val->i64 = m_SpecialRegContext.Ldtr;
  1270. return TRUE;
  1271. }
  1272. }
  1273. ErrOut("Amd64MachineInfo::GetVal: "
  1274. "unknown register %lx requested\n", RegNum);
  1275. return REG_ERROR;
  1276. }
BOOL
Amd64MachineInfo::SetVal(ULONG RegNum, REGVAL* Val)
{
    // Store a new value for a single register, dirtying the cached
    // context so it is written back to the target.  Returns TRUE on
    // success, FALSE if the register is unknown, is a subregister
    // pseudo-index, or the context could not be made writable.
    if (RegNum >= AMD64_SUBREG_BASE)
    {
        // Subregisters are composed views; they cannot be set directly.
        return FALSE;
    }
    // Optimize away some common cases where registers are
    // set to their current value.
    if ((m_ContextState >= MCTX_PC && RegNum == AMD64_RIP &&
         Val->i64 == m_Context.Amd64Context.Rip) ||
        (((m_ContextState >= MCTX_DR67_REPORT &&
           m_ContextState <= MCTX_REPORT) ||
          m_ContextState >= MCTX_FULL) && RegNum == AMD64_DR7 &&
         Val->i64 == m_Context.Amd64Context.Dr7))
    {
        return TRUE;
    }
    // Pull in the full context and mark it dirty so the change is
    // flushed back to the target.
    if (GetContextState(MCTX_DIRTY) != S_OK)
    {
        return FALSE;
    }
    if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
    {
        // MMX registers alias the low 64 bits of the x87 stack slots;
        // map through the current x87 top-of-stack.
        *(PULONG64)&m_Context.Amd64Context.FltSave.
            FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
                                           GetReg32(AMD64_FPSW)) * 10] =
            Val->i64;
        goto Notify;
    }
    else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
    {
        // XMM registers are contiguous 16-byte slots from Xmm0.
        memcpy((PUCHAR)&m_Context.Amd64Context.Xmm0 +
               (RegNum - AMD64_XMM_FIRST) * 16, Val->bytes, 16);
        goto Notify;
    }
    else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
    {
        // x87 registers are packed 10-byte extended floats.
        memcpy(&m_Context.Amd64Context.FltSave.
               FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
               Val->f10, sizeof(Val->f10));
        goto Notify;
    }
    BOOL Recognized;
    Recognized = TRUE;
    switch(RegNum)
    {
    case AMD64_RAX:
        m_Context.Amd64Context.Rax = Val->i64;
        break;
    case AMD64_RCX:
        m_Context.Amd64Context.Rcx = Val->i64;
        break;
    case AMD64_RDX:
        m_Context.Amd64Context.Rdx = Val->i64;
        break;
    case AMD64_RBX:
        m_Context.Amd64Context.Rbx = Val->i64;
        break;
    case AMD64_RSP:
        m_Context.Amd64Context.Rsp = Val->i64;
        break;
    case AMD64_RBP:
        m_Context.Amd64Context.Rbp = Val->i64;
        break;
    case AMD64_RSI:
        m_Context.Amd64Context.Rsi = Val->i64;
        break;
    case AMD64_RDI:
        m_Context.Amd64Context.Rdi = Val->i64;
        break;
    case AMD64_R8:
        m_Context.Amd64Context.R8 = Val->i64;
        break;
    case AMD64_R9:
        m_Context.Amd64Context.R9 = Val->i64;
        break;
    case AMD64_R10:
        m_Context.Amd64Context.R10 = Val->i64;
        break;
    case AMD64_R11:
        m_Context.Amd64Context.R11 = Val->i64;
        break;
    case AMD64_R12:
        m_Context.Amd64Context.R12 = Val->i64;
        break;
    case AMD64_R13:
        m_Context.Amd64Context.R13 = Val->i64;
        break;
    case AMD64_R14:
        m_Context.Amd64Context.R14 = Val->i64;
        break;
    case AMD64_R15:
        m_Context.Amd64Context.R15 = Val->i64;
        break;
    case AMD64_RIP:
        m_Context.Amd64Context.Rip = Val->i64;
        break;
    case AMD64_EFL:
        if (IS_KERNEL_TARGET())
        {
            // leave TF clear
            m_Context.Amd64Context.EFlags = Val->i32 & ~0x100;
        }
        else
        {
            // allow TF set
            m_Context.Amd64Context.EFlags = Val->i32;
        }
        break;
    case AMD64_CS:
        // Changing a selector invalidates its cached descriptor.
        m_Context.Amd64Context.SegCs = Val->i16;
        m_SegRegDesc[SEGREG_CODE].Flags = SEGDESC_INVALID;
        break;
    case AMD64_DS:
        m_Context.Amd64Context.SegDs = Val->i16;
        m_SegRegDesc[SEGREG_DATA].Flags = SEGDESC_INVALID;
        break;
    case AMD64_ES:
        m_Context.Amd64Context.SegEs = Val->i16;
        m_SegRegDesc[SEGREG_ES].Flags = SEGDESC_INVALID;
        break;
    case AMD64_FS:
        m_Context.Amd64Context.SegFs = Val->i16;
        m_SegRegDesc[SEGREG_FS].Flags = SEGDESC_INVALID;
        break;
    case AMD64_GS:
        m_Context.Amd64Context.SegGs = Val->i16;
        m_SegRegDesc[SEGREG_GS].Flags = SEGDESC_INVALID;
        break;
    case AMD64_SS:
        m_Context.Amd64Context.SegSs = Val->i16;
        m_SegRegDesc[SEGREG_STACK].Flags = SEGDESC_INVALID;
        break;
    case AMD64_DR0:
        m_Context.Amd64Context.Dr0 = Val->i64;
        break;
    case AMD64_DR1:
        m_Context.Amd64Context.Dr1 = Val->i64;
        break;
    case AMD64_DR2:
        m_Context.Amd64Context.Dr2 = Val->i64;
        break;
    case AMD64_DR3:
        m_Context.Amd64Context.Dr3 = Val->i64;
        break;
    case AMD64_DR6:
        m_Context.Amd64Context.Dr6 = Val->i64;
        break;
    case AMD64_DR7:
        m_Context.Amd64Context.Dr7 = Val->i64;
        break;
    case AMD64_FPCW:
        m_Context.Amd64Context.FltSave.ControlWord = Val->i16;
        break;
    case AMD64_FPSW:
        m_Context.Amd64Context.FltSave.StatusWord = Val->i16;
        break;
    case AMD64_FPTW:
        m_Context.Amd64Context.FltSave.TagWord = Val->i16;
        break;
    case AMD64_MXCSR:
        m_Context.Amd64Context.MxCsr = Val->i32;
        break;
    default:
        Recognized = FALSE;
        break;
    }
    if (!Recognized && IS_KERNEL_TARGET())
    {
        // Kernel targets can also set the special registers cached
        // in m_SpecialRegContext.
        Recognized = TRUE;
        switch(RegNum)
        {
        case AMD64_CR0:
            m_SpecialRegContext.Cr0 = Val->i64;
            break;
        case AMD64_CR2:
            m_SpecialRegContext.Cr2 = Val->i64;
            break;
        case AMD64_CR3:
            m_SpecialRegContext.Cr3 = Val->i64;
            break;
        case AMD64_CR4:
            m_SpecialRegContext.Cr4 = Val->i64;
            break;
#ifdef HAVE_AMD64_CR8
        case AMD64_CR8:
            m_SpecialRegContext.Cr8 = Val->i64;
            break;
#endif
        case AMD64_GDTR:
            m_SpecialRegContext.Gdtr.Base = Val->i64;
            break;
        case AMD64_GDTL:
            m_SpecialRegContext.Gdtr.Limit = Val->i16;
            break;
        case AMD64_IDTR:
            m_SpecialRegContext.Idtr.Base = Val->i64;
            break;
        case AMD64_IDTL:
            m_SpecialRegContext.Idtr.Limit = Val->i16;
            break;
        case AMD64_TR:
            m_SpecialRegContext.Tr = Val->i16;
            break;
        case AMD64_LDTR:
            m_SpecialRegContext.Ldtr = Val->i16;
            break;
        default:
            Recognized = FALSE;
            break;
        }
    }
    if (!Recognized)
    {
        ErrOut("Amd64MachineInfo::SetVal: "
               "unknown register %lx requested\n", RegNum);
        return FALSE;
    }
 Notify:
    // Tell clients that register state has changed.
    NotifyChangeDebuggeeState(DEBUG_CDS_REGISTERS,
                              RegCountFromIndex(RegNum));
    return TRUE;
}
void
Amd64MachineInfo::GetPC(PADDR Address)
{
    // Form the effective program-counter address from CS:RIP,
    // honoring V86 mode via the EFLAGS VM bit.
    FormAddr(SEGREG_CODE, GetReg64(AMD64_RIP),
             FORM_CODE | FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)),
             Address);
}
void
Amd64MachineInfo::SetPC(PADDR paddr)
{
    // Set the program counter from an ADDR.
    // We set RIP to the offset (the non-translated value),
    // because we may not be in "flat" mode.
    SetReg64(AMD64_RIP, Off(*paddr));
}
void
Amd64MachineInfo::GetFP(PADDR Addr)
{
    // Form the frame-pointer address from SS:RBP, honoring V86 mode.
    FormAddr(SEGREG_STACK, GetReg64(AMD64_RBP),
             FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)), Addr);
}
void
Amd64MachineInfo::GetSP(PADDR Addr)
{
    // Form the stack-pointer address from SS:RSP, honoring V86 mode.
    FormAddr(SEGREG_STACK, GetReg64(AMD64_RSP),
             FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)), Addr);
}
ULONG64
Amd64MachineInfo::GetArgReg(void)
{
    // RAX serves as the generic argument/return register for the
    // debugger's purposes on AMD64.
    return GetReg64(AMD64_RAX);
}
  1532. ULONG
  1533. Amd64MachineInfo::GetSegRegNum(ULONG SegReg)
  1534. {
  1535. switch(SegReg)
  1536. {
  1537. case SEGREG_CODE:
  1538. return AMD64_CS;
  1539. case SEGREG_DATA:
  1540. return AMD64_DS;
  1541. case SEGREG_STACK:
  1542. return AMD64_SS;
  1543. case SEGREG_ES:
  1544. return AMD64_ES;
  1545. case SEGREG_FS:
  1546. return AMD64_FS;
  1547. case SEGREG_GS:
  1548. return AMD64_GS;
  1549. case SEGREG_LDT:
  1550. return AMD64_LDTR;
  1551. }
  1552. return 0;
  1553. }
HRESULT
Amd64MachineInfo::GetSegRegDescriptor(ULONG SegReg, PDESCRIPTOR64 Desc)
{
    // Produce the descriptor (base/limit/flags) for an abstract
    // segment-register index.  Resolution order: GDT pseudo-segment,
    // cached descriptor, descriptors delivered with a full context,
    // and finally an explicit descriptor-table lookup by selector.
    if (SegReg == SEGREG_GDT)
    {
        // The GDT itself is described by the GDTR base/limit.
        Desc->Base = GetReg64(AMD64_GDTR);
        Desc->Limit = GetReg32(AMD64_GDTL);
        Desc->Flags = 0;
        return S_OK;
    }
    // Check and see if we already have a cached descriptor.
    if (m_SegRegDesc[SegReg].Flags != SEGDESC_INVALID)
    {
        *Desc = m_SegRegDesc[SegReg];
        return S_OK;
    }
    HRESULT Status;
    // Attempt to retrieve segment descriptors directly.
    if ((Status = GetContextState(MCTX_FULL)) != S_OK)
    {
        return Status;
    }
    // Check and see if we now have a cached descriptor.
    if (m_SegRegDesc[SegReg].Flags != SEGDESC_INVALID)
    {
        *Desc = m_SegRegDesc[SegReg];
        return S_OK;
    }
    //
    // Direct information is not available so look things up
    // in the descriptor tables.
    //
    ULONG RegNum = GetSegRegNum(SegReg);
    if (RegNum == 0)
    {
        return E_INVALIDARG;
    }
    // Do a quick sanity test to prevent bad values
    // from causing problems.
    ULONG Selector = GetReg32(RegNum);
    if (SegReg == SEGREG_LDT && (Selector & 4))
    {
        // The ldtr selector says that it's an LDT selector,
        // which is invalid. An LDT selector should always
        // reference the GDT.
        ErrOut("Invalid LDTR contents: %04X\n", Selector);
        return E_FAIL;
    }
    return g_Target->GetSelDescriptor(this, g_RegContextThread->Handle,
                                      Selector, Desc);
}
void
Amd64MachineInfo::OutputAll(ULONG Mask, ULONG OutMask)
{
    // Dump register groups selected by Mask (REGALL_* bits) through
    // MaskOut.  Requires a full context; each group is formatted in
    // the traditional debugger "r" layout.
    if (GetContextState(MCTX_FULL) != S_OK)
    {
        ErrOut("Unable to retrieve register information\n");
        return;
    }
    if (Mask & (REGALL_INT32 | REGALL_INT64))
    {
        // General-purpose registers plus a decoded flags line.
        ULONG Efl;
        MaskOut(OutMask, "rax=%016I64x rbx=%016I64x rcx=%016I64x\n",
                GetReg64(AMD64_RAX), GetReg64(AMD64_RBX),
                GetReg64(AMD64_RCX));
        MaskOut(OutMask, "rdx=%016I64x rsi=%016I64x rdi=%016I64x\n",
                GetReg64(AMD64_RDX), GetReg64(AMD64_RSI),
                GetReg64(AMD64_RDI));
        MaskOut(OutMask, "rip=%016I64x rsp=%016I64x rbp=%016I64x\n",
                GetReg64(AMD64_RIP), GetReg64(AMD64_RSP),
                GetReg64(AMD64_RBP));
        MaskOut(OutMask, " r8=%016I64x  r9=%016I64x r10=%016I64x\n",
                GetReg64(AMD64_R8), GetReg64(AMD64_R9),
                GetReg64(AMD64_R10));
        MaskOut(OutMask, "r11=%016I64x r12=%016I64x r13=%016I64x\n",
                GetReg64(AMD64_R11), GetReg64(AMD64_R12),
                GetReg64(AMD64_R13));
        MaskOut(OutMask, "r14=%016I64x r15=%016I64x\n",
                GetReg64(AMD64_R14), GetReg64(AMD64_R15));
        // Decode EFLAGS into the classic two-letter mnemonics.
        Efl = GetReg32(AMD64_EFL);
        MaskOut(OutMask, "iopl=%1lx %s %s %s %s %s %s %s %s %s %s\n",
                ((Efl >> X86_SHIFT_FLAGIOPL) & X86_BIT_FLAGIOPL),
                (Efl & X86_BIT_FLAGVIP) ? "vip" : "   ",
                (Efl & X86_BIT_FLAGVIF) ? "vif" : "   ",
                (Efl & X86_BIT_FLAGOF) ? "ov" : "nv",
                (Efl & X86_BIT_FLAGDF) ? "dn" : "up",
                (Efl & X86_BIT_FLAGIF) ? "ei" : "di",
                (Efl & X86_BIT_FLAGSF) ? "ng" : "pl",
                (Efl & X86_BIT_FLAGZF) ? "zr" : "nz",
                (Efl & X86_BIT_FLAGAF) ? "ac" : "na",
                (Efl & X86_BIT_FLAGPF) ? "po" : "pe",
                (Efl & X86_BIT_FLAGCF) ? "cy" : "nc");
    }
    if (Mask & REGALL_SEGREG)
    {
        MaskOut(OutMask, "cs=%04lx  ss=%04lx  ds=%04lx  es=%04lx  fs=%04lx  "
                "gs=%04lx             efl=%08lx\n",
                GetReg32(AMD64_CS),
                GetReg32(AMD64_SS),
                GetReg32(AMD64_DS),
                GetReg32(AMD64_ES),
                GetReg32(AMD64_FS),
                GetReg32(AMD64_GS),
                GetReg32(AMD64_EFL));
    }
    if (Mask & REGALL_FLOAT)
    {
        // x87 control state and the eight stack registers, two per line.
        ULONG i;
        REGVAL Val;
        char Buf[32];
        MaskOut(OutMask, "fpcw=%04X    fpsw=%04X    fptw=%04X\n",
                GetReg32(AMD64_FPCW),
                GetReg32(AMD64_FPSW),
                GetReg32(AMD64_FPTW));
        for (i = AMD64_ST_FIRST; i <= AMD64_ST_LAST; i++)
        {
            GetVal(i, &Val);
            // _uldtoa formats the 80-bit extended float as text.
            _uldtoa((_ULDOUBLE *)&Val.f10, sizeof(Buf), Buf);
            MaskOut(OutMask, "st%d=%s  ", i - AMD64_ST_FIRST, Buf);
            i++;
            GetVal(i, &Val);
            _uldtoa((_ULDOUBLE *)&Val.f10, sizeof(Buf), Buf);
            MaskOut(OutMask, "st%d=%s\n", i - AMD64_ST_FIRST, Buf);
        }
    }
    if (Mask & REGALL_MMXREG)
    {
        // MMX aliases of the x87 registers, two per line.
        ULONG i;
        REGVAL Val;
        for (i = AMD64_MM_FIRST; i <= AMD64_MM_LAST; i++)
        {
            GetVal(i, &Val);
            MaskOut(OutMask, "mm%d=%016I64x  ", i - AMD64_MM_FIRST, Val.i64);
            i++;
            GetVal(i, &Val);
            MaskOut(OutMask, "mm%d=%016I64x\n", i - AMD64_MM_FIRST, Val.i64);
        }
    }
    if (Mask & REGALL_XMMREG)
    {
        // SSE registers shown as four packed single-precision floats,
        // high element first.
        ULONG i;
        REGVAL Val;
        for (i = AMD64_XMM_FIRST; i <= AMD64_XMM_LAST; i++)
        {
            GetVal(i, &Val);
            MaskOut(OutMask, "xmm%d=%hg %hg %hg %hg\n", i - AMD64_XMM_FIRST,
                    *(float *)&Val.bytes[3 * sizeof(float)],
                    *(float *)&Val.bytes[2 * sizeof(float)],
                    *(float *)&Val.bytes[1 * sizeof(float)],
                    *(float *)&Val.bytes[0 * sizeof(float)]);
        }
    }
    if (Mask & REGALL_CREG)
    {
        MaskOut(OutMask, "cr0=%016I64x cr2=%016I64x cr3=%016I64x\n",
                GetReg64(AMD64_CR0),
                GetReg64(AMD64_CR2),
                GetReg64(AMD64_CR3));
#ifdef HAVE_AMD64_CR8
        MaskOut(OutMask, "cr8=%016I64x\n",
                GetReg64(AMD64_CR8));
#endif
    }
    if (Mask & REGALL_DREG)
    {
        MaskOut(OutMask, "dr0=%016I64x dr1=%016I64x dr2=%016I64x\n",
                GetReg64(AMD64_DR0),
                GetReg64(AMD64_DR1),
                GetReg64(AMD64_DR2));
        MaskOut(OutMask, "dr3=%016I64x dr6=%016I64x dr7=%016I64x",
                GetReg64(AMD64_DR3),
                GetReg64(AMD64_DR6),
                GetReg64(AMD64_DR7));
        if (IS_USER_TARGET())
        {
            MaskOut(OutMask, "\n");
        }
        else
        {
            // CR4 is only meaningful (and only shown) for kernel targets.
            MaskOut(OutMask, " cr4=%016I64x\n", GetReg64(AMD64_CR4));
        }
    }
    if (Mask & REGALL_DESC)
    {
        MaskOut(OutMask, "gdtr=%016I64x   gdtl=%04lx idtr=%016I64x "
                "idtl=%04lx tr=%04lx  ldtr=%04x\n",
                GetReg64(AMD64_GDTR),
                GetReg32(AMD64_GDTL),
                GetReg64(AMD64_IDTR),
                GetReg32(AMD64_IDTL),
                GetReg32(AMD64_TR),
                GetReg32(AMD64_LDTR));
    }
}
  1748. TRACEMODE
  1749. Amd64MachineInfo::GetTraceMode (void)
  1750. {
  1751. if (IS_KERNEL_TARGET())
  1752. {
  1753. return m_TraceMode;
  1754. }
  1755. else
  1756. {
  1757. return ((GetReg32(AMD64_EFL) & X86_BIT_FLAGTF) != 0) ?
  1758. TRACE_INSTRUCTION : TRACE_NONE;
  1759. }
  1760. }
  1761. void
  1762. Amd64MachineInfo::SetTraceMode (TRACEMODE Mode)
  1763. {
  1764. // (XXX olegk - review for TRACE_TAKEN_BRANCH)
  1765. DBG_ASSERT(Mode != TRACE_TAKEN_BRANCH);
  1766. if (IS_KERNEL_TARGET())
  1767. {
  1768. m_TraceMode = Mode;
  1769. }
  1770. else
  1771. {
  1772. ULONG Efl = GetReg32(AMD64_EFL);
  1773. switch (Mode)
  1774. {
  1775. case TRACE_NONE:
  1776. Efl &= ~X86_BIT_FLAGTF;
  1777. break;
  1778. case TRACE_INSTRUCTION:
  1779. Efl |= X86_BIT_FLAGTF;
  1780. break;
  1781. }
  1782. SetReg32(AMD64_EFL, Efl);
  1783. }
  1784. }
  1785. BOOL
  1786. Amd64MachineInfo::IsStepStatusSupported(ULONG Status)
  1787. {
  1788. switch (Status)
  1789. {
  1790. case DEBUG_STATUS_STEP_INTO:
  1791. case DEBUG_STATUS_STEP_OVER:
  1792. return TRUE;
  1793. default:
  1794. return FALSE;
  1795. }
  1796. }
void
Amd64MachineInfo::KdUpdateControlSet
    (PDBGKD_ANY_CONTROL_SET ControlSet)
{
    // Fill in the AMD64 control set sent back to the kernel debuggee:
    // trace flag, hardware-breakpoint state (DR7) and the current
    // watch-function symbol range.
    ControlSet->Amd64ControlSet.TraceFlag =
        (GetTraceMode() == TRACE_INSTRUCTION);
    ControlSet->Amd64ControlSet.Dr7 = GetReg64(AMD64_DR7);
    BpOut("UpdateControlSet(%d) trace %d, DR7 %I64X\n",
          g_RegContextProcessor, ControlSet->Amd64ControlSet.TraceFlag,
          ControlSet->Amd64ControlSet.Dr7);
    if (!g_WatchFunctions.IsStarted() && g_WatchBeginCurFunc != 1)
    {
        // No watch tracing active; clear the symbol range.
        ControlSet->Amd64ControlSet.CurrentSymbolStart = 0;
        ControlSet->Amd64ControlSet.CurrentSymbolEnd = 0;
    }
    else
    {
        ControlSet->Amd64ControlSet.CurrentSymbolStart = g_WatchBeginCurFunc;
        ControlSet->Amd64ControlSet.CurrentSymbolEnd = g_WatchEndCurFunc;
    }
}
void
Amd64MachineInfo::KdSaveProcessorState(void)
{
    // Save the base-class state, then snapshot the AMD64 special
    // registers alongside it.
    MachineInfo::KdSaveProcessorState();
    m_SavedSpecialRegContext = m_SpecialRegContext;
}
void
Amd64MachineInfo::KdRestoreProcessorState(void)
{
    // Restore the base-class state, then the saved AMD64 special
    // registers.  Mirrors KdSaveProcessorState.
    MachineInfo::KdRestoreProcessorState();
    m_SpecialRegContext = m_SavedSpecialRegContext;
}
ULONG
Amd64MachineInfo::ExecutingMachine(void)
{
    // The AMD64 machine has no alternate execution mode here;
    // always report the native machine type.
    return IMAGE_FILE_MACHINE_AMD64;
}
HRESULT
Amd64MachineInfo::SetPageDirectory(ULONG Idx, ULONG64 PageDir,
                                   PULONG NextIdx)
{
    // Install the page-directory base used for virtual-to-physical
    // translation.  A zero PageDir means "read the implicit process's
    // DirectoryTableBase from the target".  AMD64 uses one directory
    // for all address spaces, so every slot gets the same value and
    // *NextIdx is set past the end of the table.
    HRESULT Status;
    *NextIdx = PAGE_DIR_COUNT;
    if (PageDir == 0)
    {
        if ((Status = g_Target->ReadImplicitProcessInfoPointer
             (m_OffsetEprocessDirectoryTableBase, &PageDir)) != S_OK)
        {
            return Status;
        }
    }
    // Sanitize the value.
    PageDir &= AMD64_PDBR_MASK;
    // There is only one page directory so update all the slots.
    m_PageDirectories[PAGE_DIR_USER] = PageDir;
    m_PageDirectories[PAGE_DIR_SESSION] = PageDir;
    m_PageDirectories[PAGE_DIR_KERNEL] = PageDir;
    return S_OK;
}
// Decoding helpers for a software (paged-out) PTE: the paging-file
// index sits at bit 28 (masked with MAX_PAGING_FILE_MASK) and the
// upper 32 bits hold the page number within that file, converted to
// a byte offset by shifting up by AMD64_PAGE_SHIFT.
#define AMD64_PAGE_FILE_INDEX(Entry) \
    (((ULONG)(Entry) >> 28) & MAX_PAGING_FILE_MASK)
#define AMD64_PAGE_FILE_OFFSET(Entry) \
    (((Entry) >> 32) << AMD64_PAGE_SHIFT)
//
// Walks the four-level AMD64 page tables to translate the virtual
// address Virt into a physical offset.
//
// Virt        - virtual address to translate.
// Offsets     - optional array that receives the physical address of
//               each table entry visited (and finally the translated
//               address itself).
// OffsetsSize - number of slots in Offsets.
// Levels      - receives the number of translation levels touched.
// PfIndex     - receives the paging-file index when the page is in a
//               paging file (HR_PAGE_IN_PAGE_FILE return).
// LastVal     - receives the final physical offset, or the
//               paging-file offset for HR_PAGE_IN_PAGE_FILE.
//
// Returns S_OK on a successful translation, HR_PAGE_NOT_AVAILABLE /
// HR_PAGE_IN_PAGE_FILE for unmapped or paged-out pages, or a read
// failure code.
//
HRESULT
Amd64MachineInfo::GetVirtualTranslationPhysicalOffsets(ULONG64 Virt,
                                                       PULONG64 Offsets,
                                                       ULONG OffsetsSize,
                                                       PULONG Levels,
                                                       PULONG PfIndex,
                                                       PULONG64 LastVal)
{
    HRESULT Status;

    *Levels = 0;

    // Guard against reentrant translation; the walk below may trigger
    // reads that must not recurse back into this routine.
    if (m_Translating)
    {
        return E_UNEXPECTED;
    }
    m_Translating = TRUE;

    //
    // Reset the page directory in case it was 0
    //
    if (m_PageDirectories[PAGE_DIR_SINGLE] == 0)
    {
        if ((Status = SetDefaultPageDirectories(1 << PAGE_DIR_SINGLE)) != S_OK)
        {
            m_Translating = FALSE;
            return Status;
        }
    }

    KdOut("Amd64VtoP: Virt %s, pagedir %s\n",
          FormatAddr64(Virt),
          FormatDisp64(m_PageDirectories[PAGE_DIR_SINGLE]));

    // Level 1: the page directory base itself.
    (*Levels)++;
    if (Offsets != NULL && OffsetsSize > 0)
    {
        *Offsets++ = m_PageDirectories[PAGE_DIR_SINGLE];
        OffsetsSize--;
    }

    //
    // Certain ranges of the system are mapped directly.
    //
    if ((Virt >= AMD64_PHYSICAL_START) && (Virt <= AMD64_PHYSICAL_END))
    {
        *LastVal = Virt - AMD64_PHYSICAL_START;

        KdOut("Amd64VtoP: Direct phys %s\n", FormatAddr64(*LastVal));

        (*Levels)++;
        if (Offsets != NULL && OffsetsSize > 0)
        {
            *Offsets++ = *LastVal;
            OffsetsSize--;
        }

        m_Translating = FALSE;
        return S_OK;
    }

    ULONG64 Addr;
    ULONG64 Entry;

    // Read the Page Map Level 4 entry.
    Addr = (((Virt >> AMD64_PML4E_SHIFT) & AMD64_PML4E_MASK) *
            sizeof(Entry)) + m_PageDirectories[PAGE_DIR_SINGLE];

    KdOut("Amd64VtoP: PML4E %s\n", FormatAddr64(Addr));

    (*Levels)++;
    if (Offsets != NULL && OffsetsSize > 0)
    {
        *Offsets++ = Addr;
        OffsetsSize--;
    }

    if ((Status = g_Target->
         ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
    {
        KdOut("Amd64VtoP: PML4E read error 0x%X\n", Status);
        m_Translating = FALSE;
        return Status;
    }

    // Read the Page Directory Pointer entry.
    if (Entry == 0)
    {
        KdOut("Amd64VtoP: zero PML4E\n");
        m_Translating = FALSE;
        return HR_PAGE_NOT_AVAILABLE;
    }
    else if (!(Entry & 1))
    {
        // PML4E valid bit clear: the PDP page itself is in a paging
        // file, so fetch the PDPE from there.
        Addr = (((Virt >> AMD64_PDPE_SHIFT) & AMD64_PDPE_MASK) *
                sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);

        KdOut("Amd64VtoP: pagefile PDPE %d:%s\n",
              AMD64_PAGE_FILE_INDEX(Entry), FormatAddr64(Addr));

        if ((Status = g_Target->
             ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
                          &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PML4E not present, 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }
    else
    {
        Addr = (((Virt >> AMD64_PDPE_SHIFT) & AMD64_PDPE_MASK) *
                sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);

        KdOut("Amd64VtoP: PDPE %s\n", FormatAddr64(Addr));

        (*Levels)++;
        if (Offsets != NULL && OffsetsSize > 0)
        {
            *Offsets++ = Addr;
            OffsetsSize--;
        }

        if ((Status = g_Target->
             ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PDPE read error 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }

    // Read the Page Directory entry.
    if (Entry == 0)
    {
        KdOut("Amd64VtoP: zero PDPE\n");
        m_Translating = FALSE;
        return HR_PAGE_NOT_AVAILABLE;
    }
    else if (!(Entry & 1))
    {
        // PDPE valid bit clear: the page directory page is in a
        // paging file.
        Addr = (((Virt >> AMD64_PDE_SHIFT) & AMD64_PDE_MASK) *
                sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);

        KdOut("Amd64VtoP: pagefile PDE %d:%s\n",
              AMD64_PAGE_FILE_INDEX(Entry), FormatAddr64(Addr));

        if ((Status = g_Target->
             ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
                          &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PDPE not present, 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }
    else
    {
        Addr = (((Virt >> AMD64_PDE_SHIFT) & AMD64_PDE_MASK) *
                sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);

        KdOut("Amd64VtoP: PDE %s\n", FormatAddr64(Addr));

        (*Levels)++;
        if (Offsets != NULL && OffsetsSize > 0)
        {
            *Offsets++ = Addr;
            OffsetsSize--;
        }

        if ((Status = g_Target->
             ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PDE read error 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }

    // Check for a large page.  Large pages can
    // never be paged out so also check for the present bit.
    if ((Entry & (AMD64_LARGE_PAGE_MASK | 1)) == (AMD64_LARGE_PAGE_MASK | 1))
    {
        // Combine the large-page frame with the low bits of Virt.
        *LastVal = ((Entry & ~(AMD64_LARGE_PAGE_SIZE - 1)) |
                    (Virt & (AMD64_LARGE_PAGE_SIZE - 1)));

        KdOut("Amd64VtoP: Large page mapped phys %s\n",
              FormatAddr64(*LastVal));

        (*Levels)++;
        if (Offsets != NULL && OffsetsSize > 0)
        {
            *Offsets++ = *LastVal;
            OffsetsSize--;
        }

        m_Translating = FALSE;
        return S_OK;
    }

    // Read the Page Table entry.
    if (Entry == 0)
    {
        KdOut("Amd64VtoP: zero PDE\n");
        m_Translating = FALSE;
        return HR_PAGE_NOT_AVAILABLE;
    }
    else if (!(Entry & 1))
    {
        // PDE valid bit clear: the page table page is in a paging
        // file.
        Addr = (((Virt >> AMD64_PTE_SHIFT) & AMD64_PTE_MASK) *
                sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);

        KdOut("Amd64VtoP: pagefile PTE %d:%s\n",
              AMD64_PAGE_FILE_INDEX(Entry), FormatAddr64(Addr));

        if ((Status = g_Target->
             ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
                          &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PDE not present, 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }
    else
    {
        Addr = (((Virt >> AMD64_PTE_SHIFT) & AMD64_PTE_MASK) *
                sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);

        KdOut("Amd64VtoP: PTE %s\n", FormatAddr64(Addr));

        (*Levels)++;
        if (Offsets != NULL && OffsetsSize > 0)
        {
            *Offsets++ = Addr;
            OffsetsSize--;
        }

        if ((Status = g_Target->
             ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
        {
            KdOut("Amd64VtoP: PTE read error 0x%X\n", Status);
            m_Translating = FALSE;
            return Status;
        }
    }

    // The PTE is not valid.  Transition PTEs (transition set,
    // prototype clear) still reference a physical frame and fall
    // through to the normal mapping below; everything else is
    // unavailable or in a paging file.
    if (!(Entry & 0x1) &&
        ((Entry & AMD64_MM_PTE_PROTOTYPE_MASK) ||
         !(Entry & AMD64_MM_PTE_TRANSITION_MASK)))
    {
        if (Entry == 0)
        {
            KdOut("Amd64VtoP: zero PTE\n");
            Status = HR_PAGE_NOT_AVAILABLE;
        }
        else if (Entry & AMD64_MM_PTE_PROTOTYPE_MASK)
        {
            // Prototype PTEs require section object information that
            // is not available here.
            KdOut("Amd64VtoP: prototype PTE\n");
            Status = HR_PAGE_NOT_AVAILABLE;
        }
        else
        {
            // The page lives in a paging file; report where.
            *PfIndex = AMD64_PAGE_FILE_INDEX(Entry);
            *LastVal = (Virt & (AMD64_PAGE_SIZE - 1)) +
                AMD64_PAGE_FILE_OFFSET(Entry);

            KdOut("Amd64VtoP: PTE not present, pagefile %d:%s\n",
                  *PfIndex, FormatAddr64(*LastVal));
            Status = HR_PAGE_IN_PAGE_FILE;
        }
        m_Translating = FALSE;
        return Status;
    }

    // Valid (or transition) PTE: combine the frame with the page
    // offset to produce the final physical address.
    *LastVal = ((Entry & AMD64_VALID_PFN_MASK) |
                (Virt & (AMD64_PAGE_SIZE - 1)));

    KdOut("Amd64VtoP: Mapped phys %s\n", FormatAddr64(*LastVal));

    (*Levels)++;
    if (Offsets != NULL && OffsetsSize > 0)
    {
        *Offsets++ = *LastVal;
        OffsetsSize--;
    }

    m_Translating = FALSE;
    return S_OK;
}
  2109. HRESULT
  2110. Amd64MachineInfo::GetBaseTranslationVirtualOffset(PULONG64 Offset)
  2111. {
  2112. *Offset = AMD64_BASE_VIRT;
  2113. return S_OK;
  2114. }
  2115. BOOL
  2116. Amd64MachineInfo::DisplayTrapFrame(ULONG64 FrameAddress,
  2117. PCROSS_PLATFORM_CONTEXT Context)
  2118. {
  2119. ErrOut("DisplayTrapFrame not implemented\n");
  2120. return FALSE;
  2121. }
  2122. void
  2123. Amd64MachineInfo::ValidateCxr(PCROSS_PLATFORM_CONTEXT Context)
  2124. {
  2125. // XXX drewb - Not implemented.
  2126. }
  2127. void
  2128. Amd64MachineInfo::OutputFunctionEntry(PVOID RawEntry)
  2129. {
  2130. _PIMAGE_RUNTIME_FUNCTION_ENTRY Entry =
  2131. (_PIMAGE_RUNTIME_FUNCTION_ENTRY)RawEntry;
  2132. dprintf("BeginAddress = %s\n",
  2133. FormatAddr64(Entry->BeginAddress));
  2134. dprintf("EndAddress = %s\n",
  2135. FormatAddr64(Entry->EndAddress));
  2136. dprintf("UnwindInfoAddress = %s\n",
  2137. FormatAddr64(Entry->UnwindInfoAddress));
  2138. }
  2139. HRESULT
  2140. Amd64MachineInfo::ReadDynamicFunctionTable(ULONG64 Table,
  2141. PULONG64 NextTable,
  2142. PULONG64 MinAddress,
  2143. PULONG64 MaxAddress,
  2144. PULONG64 BaseAddress,
  2145. PULONG64 TableData,
  2146. PULONG TableSize,
  2147. PWSTR OutOfProcessDll,
  2148. PCROSS_PLATFORM_DYNAMIC_FUNCTION_TABLE RawTable)
  2149. {
  2150. HRESULT Status;
  2151. if ((Status = g_Target->
  2152. ReadAllVirtual(Table, &RawTable->Amd64Table,
  2153. sizeof(RawTable->Amd64Table))) != S_OK)
  2154. {
  2155. return Status;
  2156. }
  2157. *NextTable = RawTable->Amd64Table.ListEntry.Flink;
  2158. *MinAddress = RawTable->Amd64Table.MinimumAddress;
  2159. *MaxAddress = RawTable->Amd64Table.MaximumAddress;
  2160. *BaseAddress = RawTable->Amd64Table.BaseAddress;
  2161. if (RawTable->Amd64Table.Type == AMD64_RF_CALLBACK)
  2162. {
  2163. ULONG Done;
  2164. *TableData = 0;
  2165. *TableSize = 0;
  2166. if ((Status = g_Target->
  2167. ReadVirtual(RawTable->Amd64Table.OutOfProcessCallbackDll,
  2168. OutOfProcessDll, (MAX_PATH - 1) * sizeof(WCHAR),
  2169. &Done)) != S_OK)
  2170. {
  2171. return Status;
  2172. }
  2173. OutOfProcessDll[Done / sizeof(WCHAR)] = 0;
  2174. }
  2175. else
  2176. {
  2177. *TableData = RawTable->Amd64Table.FunctionTable;
  2178. *TableSize = RawTable->Amd64Table.EntryCount *
  2179. sizeof(_IMAGE_RUNTIME_FUNCTION_ENTRY);
  2180. OutOfProcessDll[0] = 0;
  2181. }
  2182. return S_OK;
  2183. }
  2184. PVOID
  2185. Amd64MachineInfo::FindDynamicFunctionEntry(PCROSS_PLATFORM_DYNAMIC_FUNCTION_TABLE Table,
  2186. ULONG64 Address,
  2187. PVOID TableData,
  2188. ULONG TableSize)
  2189. {
  2190. ULONG i;
  2191. _PIMAGE_RUNTIME_FUNCTION_ENTRY Func;
  2192. static _IMAGE_RUNTIME_FUNCTION_ENTRY s_RetFunc;
  2193. Func = (_PIMAGE_RUNTIME_FUNCTION_ENTRY)TableData;
  2194. for (i = 0; i < TableSize / sizeof(_IMAGE_RUNTIME_FUNCTION_ENTRY); i++)
  2195. {
  2196. if (Address >= Table->Amd64Table.BaseAddress + Func->BeginAddress &&
  2197. Address < Table->Amd64Table.BaseAddress + Func->EndAddress)
  2198. {
  2199. // The table data is temporary so copy the data into
  2200. // a static buffer for longer-term storage.
  2201. s_RetFunc.BeginAddress = Func->BeginAddress;
  2202. s_RetFunc.EndAddress = Func->EndAddress;
  2203. s_RetFunc.UnwindInfoAddress = Func->UnwindInfoAddress;
  2204. return (PVOID)&s_RetFunc;
  2205. }
  2206. Func++;
  2207. }
  2208. return NULL;
  2209. }
  2210. HRESULT
  2211. Amd64MachineInfo::ReadKernelProcessorId
  2212. (ULONG Processor, PDEBUG_PROCESSOR_IDENTIFICATION_ALL Id)
  2213. {
  2214. HRESULT Status;
  2215. ULONG64 Prcb, PrcbMember;
  2216. ULONG Data;
  2217. if ((Status = g_Target->
  2218. GetProcessorSystemDataOffset(Processor, DEBUG_DATA_KPRCB_OFFSET,
  2219. &Prcb)) != S_OK)
  2220. {
  2221. return Status;
  2222. }
  2223. PrcbMember = Prcb + FIELD_OFFSET(AMD64_PARTIAL_KPRCB, CpuType);
  2224. if ((Status = g_Target->
  2225. ReadAllVirtual(PrcbMember, &Data, sizeof(Data))) != S_OK)
  2226. {
  2227. return Status;
  2228. }
  2229. Id->Amd64.Family = Data & 0xf;
  2230. Id->Amd64.Model = (Data >> 24) & 0xf;
  2231. Id->Amd64.Stepping = (Data >> 16) & 0xf;
  2232. PrcbMember = Prcb + FIELD_OFFSET(AMD64_PARTIAL_KPRCB, VendorString);
  2233. if ((Status = g_Target->
  2234. ReadAllVirtual(PrcbMember, Id->Amd64.VendorString,
  2235. sizeof(Id->Amd64.VendorString))) != S_OK)
  2236. {
  2237. return Status;
  2238. }
  2239. return S_OK;
  2240. }
  2241. void
  2242. Amd64MachineInfo::KdGetSpecialRegistersFromContext(void)
  2243. {
  2244. DBG_ASSERT(m_ContextState >= MCTX_FULL);
  2245. m_SpecialRegContext.KernelDr0 = m_Context.Amd64Context.Dr0;
  2246. m_SpecialRegContext.KernelDr1 = m_Context.Amd64Context.Dr1;
  2247. m_SpecialRegContext.KernelDr2 = m_Context.Amd64Context.Dr2;
  2248. m_SpecialRegContext.KernelDr3 = m_Context.Amd64Context.Dr3;
  2249. m_SpecialRegContext.KernelDr6 = m_Context.Amd64Context.Dr6;
  2250. m_SpecialRegContext.KernelDr7 = m_Context.Amd64Context.Dr7;
  2251. }
  2252. void
  2253. Amd64MachineInfo::KdSetSpecialRegistersInContext(void)
  2254. {
  2255. DBG_ASSERT(m_ContextState >= MCTX_FULL);
  2256. m_Context.Amd64Context.Dr0 = m_SpecialRegContext.KernelDr0;
  2257. m_Context.Amd64Context.Dr1 = m_SpecialRegContext.KernelDr1;
  2258. m_Context.Amd64Context.Dr2 = m_SpecialRegContext.KernelDr2;
  2259. m_Context.Amd64Context.Dr3 = m_SpecialRegContext.KernelDr3;
  2260. m_Context.Amd64Context.Dr6 = m_SpecialRegContext.KernelDr6;
  2261. m_Context.Amd64Context.Dr7 = m_SpecialRegContext.KernelDr7;
  2262. }