Leaked source code of Windows Server 2003

//----------------------------------------------------------------------------
//
// Register portions of AMD64 machine implementation.
//
// Copyright (C) Microsoft Corporation, 2000-2002.
//
//----------------------------------------------------------------------------

#include "ntsdp.hpp"

#define REGALL_SEGREG REGALL_EXTRA0
#define REGALL_MMXREG REGALL_EXTRA1
#define REGALL_DREG REGALL_EXTRA2

REGALLDESC g_Amd64AllExtraDesc[] =
{
    REGALL_SEGREG, "Segment registers",
    REGALL_MMXREG, "MMX registers",
    REGALL_DREG, "Debug registers and, in kernel, CR4",
    REGALL_XMMREG, "SSE XMM registers",
    0, NULL,
};

#define REGALL_CREG REGALL_EXTRA4
#define REGALL_DESC REGALL_EXTRA5

REGALLDESC g_Amd64KernelExtraDesc[] =
{
    REGALL_CREG, "CR0, CR2 and CR3",
    REGALL_DESC, "Descriptor and task state",
    0, NULL,
};

char g_Rax[] = "rax";
char g_Rcx[] = "rcx";
char g_Rdx[] = "rdx";
char g_Rbx[] = "rbx";
char g_Rsp[] = "rsp";
char g_Rbp[] = "rbp";
char g_Rsi[] = "rsi";
char g_Rdi[] = "rdi";
char g_Rip[] = "rip";
char g_KMxcsr[] = "kmxcsr";
char g_KDr0[] = "kdr0";
char g_KDr1[] = "kdr1";
char g_KDr2[] = "kdr2";
char g_KDr3[] = "kdr3";
char g_KDr6[] = "kdr6";
char g_KDr7[] = "kdr7";
char g_Xmm8[] = "xmm8";
char g_Xmm9[] = "xmm9";
char g_Xmm10[] = "xmm10";
char g_Xmm11[] = "xmm11";
char g_Xmm12[] = "xmm12";
char g_Xmm13[] = "xmm13";
char g_Xmm14[] = "xmm14";
char g_Xmm15[] = "xmm15";
char g_Cr8[] = "cr8";
char g_Spl[] = "spl";
char g_Bpl[] = "bpl";
char g_Sil[] = "sil";
char g_Dil[] = "dil";
char g_R8d[] = "r8d";
char g_R9d[] = "r9d";
char g_R10d[] = "r10d";
char g_R11d[] = "r11d";
char g_R12d[] = "r12d";
char g_R13d[] = "r13d";
char g_R14d[] = "r14d";
char g_R15d[] = "r15d";
char g_R8w[] = "r8w";
char g_R9w[] = "r9w";
char g_R10w[] = "r10w";
char g_R11w[] = "r11w";
char g_R12w[] = "r12w";
char g_R13w[] = "r13w";
char g_R14w[] = "r14w";
char g_R15w[] = "r15w";
char g_R8b[] = "r8b";
char g_R9b[] = "r9b";
char g_R10b[] = "r10b";
char g_R11b[] = "r11b";
char g_R12b[] = "r12b";
char g_R13b[] = "r13b";
char g_R14b[] = "r14b";
char g_R15b[] = "r15b";

REGDEF g_Amd64Defs[] =
{
    { g_Rax, AMD64_RAX },
    { g_Rcx, AMD64_RCX },
    { g_Rdx, AMD64_RDX },
    { g_Rbx, AMD64_RBX },
    { g_Rsp, AMD64_RSP },
    { g_Rbp, AMD64_RBP },
    { g_Rsi, AMD64_RSI },
    { g_Rdi, AMD64_RDI },
    { g_R8, AMD64_R8 },
    { g_R9, AMD64_R9 },
    { g_R10, AMD64_R10 },
    { g_R11, AMD64_R11 },
    { g_R12, AMD64_R12 },
    { g_R13, AMD64_R13 },
    { g_R14, AMD64_R14 },
    { g_R15, AMD64_R15 },
    { g_Rip, AMD64_RIP },
    { g_Efl, AMD64_EFL },
    { g_Cs, AMD64_CS },
    { g_Ds, AMD64_DS },
    { g_Es, AMD64_ES },
    { g_Fs, AMD64_FS },
    { g_Gs, AMD64_GS },
    { g_Ss, AMD64_SS },
    { g_Dr0, AMD64_DR0 },
    { g_Dr1, AMD64_DR1 },
    { g_Dr2, AMD64_DR2 },
    { g_Dr3, AMD64_DR3 },
    { g_Dr6, AMD64_DR6 },
    { g_Dr7, AMD64_DR7 },
    { g_Fpcw, AMD64_FPCW },
    { g_Fpsw, AMD64_FPSW },
    { g_Fptw, AMD64_FPTW },
    { g_St0, AMD64_ST0 },
    { g_St1, AMD64_ST1 },
    { g_St2, AMD64_ST2 },
    { g_St3, AMD64_ST3 },
    { g_St4, AMD64_ST4 },
    { g_St5, AMD64_ST5 },
    { g_St6, AMD64_ST6 },
    { g_St7, AMD64_ST7 },
    { g_Mm0, AMD64_MM0 },
    { g_Mm1, AMD64_MM1 },
    { g_Mm2, AMD64_MM2 },
    { g_Mm3, AMD64_MM3 },
    { g_Mm4, AMD64_MM4 },
    { g_Mm5, AMD64_MM5 },
    { g_Mm6, AMD64_MM6 },
    { g_Mm7, AMD64_MM7 },
    { g_Mxcsr, AMD64_MXCSR },
    { g_Xmm0, AMD64_XMM0 },
    { g_Xmm1, AMD64_XMM1 },
    { g_Xmm2, AMD64_XMM2 },
    { g_Xmm3, AMD64_XMM3 },
    { g_Xmm4, AMD64_XMM4 },
    { g_Xmm5, AMD64_XMM5 },
    { g_Xmm6, AMD64_XMM6 },
    { g_Xmm7, AMD64_XMM7 },
    { g_Xmm8, AMD64_XMM8 },
    { g_Xmm9, AMD64_XMM9 },
    { g_Xmm10, AMD64_XMM10 },
    { g_Xmm11, AMD64_XMM11 },
    { g_Xmm12, AMD64_XMM12 },
    { g_Xmm13, AMD64_XMM13 },
    { g_Xmm14, AMD64_XMM14 },
    { g_Xmm15, AMD64_XMM15 },
    { g_Eax, AMD64_EAX },
    { g_Ecx, AMD64_ECX },
    { g_Edx, AMD64_EDX },
    { g_Ebx, AMD64_EBX },
    { g_Esp, AMD64_ESP },
    { g_Ebp, AMD64_EBP },
    { g_Esi, AMD64_ESI },
    { g_Edi, AMD64_EDI },
    { g_R8d, AMD64_R8D },
    { g_R9d, AMD64_R9D },
    { g_R10d, AMD64_R10D },
    { g_R11d, AMD64_R11D },
    { g_R12d, AMD64_R12D },
    { g_R13d, AMD64_R13D },
    { g_R14d, AMD64_R14D },
    { g_R15d, AMD64_R15D },
    { g_Eip, AMD64_EIP },
    { g_Ax, AMD64_AX },
    { g_Cx, AMD64_CX },
    { g_Dx, AMD64_DX },
    { g_Bx, AMD64_BX },
    { g_Sp, AMD64_SP },
    { g_Bp, AMD64_BP },
    { g_Si, AMD64_SI },
    { g_Di, AMD64_DI },
    { g_R8w, AMD64_R8W },
    { g_R9w, AMD64_R9W },
    { g_R10w, AMD64_R10W },
    { g_R11w, AMD64_R11W },
    { g_R12w, AMD64_R12W },
    { g_R13w, AMD64_R13W },
    { g_R14w, AMD64_R14W },
    { g_R15w, AMD64_R15W },
    { g_Ip, AMD64_IP },
    { g_Fl, AMD64_FL },
    { g_Al, AMD64_AL },
    { g_Cl, AMD64_CL },
    { g_Dl, AMD64_DL },
    { g_Bl, AMD64_BL },
    { g_Spl, AMD64_SPL },
    { g_Bpl, AMD64_BPL },
    { g_Sil, AMD64_SIL },
    { g_Dil, AMD64_DIL },
    { g_R8b, AMD64_R8B },
    { g_R9b, AMD64_R9B },
    { g_R10b, AMD64_R10B },
    { g_R11b, AMD64_R11B },
    { g_R12b, AMD64_R12B },
    { g_R13b, AMD64_R13B },
    { g_R14b, AMD64_R14B },
    { g_R15b, AMD64_R15B },
    { g_Ah, AMD64_AH },
    { g_Ch, AMD64_CH },
    { g_Dh, AMD64_DH },
    { g_Bh, AMD64_BH },
    { g_Iopl, AMD64_IOPL },
    { g_Of, AMD64_OF },
    { g_Df, AMD64_DF },
    { g_If, AMD64_IF },
    { g_Tf, AMD64_TF },
    { g_Sf, AMD64_SF },
    { g_Zf, AMD64_ZF },
    { g_Af, AMD64_AF },
    { g_Pf, AMD64_PF },
    { g_Cf, AMD64_CF },
    { g_Vip, AMD64_VIP },
    { g_Vif, AMD64_VIF },
    { NULL, REG_ERROR },
};

REGDEF g_Amd64KernelReg[] =
{
    { g_Cr0, AMD64_CR0 },
    { g_Cr2, AMD64_CR2 },
    { g_Cr3, AMD64_CR3 },
    { g_Cr4, AMD64_CR4 },
    { g_Cr8, AMD64_CR8 },
    { g_Gdtr, AMD64_GDTR },
    { g_Gdtl, AMD64_GDTL },
    { g_Idtr, AMD64_IDTR },
    { g_Idtl, AMD64_IDTL },
    { g_Tr, AMD64_TR },
    { g_Ldtr, AMD64_LDTR },
    { g_KMxcsr, AMD64_KMXCSR },
    { g_KDr0, AMD64_KDR0 },
    { g_KDr1, AMD64_KDR1 },
    { g_KDr2, AMD64_KDR2 },
    { g_KDr3, AMD64_KDR3 },
    { g_KDr6, AMD64_KDR6 },
    { g_KDr7, AMD64_KDR7 },
    { NULL, REG_ERROR },
};
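
//
// Each REGSUBDEF describes how a subregister is carved out of its
// parent register as { subreg, parent, right shift, mask }; the
// subregister value is (parent >> shift) & mask.  For example, AH
// below is (RAX >> 8) & 0xff and IOPL is bits 12-13 of EFL.
//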
REGSUBDEF g_Amd64SubDefs[] =
{
    { AMD64_EAX, AMD64_RAX, 0, 0xffffffff }, // EAX register
    { AMD64_ECX, AMD64_RCX, 0, 0xffffffff }, // ECX register
    { AMD64_EDX, AMD64_RDX, 0, 0xffffffff }, // EDX register
    { AMD64_EBX, AMD64_RBX, 0, 0xffffffff }, // EBX register
    { AMD64_ESP, AMD64_RSP, 0, 0xffffffff }, // ESP register
    { AMD64_EBP, AMD64_RBP, 0, 0xffffffff }, // EBP register
    { AMD64_ESI, AMD64_RSI, 0, 0xffffffff }, // ESI register
    { AMD64_EDI, AMD64_RDI, 0, 0xffffffff }, // EDI register
    { AMD64_R8D, AMD64_R8, 0, 0xffffffff }, // R8D register
    { AMD64_R9D, AMD64_R9, 0, 0xffffffff }, // R9D register
    { AMD64_R10D, AMD64_R10, 0, 0xffffffff }, // R10D register
    { AMD64_R11D, AMD64_R11, 0, 0xffffffff }, // R11D register
    { AMD64_R12D, AMD64_R12, 0, 0xffffffff }, // R12D register
    { AMD64_R13D, AMD64_R13, 0, 0xffffffff }, // R13D register
    { AMD64_R14D, AMD64_R14, 0, 0xffffffff }, // R14D register
    { AMD64_R15D, AMD64_R15, 0, 0xffffffff }, // R15D register
    { AMD64_EIP, AMD64_RIP, 0, 0xffffffff }, // EIP register
    { AMD64_AX, AMD64_RAX, 0, 0xffff }, // AX register
    { AMD64_CX, AMD64_RCX, 0, 0xffff }, // CX register
    { AMD64_DX, AMD64_RDX, 0, 0xffff }, // DX register
    { AMD64_BX, AMD64_RBX, 0, 0xffff }, // BX register
    { AMD64_SP, AMD64_RSP, 0, 0xffff }, // SP register
    { AMD64_BP, AMD64_RBP, 0, 0xffff }, // BP register
    { AMD64_SI, AMD64_RSI, 0, 0xffff }, // SI register
    { AMD64_DI, AMD64_RDI, 0, 0xffff }, // DI register
    { AMD64_R8W, AMD64_R8, 0, 0xffff }, // R8W register
    { AMD64_R9W, AMD64_R9, 0, 0xffff }, // R9W register
    { AMD64_R10W, AMD64_R10, 0, 0xffff }, // R10W register
    { AMD64_R11W, AMD64_R11, 0, 0xffff }, // R11W register
    { AMD64_R12W, AMD64_R12, 0, 0xffff }, // R12W register
    { AMD64_R13W, AMD64_R13, 0, 0xffff }, // R13W register
    { AMD64_R14W, AMD64_R14, 0, 0xffff }, // R14W register
    { AMD64_R15W, AMD64_R15, 0, 0xffff }, // R15W register
    { AMD64_IP, AMD64_RIP, 0, 0xffff }, // IP register
    { AMD64_FL, AMD64_EFL, 0, 0xffff }, // FL register
    { AMD64_AL, AMD64_RAX, 0, 0xff }, // AL register
    { AMD64_CL, AMD64_RCX, 0, 0xff }, // CL register
    { AMD64_DL, AMD64_RDX, 0, 0xff }, // DL register
    { AMD64_BL, AMD64_RBX, 0, 0xff }, // BL register
    { AMD64_SPL, AMD64_RSP, 0, 0xff }, // SPL register
    { AMD64_BPL, AMD64_RBP, 0, 0xff }, // BPL register
    { AMD64_SIL, AMD64_RSI, 0, 0xff }, // SIL register
    { AMD64_DIL, AMD64_RDI, 0, 0xff }, // DIL register
    { AMD64_R8B, AMD64_R8, 0, 0xff }, // R8B register
    { AMD64_R9B, AMD64_R9, 0, 0xff }, // R9B register
    { AMD64_R10B, AMD64_R10, 0, 0xff }, // R10B register
    { AMD64_R11B, AMD64_R11, 0, 0xff }, // R11B register
    { AMD64_R12B, AMD64_R12, 0, 0xff }, // R12B register
    { AMD64_R13B, AMD64_R13, 0, 0xff }, // R13B register
    { AMD64_R14B, AMD64_R14, 0, 0xff }, // R14B register
    { AMD64_R15B, AMD64_R15, 0, 0xff }, // R15B register
    { AMD64_AH, AMD64_RAX, 8, 0xff }, // AH register
    { AMD64_CH, AMD64_RCX, 8, 0xff }, // CH register
    { AMD64_DH, AMD64_RDX, 8, 0xff }, // DH register
    { AMD64_BH, AMD64_RBX, 8, 0xff }, // BH register
    { AMD64_IOPL, AMD64_EFL, 12, 3 }, // IOPL level value
    { AMD64_OF, AMD64_EFL, 11, 1 }, // OF (overflow flag)
    { AMD64_DF, AMD64_EFL, 10, 1 }, // DF (direction flag)
    { AMD64_IF, AMD64_EFL, 9, 1 }, // IF (interrupt enable flag)
    { AMD64_TF, AMD64_EFL, 8, 1 }, // TF (trace flag)
    { AMD64_SF, AMD64_EFL, 7, 1 }, // SF (sign flag)
    { AMD64_ZF, AMD64_EFL, 6, 1 }, // ZF (zero flag)
    { AMD64_AF, AMD64_EFL, 4, 1 }, // AF (aux carry flag)
    { AMD64_PF, AMD64_EFL, 2, 1 }, // PF (parity flag)
    { AMD64_CF, AMD64_EFL, 0, 1 }, // CF (carry flag)
    { AMD64_VIP, AMD64_EFL, 20, 1 }, // VIP (virtual interrupt pending)
    { AMD64_VIF, AMD64_EFL, 19, 1 }, // VIF (virtual interrupt flag)
    { REG_ERROR, REG_ERROR, 0, 0 }
};

RegisterGroup g_Amd64BaseGroup =
{
    0, g_Amd64Defs, g_Amd64SubDefs, g_Amd64AllExtraDesc
};
RegisterGroup g_Amd64KernelGroup =
{
    0, g_Amd64KernelReg, NULL, g_Amd64KernelExtraDesc
};

// First ExecTypes entry must be the actual processor type.
ULONG g_Amd64ExecTypes[] =
{
    IMAGE_FILE_MACHINE_AMD64, IMAGE_FILE_MACHINE_I386,
};
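
//
// Maps CodeView (CV) register numbers, as used in debug symbols, to
// the debugger's internal AMD64_* register indices.
//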
// This array must be sorted by CV reg value.
CvRegMap g_Amd64CvRegMap[] =
{
    {CV_AMD64_AL, AMD64_AL},
    {CV_AMD64_CL, AMD64_CL},
    {CV_AMD64_DL, AMD64_DL},
    {CV_AMD64_BL, AMD64_BL},
    {CV_AMD64_AH, AMD64_AH},
    {CV_AMD64_CH, AMD64_CH},
    {CV_AMD64_DH, AMD64_DH},
    {CV_AMD64_BH, AMD64_BH},
    {CV_AMD64_AX, AMD64_AX},
    {CV_AMD64_CX, AMD64_CX},
    {CV_AMD64_DX, AMD64_DX},
    {CV_AMD64_BX, AMD64_BX},
    {CV_AMD64_SP, AMD64_SP},
    {CV_AMD64_BP, AMD64_BP},
    {CV_AMD64_SI, AMD64_SI},
    {CV_AMD64_DI, AMD64_DI},
    {CV_AMD64_EAX, AMD64_EAX},
    {CV_AMD64_ECX, AMD64_ECX},
    {CV_AMD64_EDX, AMD64_EDX},
    {CV_AMD64_EBX, AMD64_EBX},
    {CV_AMD64_ESP, AMD64_ESP},
    {CV_AMD64_EBP, AMD64_EBP},
    {CV_AMD64_ESI, AMD64_ESI},
    {CV_AMD64_EDI, AMD64_EDI},
    {CV_AMD64_ES, AMD64_ES},
    {CV_AMD64_CS, AMD64_CS},
    {CV_AMD64_SS, AMD64_SS},
    {CV_AMD64_DS, AMD64_DS},
    {CV_AMD64_FS, AMD64_FS},
    {CV_AMD64_GS, AMD64_GS},
    {CV_AMD64_FLAGS, AMD64_FL},
    {CV_AMD64_RIP, AMD64_RIP},
    {CV_AMD64_EFLAGS, AMD64_EFL},
    {CV_AMD64_CR0, AMD64_CR0},
    {CV_AMD64_CR2, AMD64_CR2},
    {CV_AMD64_CR3, AMD64_CR3},
    {CV_AMD64_CR4, AMD64_CR4},
    {CV_AMD64_CR8, AMD64_CR8},
    {CV_AMD64_DR0, AMD64_DR0},
    {CV_AMD64_DR1, AMD64_DR1},
    {CV_AMD64_DR2, AMD64_DR2},
    {CV_AMD64_DR3, AMD64_DR3},
    {CV_AMD64_DR6, AMD64_DR6},
    {CV_AMD64_DR7, AMD64_DR7},
    {CV_AMD64_GDTR, AMD64_GDTR},
    {CV_AMD64_GDTL, AMD64_GDTL},
    {CV_AMD64_IDTR, AMD64_IDTR},
    {CV_AMD64_IDTL, AMD64_IDTL},
    {CV_AMD64_LDTR, AMD64_LDTR},
    {CV_AMD64_TR, AMD64_TR},
    {CV_AMD64_ST0, AMD64_ST0},
    {CV_AMD64_ST1, AMD64_ST1},
    {CV_AMD64_ST2, AMD64_ST2},
    {CV_AMD64_ST3, AMD64_ST3},
    {CV_AMD64_ST4, AMD64_ST4},
    {CV_AMD64_ST5, AMD64_ST5},
    {CV_AMD64_ST6, AMD64_ST6},
    {CV_AMD64_ST7, AMD64_ST7},
    {CV_AMD64_CTRL, AMD64_FPCW},
    {CV_AMD64_STAT, AMD64_FPSW},
    {CV_AMD64_TAG, AMD64_FPTW},
    {CV_AMD64_MM0, AMD64_MM0},
    {CV_AMD64_MM1, AMD64_MM1},
    {CV_AMD64_MM2, AMD64_MM2},
    {CV_AMD64_MM3, AMD64_MM3},
    {CV_AMD64_MM4, AMD64_MM4},
    {CV_AMD64_MM5, AMD64_MM5},
    {CV_AMD64_MM6, AMD64_MM6},
    {CV_AMD64_MM7, AMD64_MM7},
    {CV_AMD64_XMM0, AMD64_XMM0},
    {CV_AMD64_XMM1, AMD64_XMM1},
    {CV_AMD64_XMM2, AMD64_XMM2},
    {CV_AMD64_XMM3, AMD64_XMM3},
    {CV_AMD64_XMM4, AMD64_XMM4},
    {CV_AMD64_XMM5, AMD64_XMM5},
    {CV_AMD64_XMM6, AMD64_XMM6},
    {CV_AMD64_XMM7, AMD64_XMM7},
    {CV_AMD64_MXCSR, AMD64_MXCSR},
    {CV_AMD64_XMM8, AMD64_XMM8},
    {CV_AMD64_XMM9, AMD64_XMM9},
    {CV_AMD64_XMM10, AMD64_XMM10},
    {CV_AMD64_XMM11, AMD64_XMM11},
    {CV_AMD64_XMM12, AMD64_XMM12},
    {CV_AMD64_XMM13, AMD64_XMM13},
    {CV_AMD64_XMM14, AMD64_XMM14},
    {CV_AMD64_XMM15, AMD64_XMM15},
    {CV_AMD64_SIL, AMD64_SIL},
    {CV_AMD64_DIL, AMD64_DIL},
    {CV_AMD64_BPL, AMD64_BPL},
    {CV_AMD64_SPL, AMD64_SPL},
    {CV_AMD64_RAX, AMD64_RAX},
    {CV_AMD64_RBX, AMD64_RBX},
    {CV_AMD64_RCX, AMD64_RCX},
    {CV_AMD64_RDX, AMD64_RDX},
    {CV_AMD64_RSI, AMD64_RSI},
    {CV_AMD64_RDI, AMD64_RDI},
    {CV_AMD64_RBP, AMD64_RBP},
    {CV_AMD64_RSP, AMD64_RSP},
    {CV_AMD64_R8, AMD64_R8},
    {CV_AMD64_R9, AMD64_R9},
    {CV_AMD64_R10, AMD64_R10},
    {CV_AMD64_R11, AMD64_R11},
    {CV_AMD64_R12, AMD64_R12},
    {CV_AMD64_R13, AMD64_R13},
    {CV_AMD64_R14, AMD64_R14},
    {CV_AMD64_R15, AMD64_R15},
    {CV_AMD64_R8B, AMD64_R8B},
    {CV_AMD64_R9B, AMD64_R9B},
    {CV_AMD64_R10B, AMD64_R10B},
    {CV_AMD64_R11B, AMD64_R11B},
    {CV_AMD64_R12B, AMD64_R12B},
    {CV_AMD64_R13B, AMD64_R13B},
    {CV_AMD64_R14B, AMD64_R14B},
    {CV_AMD64_R15B, AMD64_R15B},
    {CV_AMD64_R8W, AMD64_R8W},
    {CV_AMD64_R9W, AMD64_R9W},
    {CV_AMD64_R10W, AMD64_R10W},
    {CV_AMD64_R11W, AMD64_R11W},
    {CV_AMD64_R12W, AMD64_R12W},
    {CV_AMD64_R13W, AMD64_R13W},
    {CV_AMD64_R14W, AMD64_R14W},
    {CV_AMD64_R15W, AMD64_R15W},
    {CV_AMD64_R8D, AMD64_R8D},
    {CV_AMD64_R9D, AMD64_R9D},
    {CV_AMD64_R10D, AMD64_R10D},
    {CV_AMD64_R11D, AMD64_R11D},
    {CV_AMD64_R12D, AMD64_R12D},
    {CV_AMD64_R13D, AMD64_R13D},
    {CV_AMD64_R14D, AMD64_R14D},
    {CV_AMD64_R15D, AMD64_R15D},
};
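
// TRUE when the code being executed is in a 64-bit (long mode) code
// segment; maintained by InitializeContext below.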
BOOL g_Amd64InCode64;

Amd64MachineInfo::Amd64MachineInfo(TargetInfo* Target)
    : BaseX86MachineInfo(Target)
{
    m_FullName = "AMD x86-64";
    m_AbbrevName = "AMD64";
    m_PageSize = AMD64_PAGE_SIZE;
    m_PageShift = AMD64_PAGE_SHIFT;
    m_NumExecTypes = DIMA(g_Amd64ExecTypes);
    m_ExecTypes = g_Amd64ExecTypes;
    m_Ptr64 = TRUE;
    m_RetRegIndex = AMD64_RAX;
    m_AllMask = REGALL_INT64 | REGALL_SEGREG;
    m_SizeCanonicalContext = sizeof(AMD64_CONTEXT);
    m_SverCanonicalContext = NT_SVER_XP;
    m_MaxDataBreakpoints = 4;
    m_SymPrefix = NULL;
    m_CvRegMapSize = DIMA(g_Amd64CvRegMap);
    m_CvRegMap = g_Amd64CvRegMap;
}

HRESULT
Amd64MachineInfo::Initialize(void)
{
    m_NumGroups = 1;
    m_Groups[0] = &g_Amd64BaseGroup;
    if (IS_KERNEL_TARGET(m_Target))
    {
        m_Groups[m_NumGroups] = &g_Amd64KernelGroup;
        m_NumGroups++;
    }
    return MachineInfo::Initialize();
}

void
Amd64MachineInfo::GetSystemTypeInfo(PSYSTEM_TYPE_INFO Info)
{
    Info->TriagePrcbOffset = AMD64_TRIAGE_PRCB_ADDRESS;
    Info->SizeTargetContext = sizeof(AMD64_CONTEXT);
    Info->OffsetTargetContextFlags = FIELD_OFFSET(AMD64_CONTEXT, ContextFlags);
    Info->SizeControlReport = sizeof(AMD64_DBGKD_CONTROL_REPORT);
    Info->OffsetSpecialRegisters = AMD64_DEBUG_CONTROL_SPACE_KSPECIAL;
    Info->SizeKspecialRegisters = sizeof(AMD64_KSPECIAL_REGISTERS);
    Info->SizePageFrameNumber = sizeof(ULONG64);
    Info->SizePte = sizeof(ULONG64);
    Info->SizeDynamicFunctionTable = sizeof(AMD64_DYNAMIC_FUNCTION_TABLE);
    Info->SizeRuntimeFunction = sizeof(_IMAGE_RUNTIME_FUNCTION_ENTRY);
    Info->SharedUserDataOffset = 0;
    Info->UmSharedUserDataOffset = 0;
    Info->UmSharedSysCallOffset = 0;
    Info->UmSharedSysCallSize = 0;
    if (m_Target->m_PlatformId == VER_PLATFORM_WIN32_NT)
    {
        Info->SharedUserDataOffset = IS_KERNEL_TARGET(m_Target) ?
            AMD64_KI_USER_SHARED_DATA : MM_SHARED_USER_DATA_VA;
        Info->UmSharedUserDataOffset = MM_SHARED_USER_DATA_VA;
    }
}

void
Amd64MachineInfo::GetDefaultKdData(PKDDEBUGGER_DATA64 KdData)
{
    //
    // Parts of the data block may already be filled out
    // so don't destroy anything that's already set.
    //
    // AMD64 should always have a certain amount of
    // the data block present. This routine is also
    // called for default initialization before any
    // data block data has been retrieved, though, so
    // limit the assert to just the data-block-read case.

    DBG_ASSERT(!KdData->Header.Size ||
               KdData->OffsetKThreadNextProcessor);

    if (!KdData->SizePcr)
    {
        KdData->SizePcr = AMD64_KPCR_SIZE;
        KdData->OffsetPcrSelfPcr = AMD64_KPCR_SELF;
        KdData->OffsetPcrCurrentPrcb = AMD64_KPCR_CURRENT_PRCB;
        KdData->OffsetPcrContainedPrcb = AMD64_KPCR_PRCB;
        KdData->OffsetPcrInitialBStore = 0;
        KdData->OffsetPcrBStoreLimit = 0;
        KdData->OffsetPcrInitialStack = 0;
        KdData->OffsetPcrStackLimit = 0;
        KdData->OffsetPrcbPcrPage = 0;
        KdData->OffsetPrcbProcStateSpecialReg = AMD64_KPRCB_SPECIAL_REG;
        KdData->GdtR0Code = AMD64_KGDT64_R0_CODE;
        KdData->GdtR0Data = AMD64_KGDT64_R0_DATA;
        KdData->GdtR0Pcr = 0;
        KdData->GdtR3Code = AMD64_KGDT64_R3_CODE + 3;
        KdData->GdtR3Data = AMD64_KGDT64_R3_DATA + 3;
        KdData->GdtR3Teb = 0;
        KdData->GdtLdt = 0;
        KdData->GdtTss = AMD64_KGDT64_SYS_TSS;
        KdData->Gdt64R3CmCode = AMD64_KGDT64_R3_CMCODE + 3;
        KdData->Gdt64R3CmTeb = AMD64_KGDT64_R3_CMTEB + 3;
    }
}

void
Amd64MachineInfo::
InitializeContext(ULONG64 Pc,
                  PDBGKD_ANY_CONTROL_REPORT ControlReport)
{
    m_Context.Amd64Context.Rip = Pc;
    m_ContextState = Pc ? MCTX_PC : MCTX_NONE;

    if (ControlReport != NULL)
    {
        BpOut("InitializeContext(%d) DR6 %I64X DR7 %I64X\n",
              m_Target->m_RegContextProcessor,
              ControlReport->Amd64ControlReport.Dr6,
              ControlReport->Amd64ControlReport.Dr7);

        m_Special.Amd64Special.KernelDr6 = ControlReport->Amd64ControlReport.Dr6;
        m_Special.Amd64Special.KernelDr7 = ControlReport->Amd64ControlReport.Dr7;
        m_ContextState = MCTX_DR67_REPORT;

        if (ControlReport->Amd64ControlReport.ReportFlags &
            AMD64_REPORT_INCLUDES_SEGS)
        {
            m_Context.Amd64Context.SegCs =
                ControlReport->Amd64ControlReport.SegCs;
            m_Context.Amd64Context.SegDs =
                ControlReport->Amd64ControlReport.SegDs;
            m_Context.Amd64Context.SegEs =
                ControlReport->Amd64ControlReport.SegEs;
            m_Context.Amd64Context.SegFs =
                ControlReport->Amd64ControlReport.SegFs;
            m_Context.Amd64Context.EFlags =
                ControlReport->Amd64ControlReport.EFlags;
            m_ContextState = MCTX_REPORT;
        }
    }

    g_X86InVm86 = FALSE;
    g_X86InCode16 = FALSE;
    // In the absence of other information, assume we're
    // executing 64-bit code.
    g_Amd64InCode64 = TRUE;

    // XXX drewb - For the moment, always assume user-mode
    // is flat 64-bit.
    if (IS_KERNEL_TARGET(m_Target) && IS_CONTEXT_POSSIBLE(m_Target))
    {
        if (ControlReport == NULL ||
            (ControlReport->Amd64ControlReport.ReportFlags &
             AMD64_REPORT_STANDARD_CS) == 0)
        {
            DESCRIPTOR64 Desc;

            // Check what kind of code segment we're in.
            if (GetSegRegDescriptor(SEGREG_CODE, &Desc) != S_OK)
            {
                WarnOut("CS descriptor lookup failed\n");
            }
            else if ((Desc.Flags & X86_DESC_LONG_MODE) == 0)
            {
                g_Amd64InCode64 = FALSE;
                g_X86InVm86 = X86_IS_VM86(GetReg32(X86_EFL));
                g_X86InCode16 = (Desc.Flags & X86_DESC_DEFAULT_BIG) == 0;
            }
        }
        else
        {
            // We're in a standard code segment so cache
            // a default descriptor for CS to avoid further
            // CS lookups.
            m_Target->EmulateNtAmd64SelDescriptor(m_Target->m_RegContextThread,
                                                  this,
                                                  m_Context.Amd64Context.SegCs,
                                                  &m_SegRegDesc[SEGREG_CODE]);
        }
    }

    // Add instructions to cache only if we're in flat mode.
    if (Pc && ControlReport != NULL &&
        !g_X86InVm86 && !g_X86InCode16 && g_Amd64InCode64)
    {
        CacheReportInstructions
            (Pc, ControlReport->Amd64ControlReport.InstructionCount,
             ControlReport->Amd64ControlReport.InstructionStream);
    }
}
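
//
// The MCTX_* context states form an ordered ladder (no state, PC only,
// DR6/DR7 report, full report, context, full context plus special
// registers), so the numeric comparisons below express how much target
// state has already been cached.
//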
HRESULT
Amd64MachineInfo::KdGetContextState(ULONG State)
{
    HRESULT Status;

    if (State >= MCTX_CONTEXT && m_ContextState < MCTX_CONTEXT)
    {
        Status = m_Target->GetContext(m_Target->m_RegContextThread->m_Handle,
                                      &m_Context);
        if (Status != S_OK)
        {
            return Status;
        }

        m_ContextState = MCTX_CONTEXT;
    }

    if (State >= MCTX_FULL && m_ContextState < MCTX_FULL)
    {
        Status = m_Target->GetTargetSpecialRegisters
            (m_Target->m_RegContextThread->m_Handle,
             (PCROSS_PLATFORM_KSPECIAL_REGISTERS)&m_Special.Amd64Special);
        if (Status != S_OK)
        {
            return Status;
        }

        Status = m_Target->GetTargetSegRegDescriptors
            (m_Target->m_RegContextThread->m_Handle,
             0, SEGREG_COUNT, m_SegRegDesc);
        if (Status != S_OK)
        {
            return Status;
        }

        m_ContextState = MCTX_FULL;

        BpOut("GetContextState(%d) DR6 %I64X DR7 %I64X\n",
              m_Target->m_RegContextProcessor, m_Special.Amd64Special.KernelDr6,
              m_Special.Amd64Special.KernelDr7);
    }

    return S_OK;
}

HRESULT
Amd64MachineInfo::KdSetContext(void)
{
    HRESULT Status;

    Status = m_Target->SetContext(m_Target->m_RegContextThread->m_Handle,
                                  &m_Context);
    if (Status != S_OK)
    {
        return Status;
    }

    Status = m_Target->SetTargetSpecialRegisters
        (m_Target->m_RegContextThread->m_Handle,
         (PCROSS_PLATFORM_KSPECIAL_REGISTERS) &m_Special.Amd64Special);

    BpOut("SetContext(%d) DR6 %I64X DR7 %I64X\n",
          m_Target->m_RegContextProcessor, m_Special.Amd64Special.KernelDr6,
          m_Special.Amd64Special.KernelDr7);

    return S_OK;
}

HRESULT
Amd64MachineInfo::ConvertContextFrom(PCROSS_PLATFORM_CONTEXT Context,
                                     ULONG FromSver, ULONG FromSize,
                                     PVOID From)
{
    if (FromSize >= sizeof(AMD64_CONTEXT))
    {
        memcpy(Context, From, sizeof(AMD64_CONTEXT));
    }
    else
    {
        return E_INVALIDARG;
    }

    return S_OK;
}

HRESULT
Amd64MachineInfo::ConvertContextTo(PCROSS_PLATFORM_CONTEXT Context,
                                   ULONG ToSver, ULONG ToSize, PVOID To)
{
    if (ToSize >= sizeof(AMD64_CONTEXT))
    {
        memcpy(To, Context, sizeof(AMD64_CONTEXT));
    }
    else
    {
        return E_INVALIDARG;
    }

    return S_OK;
}

void
Amd64MachineInfo::InitializeContextFlags(PCROSS_PLATFORM_CONTEXT Context,
                                         ULONG Version)
{
    ULONG ContextFlags;

    ContextFlags = AMD64_CONTEXT_FULL | AMD64_CONTEXT_SEGMENTS;
    if (IS_USER_TARGET(m_Target))
    {
        ContextFlags |= AMD64_CONTEXT_DEBUG_REGISTERS;
    }

    Context->Amd64Context.ContextFlags = ContextFlags;
}
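
//
// A switched-out thread leaves only a switch frame on its stack, so
// just the frame pointer, stack pointer and return address can be
// recovered here; the rest of the context is left zeroed.
//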
HRESULT
Amd64MachineInfo::GetContextFromThreadStack(ULONG64 ThreadBase,
                                            PCROSS_PLATFORM_CONTEXT Context,
                                            ULONG64 Stack)
{
    HRESULT Status;
    AMD64_KSWITCH_FRAME SwitchFrame;

    if ((Status = m_Target->ReadAllVirtual(m_Target->m_ProcessHead,
                                           Stack,
                                           &SwitchFrame,
                                           sizeof(SwitchFrame))) != S_OK)
    {
        return Status;
    }

    ZeroMemory(Context, sizeof(*Context));
    Context->Amd64Context.Rbp = SwitchFrame.Rbp;
    Context->Amd64Context.Rsp = Stack + sizeof(SwitchFrame);
    Context->Amd64Context.Rip = SwitchFrame.Return;
    return S_OK;
}

HRESULT
Amd64MachineInfo::GetContextFromFiber(ProcessInfo* Process,
                                      ULONG64 FiberBase,
                                      PCROSS_PLATFORM_CONTEXT Context,
                                      BOOL Verbose)
{
    HRESULT Status;
    AMD64_FIBER Fiber;

    if ((Status = m_Target->
         ReadAllVirtual(Process, FiberBase, &Fiber, sizeof(Fiber))) != S_OK)
    {
        if (Verbose)
        {
            ErrOut("Unable to read fiber data at %s\n",
                   FormatMachineAddr64(this, FiberBase));
        }
        return Status;
    }

    if ((Status = ConvertContextFrom(Context, m_Target->m_SystemVersion,
                                     m_Target->m_TypeInfo.SizeTargetContext,
                                     &Fiber.FiberContext)) != S_OK)
    {
        if (Verbose)
        {
            ErrOut("Unable to convert context to canonical form\n");
        }
        return Status;
    }

    if (Verbose)
    {
        dprintf("Fiber at %s Fiber data: %s\n",
                FormatMachineAddr64(this, FiberBase),
                FormatMachineAddr64(this, Fiber.FiberData));
        dprintf(" Stack base: %s Stack limit: %s\n",
                FormatMachineAddr64(this, Fiber.StackBase),
                FormatMachineAddr64(this, Fiber.StackLimit));
    }

    return S_OK;
}

HRESULT
Amd64MachineInfo::GetContextFromTrapFrame(ULONG64 TrapBase,
                                          PCROSS_PLATFORM_CONTEXT Context,
                                          BOOL Verbose)
{
    HRESULT Status;
    AMD64_KTRAP_FRAME TrapContents;

    if ((Status = m_Target->ReadAllVirtual(m_Target->m_ProcessHead,
                                           TrapBase, &TrapContents,
                                           sizeof(TrapContents))) != S_OK)
    {
        if (Verbose)
        {
            ErrOut("Unable to read trap frame at %s\n",
                   FormatMachineAddr64(this, TrapBase));
        }
        return Status;
    }

    ZeroMemory(Context, sizeof(*Context));
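
    // A KTRAP_FRAME holds only a subset of a full CONTEXT; copy over
    // just the fields the trap frame actually saves.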
#define CPCXT(Fld) Context->Amd64Context.Fld = TrapContents.Fld
    CPCXT(MxCsr); CPCXT(Rax); CPCXT(Rcx); CPCXT(Rdx); CPCXT(R8);
    CPCXT(R9); CPCXT(R10); CPCXT(R11); CPCXT(Dr0); CPCXT(Dr1);
    CPCXT(Dr2); CPCXT(Dr3); CPCXT(Dr6); CPCXT(Dr7);
    CPCXT(Xmm0); CPCXT(Xmm1); CPCXT(Xmm2); CPCXT(Xmm3); CPCXT(Xmm4);
    CPCXT(Xmm5);
    CPCXT(SegDs); CPCXT(SegEs); CPCXT(SegFs); CPCXT(SegGs);
    CPCXT(Rbx); CPCXT(Rdi); CPCXT(Rsi); CPCXT(Rbp); CPCXT(Rip);
    CPCXT(SegCs); CPCXT(EFlags); CPCXT(Rsp); CPCXT(SegSs);
#undef CPCXT

    return S_OK;
}

void
Amd64MachineInfo::GetScopeFrameFromContext(PCROSS_PLATFORM_CONTEXT Context,
                                           PDEBUG_STACK_FRAME ScopeFrame)
{
    ZeroMemory(ScopeFrame, sizeof(*ScopeFrame));
    ScopeFrame->InstructionOffset = Context->Amd64Context.Rip;
    ScopeFrame->FrameOffset = Context->Amd64Context.Rbp;
    ScopeFrame->StackOffset = Context->Amd64Context.Rsp;
}

HRESULT
Amd64MachineInfo::GetScopeFrameRegister(ULONG Reg,
                                        PDEBUG_STACK_FRAME ScopeFrame,
                                        PULONG64 Value)
{
    HRESULT Status;
    REGVAL RegVal;

    switch(Reg)
    {
    case AMD64_RSP:
        *Value = ScopeFrame->StackOffset;
        return S_OK;
    case AMD64_RBP:
        *Value = ScopeFrame->FrameOffset;
        return S_OK;
    default:
        RegVal.I64 = 0;
        if ((Status = FullGetVal(Reg, &RegVal)) != S_OK)
        {
            return Status;
        }
        *Value = RegVal.I64;
        return S_OK;
    }
}

HRESULT
Amd64MachineInfo::SetScopeFrameRegister(ULONG Reg,
                                        PDEBUG_STACK_FRAME ScopeFrame,
                                        ULONG64 Value)
{
    REGVAL RegVal;

    switch(Reg)
    {
    case AMD64_RSP:
        ScopeFrame->StackOffset = Value;
        return S_OK;
    case AMD64_RBP:
        ScopeFrame->FrameOffset = Value;
        return S_OK;
    default:
        RegVal.Type = GetType(Reg);
        RegVal.I64 = Value;
        return FullSetVal(Reg, &RegVal);
    }
}

HRESULT
Amd64MachineInfo::GetExdiContext(IUnknown* Exdi, PEXDI_CONTEXT Context,
                                 EXDI_CONTEXT_TYPE CtxType)
{
    return StaticGetExdiContext(Exdi, Context, CtxType);
}

HRESULT
Amd64MachineInfo::SetExdiContext(IUnknown* Exdi, PEXDI_CONTEXT Context,
                                 EXDI_CONTEXT_TYPE CtxType)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    // Don't change the existing group selections on the assumption
    // that there was a full get prior to any modifications so
    // all groups are valid.
    return ((IeXdiX86_64Context*)Exdi)->SetContext(Context->Amd64Context);
}

void
Amd64MachineInfo::ConvertExdiContextFromContext
    (PCROSS_PLATFORM_CONTEXT Context, PEXDI_CONTEXT ExdiContext,
     EXDI_CONTEXT_TYPE CtxType)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_SEGMENTS)
    {
        ExdiContext->Amd64Context.SegDs = Context->Amd64Context.SegDs;
        ExdiContext->Amd64Context.SegEs = Context->Amd64Context.SegEs;
        ExdiContext->Amd64Context.SegFs = Context->Amd64Context.SegFs;
        ExdiContext->Amd64Context.SegGs = Context->Amd64Context.SegGs;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_CONTROL)
    {
        ExdiContext->Amd64Context.SegCs = Context->Amd64Context.SegCs;
        ExdiContext->Amd64Context.Rip = Context->Amd64Context.Rip;
        ExdiContext->Amd64Context.SegSs = Context->Amd64Context.SegSs;
        ExdiContext->Amd64Context.Rsp = Context->Amd64Context.Rsp;
        ExdiContext->Amd64Context.EFlags = Context->Amd64Context.EFlags;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_DEBUG_REGISTERS)
    {
        ExdiContext->Amd64Context.Dr0 = Context->Amd64Context.Dr0;
        ExdiContext->Amd64Context.Dr1 = Context->Amd64Context.Dr1;
        ExdiContext->Amd64Context.Dr2 = Context->Amd64Context.Dr2;
        ExdiContext->Amd64Context.Dr3 = Context->Amd64Context.Dr3;
        ExdiContext->Amd64Context.Dr6 = Context->Amd64Context.Dr6;
        ExdiContext->Amd64Context.Dr7 = Context->Amd64Context.Dr7;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_INTEGER)
    {
        ExdiContext->Amd64Context.Rax = Context->Amd64Context.Rax;
        ExdiContext->Amd64Context.Rcx = Context->Amd64Context.Rcx;
        ExdiContext->Amd64Context.Rdx = Context->Amd64Context.Rdx;
        ExdiContext->Amd64Context.Rbx = Context->Amd64Context.Rbx;
        ExdiContext->Amd64Context.Rbp = Context->Amd64Context.Rbp;
        ExdiContext->Amd64Context.Rsi = Context->Amd64Context.Rsi;
        ExdiContext->Amd64Context.Rdi = Context->Amd64Context.Rdi;
        ExdiContext->Amd64Context.R8 = Context->Amd64Context.R8;
        ExdiContext->Amd64Context.R9 = Context->Amd64Context.R9;
        ExdiContext->Amd64Context.R10 = Context->Amd64Context.R10;
        ExdiContext->Amd64Context.R11 = Context->Amd64Context.R11;
        ExdiContext->Amd64Context.R12 = Context->Amd64Context.R12;
        ExdiContext->Amd64Context.R13 = Context->Amd64Context.R13;
        ExdiContext->Amd64Context.R14 = Context->Amd64Context.R14;
        ExdiContext->Amd64Context.R15 = Context->Amd64Context.R15;
    }
    if (Context->Amd64Context.ContextFlags & AMD64_CONTEXT_FLOATING_POINT)
    {
        ExdiContext->Amd64Context.ControlWord =
            Context->Amd64Context.FltSave.ControlWord;
        ExdiContext->Amd64Context.StatusWord =
            Context->Amd64Context.FltSave.StatusWord;
        ExdiContext->Amd64Context.TagWord =
            Context->Amd64Context.FltSave.TagWord;
        ExdiContext->Amd64Context.ErrorOffset =
            Context->Amd64Context.FltSave.ErrorOffset;
        ExdiContext->Amd64Context.ErrorSelector =
            Context->Amd64Context.FltSave.ErrorSelector;
        ExdiContext->Amd64Context.DataOffset =
            Context->Amd64Context.FltSave.DataOffset;
        ExdiContext->Amd64Context.DataSelector =
            Context->Amd64Context.FltSave.DataSelector;
        ExdiContext->Amd64Context.RegMXCSR =
            Context->Amd64Context.MxCsr;
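
        // The x87 registers are 80-bit (10-byte) values, hence the
        // 10-byte stride through both register save areas.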
        for (ULONG i = 0; i < 8; i++)
        {
            memcpy(ExdiContext->Amd64Context.RegisterArea + i * 10,
                   Context->Amd64Context.FltSave.FloatRegisters + i * 10,
                   10);
        }
        memcpy(ExdiContext->Amd64Context.RegSSE,
               &Context->Amd64Context.Xmm0, 16 * sizeof(AMD64_M128));
    }
}

void
Amd64MachineInfo::ConvertExdiContextToContext(PEXDI_CONTEXT ExdiContext,
                                              EXDI_CONTEXT_TYPE CtxType,
                                              PCROSS_PLATFORM_CONTEXT Context)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    Context->Amd64Context.SegCs = (USHORT)ExdiContext->Amd64Context.SegCs;
    Context->Amd64Context.SegDs = (USHORT)ExdiContext->Amd64Context.SegDs;
    Context->Amd64Context.SegEs = (USHORT)ExdiContext->Amd64Context.SegEs;
    Context->Amd64Context.SegFs = (USHORT)ExdiContext->Amd64Context.SegFs;
    Context->Amd64Context.SegGs = (USHORT)ExdiContext->Amd64Context.SegGs;
    Context->Amd64Context.SegSs = (USHORT)ExdiContext->Amd64Context.SegSs;
    Context->Amd64Context.EFlags = (ULONG)ExdiContext->Amd64Context.EFlags;
    Context->Amd64Context.Dr0 = ExdiContext->Amd64Context.Dr0;
    Context->Amd64Context.Dr1 = ExdiContext->Amd64Context.Dr1;
    Context->Amd64Context.Dr2 = ExdiContext->Amd64Context.Dr2;
    Context->Amd64Context.Dr3 = ExdiContext->Amd64Context.Dr3;
    Context->Amd64Context.Dr6 = ExdiContext->Amd64Context.Dr6;
    Context->Amd64Context.Dr7 = ExdiContext->Amd64Context.Dr7;
    Context->Amd64Context.Rax = ExdiContext->Amd64Context.Rax;
    Context->Amd64Context.Rcx = ExdiContext->Amd64Context.Rcx;
    Context->Amd64Context.Rdx = ExdiContext->Amd64Context.Rdx;
    Context->Amd64Context.Rbx = ExdiContext->Amd64Context.Rbx;
    Context->Amd64Context.Rsp = ExdiContext->Amd64Context.Rsp;
    Context->Amd64Context.Rbp = ExdiContext->Amd64Context.Rbp;
    Context->Amd64Context.Rsi = ExdiContext->Amd64Context.Rsi;
    Context->Amd64Context.Rdi = ExdiContext->Amd64Context.Rdi;
    Context->Amd64Context.R8 = ExdiContext->Amd64Context.R8;
    Context->Amd64Context.R9 = ExdiContext->Amd64Context.R9;
    Context->Amd64Context.R10 = ExdiContext->Amd64Context.R10;
    Context->Amd64Context.R11 = ExdiContext->Amd64Context.R11;
    Context->Amd64Context.R12 = ExdiContext->Amd64Context.R12;
    Context->Amd64Context.R13 = ExdiContext->Amd64Context.R13;
    Context->Amd64Context.R14 = ExdiContext->Amd64Context.R14;
    Context->Amd64Context.R15 = ExdiContext->Amd64Context.R15;
    Context->Amd64Context.Rip = ExdiContext->Amd64Context.Rip;
    Context->Amd64Context.FltSave.ControlWord =
        (USHORT)ExdiContext->Amd64Context.ControlWord;
    Context->Amd64Context.FltSave.StatusWord =
        (USHORT)ExdiContext->Amd64Context.StatusWord;
    Context->Amd64Context.FltSave.TagWord =
        (USHORT)ExdiContext->Amd64Context.TagWord;
    // XXX drewb - No ErrorOpcode in x86_64.
    Context->Amd64Context.FltSave.ErrorOpcode = 0;
    Context->Amd64Context.FltSave.ErrorOffset =
        ExdiContext->Amd64Context.ErrorOffset;
    Context->Amd64Context.FltSave.ErrorSelector =
        (USHORT)ExdiContext->Amd64Context.ErrorSelector;
    Context->Amd64Context.FltSave.DataOffset =
        ExdiContext->Amd64Context.DataOffset;
    Context->Amd64Context.FltSave.DataSelector =
        (USHORT)ExdiContext->Amd64Context.DataSelector;
    Context->Amd64Context.MxCsr =
        ExdiContext->Amd64Context.RegMXCSR;
    for (ULONG i = 0; i < 8; i++)
    {
        memcpy(Context->Amd64Context.FltSave.FloatRegisters + i * 10,
               ExdiContext->Amd64Context.RegisterArea + i * 10, 10);
    }
    memcpy(&Context->Amd64Context.Xmm0, ExdiContext->Amd64Context.RegSSE,
           16 * sizeof(AMD64_M128));
}

void
Amd64MachineInfo::ConvertExdiContextToSegDescs(PEXDI_CONTEXT ExdiContext,
                                               EXDI_CONTEXT_TYPE CtxType,
                                               ULONG Start, ULONG Count,
                                               PDESCRIPTOR64 Descs)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    while (Count-- > 0)
    {
        SEG64_DESC_INFO* Desc;

        switch(Start)
        {
        case SEGREG_CODE:
            Desc = &ExdiContext->Amd64Context.DescriptorCs;
            break;
        case SEGREG_DATA:
            Desc = &ExdiContext->Amd64Context.DescriptorDs;
            break;
        case SEGREG_STACK:
            Desc = &ExdiContext->Amd64Context.DescriptorSs;
            break;
        case SEGREG_ES:
            Desc = &ExdiContext->Amd64Context.DescriptorEs;
            break;
        case SEGREG_FS:
            Desc = &ExdiContext->Amd64Context.DescriptorFs;
            break;
        case SEGREG_GS:
            Desc = &ExdiContext->Amd64Context.DescriptorGs;
            break;
        case SEGREG_GDT:
            Descs->Base = ExdiContext->Amd64Context.GDTBase;
            Descs->Limit = ExdiContext->Amd64Context.GDTLimit;
            Descs->Flags = X86_DESC_PRESENT;
            Desc = NULL;
            break;
        case SEGREG_LDT:
            Desc = &ExdiContext->Amd64Context.SegLDT;
            break;
        default:
            Descs->Flags = SEGDESC_INVALID;
            Desc = NULL;
            break;
        }

        if (Desc != NULL)
        {
            Descs->Base = Desc->SegBase;
            Descs->Limit = Desc->SegLimit;
            Descs->Flags =
                ((Desc->SegFlags >> 4) & 0xf00) |
                (Desc->SegFlags & 0xff);
        }

        Descs++;
        Start++;
    }
}

void
Amd64MachineInfo::ConvertExdiContextFromSpecial
    (PCROSS_PLATFORM_KSPECIAL_REGISTERS Special,
     PEXDI_CONTEXT ExdiContext, EXDI_CONTEXT_TYPE CtxType)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    ExdiContext->Amd64Context.RegCr0 = Special->Amd64Special.Cr0;
    ExdiContext->Amd64Context.RegCr2 = Special->Amd64Special.Cr2;
    ExdiContext->Amd64Context.RegCr3 = Special->Amd64Special.Cr3;
    ExdiContext->Amd64Context.RegCr4 = Special->Amd64Special.Cr4;
    ExdiContext->Amd64Context.RegCr8 = Special->Amd64Special.Cr8;
    ExdiContext->Amd64Context.RegMXCSR = Special->Amd64Special.MxCsr;
    ExdiContext->Amd64Context.Dr0 = Special->Amd64Special.KernelDr0;
    ExdiContext->Amd64Context.Dr1 = Special->Amd64Special.KernelDr1;
    ExdiContext->Amd64Context.Dr2 = Special->Amd64Special.KernelDr2;
    ExdiContext->Amd64Context.Dr3 = Special->Amd64Special.KernelDr3;
    ExdiContext->Amd64Context.Dr6 = Special->Amd64Special.KernelDr6;
    ExdiContext->Amd64Context.Dr7 = Special->Amd64Special.KernelDr7;
    ExdiContext->Amd64Context.GDTLimit = Special->Amd64Special.Gdtr.Limit;
    ExdiContext->Amd64Context.GDTBase = Special->Amd64Special.Gdtr.Base;
    ExdiContext->Amd64Context.IDTLimit = Special->Amd64Special.Idtr.Limit;
    ExdiContext->Amd64Context.IDTBase = Special->Amd64Special.Idtr.Base;
    ExdiContext->Amd64Context.SelTSS = Special->Amd64Special.Tr;
    ExdiContext->Amd64Context.SelLDT = Special->Amd64Special.Ldtr;
}

void
Amd64MachineInfo::ConvertExdiContextToSpecial
    (PEXDI_CONTEXT ExdiContext, EXDI_CONTEXT_TYPE CtxType,
     PCROSS_PLATFORM_KSPECIAL_REGISTERS Special)
{
    DBG_ASSERT(CtxType == EXDI_CTX_AMD64);

    Special->Amd64Special.Cr0 = ExdiContext->Amd64Context.RegCr0;
    Special->Amd64Special.Cr2 = ExdiContext->Amd64Context.RegCr2;
    Special->Amd64Special.Cr3 = ExdiContext->Amd64Context.RegCr3;
    Special->Amd64Special.Cr4 = ExdiContext->Amd64Context.RegCr4;
    Special->Amd64Special.Cr8 = ExdiContext->Amd64Context.RegCr8;
    Special->Amd64Special.MxCsr = ExdiContext->Amd64Context.RegMXCSR;
    Special->Amd64Special.KernelDr0 = ExdiContext->Amd64Context.Dr0;
    Special->Amd64Special.KernelDr1 = ExdiContext->Amd64Context.Dr1;
    Special->Amd64Special.KernelDr2 = ExdiContext->Amd64Context.Dr2;
    Special->Amd64Special.KernelDr3 = ExdiContext->Amd64Context.Dr3;
    Special->Amd64Special.KernelDr6 = ExdiContext->Amd64Context.Dr6;
    Special->Amd64Special.KernelDr7 = ExdiContext->Amd64Context.Dr7;
    Special->Amd64Special.Gdtr.Limit =
        (USHORT)ExdiContext->Amd64Context.GDTLimit;
    Special->Amd64Special.Gdtr.Base = ExdiContext->Amd64Context.GDTBase;
    Special->Amd64Special.Idtr.Limit =
        (USHORT)ExdiContext->Amd64Context.IDTLimit;
    Special->Amd64Special.Idtr.Base = ExdiContext->Amd64Context.IDTBase;
    Special->Amd64Special.Tr = (USHORT)ExdiContext->Amd64Context.SelTSS;
    Special->Amd64Special.Ldtr = (USHORT)ExdiContext->Amd64Context.SelLDT;
}
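
//
// Classifies a register number into the REGVAL_* value type used to
// hold it; the AMD64_* numbering keeps each register class in a
// contiguous range, so simple range checks suffice.
//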
int
Amd64MachineInfo::GetType(ULONG RegNum)
{
    if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
    {
        return REGVAL_VECTOR64;
    }
    else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
    {
        return REGVAL_VECTOR128;
    }
    else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
    {
        return REGVAL_FLOAT10;
    }
    else if ((RegNum >= AMD64_SEG_FIRST && RegNum <= AMD64_SEG_LAST) ||
             (RegNum >= AMD64_FPCTRL_FIRST && RegNum <= AMD64_FPCTRL_LAST) ||
             RegNum == AMD64_TR || RegNum == AMD64_LDTR ||
             RegNum == AMD64_GDTL || RegNum == AMD64_IDTL)
    {
        return REGVAL_INT16;
    }
    else if (RegNum == AMD64_EFL ||
             RegNum == AMD64_MXCSR || RegNum == AMD64_KMXCSR)
    {
        return REGVAL_INT32;
    }
    else if (RegNum < AMD64_SUBREG_BASE)
    {
        return REGVAL_INT64;
    }
    else
    {
        return REGVAL_SUB64;
    }
}
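
//
// Register reads are satisfied from whatever partial state (PC,
// control report, DR6/DR7) has already been cached when possible;
// anything else forces the appropriate amount of context to be
// fetched from the target.
//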
HRESULT
Amd64MachineInfo::GetVal(ULONG RegNum, REGVAL* Val)
{
    HRESULT Status;

    // The majority of the registers are 64-bit so default
    // to that type.
    Val->Type = REGVAL_INT64;

    switch(m_ContextState)
    {
    case MCTX_PC:
        if (RegNum == AMD64_RIP)
        {
            Val->I64 = m_Context.Amd64Context.Rip;
            return S_OK;
        }
        goto MctxContext;

    case MCTX_DR67_REPORT:
        switch(RegNum)
        {
        case AMD64_KDR6:
            Val->I64 = m_Special.Amd64Special.KernelDr6;
            break;
        case AMD64_KDR7:
            Val->I64 = m_Special.Amd64Special.KernelDr7;
            break;
        default:
            goto MctxContext;
        }
        return S_OK;

    case MCTX_REPORT:
        switch(RegNum)
        {
        case AMD64_RIP:
            Val->I64 = m_Context.Amd64Context.Rip;
            break;
        case AMD64_EFL:
            Val->Type = REGVAL_INT32;
            Val->I64 = m_Context.Amd64Context.EFlags;
            break;
        case AMD64_CS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegCs;
            break;
        case AMD64_DS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegDs;
            break;
        case AMD64_ES:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegEs;
            break;
        case AMD64_FS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegFs;
            break;
        case AMD64_KDR6:
            Val->I64 = m_Special.Amd64Special.KernelDr6;
            break;
        case AMD64_KDR7:
            Val->I64 = m_Special.Amd64Special.KernelDr7;
            break;
        default:
            goto MctxContext;
        }
        return S_OK;

    case MCTX_NONE:
    MctxContext:
        if ((Status = GetContextState(MCTX_CONTEXT)) != S_OK)
        {
            return Status;
        }
        // Fall through.
    case MCTX_CONTEXT:
        switch(RegNum)
        {
        case AMD64_RIP:
            Val->I64 = m_Context.Amd64Context.Rip;
            return S_OK;
        case AMD64_EFL:
            Val->Type = REGVAL_INT32;
            Val->I64 = m_Context.Amd64Context.EFlags;
            return S_OK;
        case AMD64_CS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegCs;
            return S_OK;
        case AMD64_DS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegDs;
            return S_OK;
        case AMD64_ES:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegEs;
            return S_OK;
        case AMD64_FS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegFs;
            return S_OK;
        case AMD64_RAX:
            Val->I64 = m_Context.Amd64Context.Rax;
            return S_OK;
        case AMD64_RCX:
            Val->I64 = m_Context.Amd64Context.Rcx;
            return S_OK;
        case AMD64_RDX:
            Val->I64 = m_Context.Amd64Context.Rdx;
            return S_OK;
        case AMD64_RBX:
            Val->I64 = m_Context.Amd64Context.Rbx;
            return S_OK;
        case AMD64_RSP:
            Val->I64 = m_Context.Amd64Context.Rsp;
            return S_OK;
        case AMD64_RBP:
            Val->I64 = m_Context.Amd64Context.Rbp;
            return S_OK;
        case AMD64_RSI:
            Val->I64 = m_Context.Amd64Context.Rsi;
            return S_OK;
        case AMD64_RDI:
            Val->I64 = m_Context.Amd64Context.Rdi;
            return S_OK;
        case AMD64_R8:
            Val->I64 = m_Context.Amd64Context.R8;
            return S_OK;
        case AMD64_R9:
            Val->I64 = m_Context.Amd64Context.R9;
            return S_OK;
        case AMD64_R10:
            Val->I64 = m_Context.Amd64Context.R10;
            return S_OK;
        case AMD64_R11:
            Val->I64 = m_Context.Amd64Context.R11;
            return S_OK;
        case AMD64_R12:
            Val->I64 = m_Context.Amd64Context.R12;
            return S_OK;
        case AMD64_R13:
            Val->I64 = m_Context.Amd64Context.R13;
            return S_OK;
        case AMD64_R14:
            Val->I64 = m_Context.Amd64Context.R14;
            return S_OK;
        case AMD64_R15:
            Val->I64 = m_Context.Amd64Context.R15;
            return S_OK;
        case AMD64_GS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegGs;
            return S_OK;
        case AMD64_SS:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.SegSs;
            return S_OK;
        case AMD64_FPCW:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.FltSave.ControlWord;
            return S_OK;
        case AMD64_FPSW:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.FltSave.StatusWord;
            return S_OK;
        case AMD64_FPTW:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Context.Amd64Context.FltSave.TagWord;
            return S_OK;
        case AMD64_MXCSR:
            Val->Type = REGVAL_INT32;
            Val->I64 = m_Context.Amd64Context.MxCsr;
            return S_OK;
        }
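
        // The MMX registers alias the x87 register stack, so the save
        // slot for MMn depends on the FPU top-of-stack field in the
        // status word; GetMmxRegOffset performs that mapping.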
        if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
        {
            Val->Type = REGVAL_VECTOR64;
            Val->I64 = *(ULONG64 UNALIGNED*)&m_Context.Amd64Context.FltSave.
                FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
                                               GetReg32(AMD64_FPSW)) * 10];
            return S_OK;
        }
        else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
        {
            Val->Type = REGVAL_VECTOR128;
            memcpy(Val->Bytes, (PUCHAR)&m_Context.Amd64Context.Xmm0 +
                   (RegNum - AMD64_XMM_FIRST) * 16, 16);
            return S_OK;
        }
        else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
        {
            Val->Type = REGVAL_FLOAT10;
            memcpy(Val->F10, &m_Context.Amd64Context.FltSave.
                   FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
                   sizeof(Val->F10));
            return S_OK;
        }

        //
        // The requested register is not in our current context, load up
        // a complete context
        //
        if ((Status = GetContextState(MCTX_FULL)) != S_OK)
        {
            return Status;
        }
        break;
    }

    //
    // We must have a complete context...
    //
    switch(RegNum)
    {
    case AMD64_RAX:
        Val->I64 = m_Context.Amd64Context.Rax;
        return S_OK;
    case AMD64_RCX:
        Val->I64 = m_Context.Amd64Context.Rcx;
        return S_OK;
    case AMD64_RDX:
        Val->I64 = m_Context.Amd64Context.Rdx;
        return S_OK;
    case AMD64_RBX:
        Val->I64 = m_Context.Amd64Context.Rbx;
        return S_OK;
    case AMD64_RSP:
        Val->I64 = m_Context.Amd64Context.Rsp;
        return S_OK;
    case AMD64_RBP:
        Val->I64 = m_Context.Amd64Context.Rbp;
        return S_OK;
    case AMD64_RSI:
        Val->I64 = m_Context.Amd64Context.Rsi;
        return S_OK;
    case AMD64_RDI:
        Val->I64 = m_Context.Amd64Context.Rdi;
        return S_OK;
    case AMD64_R8:
        Val->I64 = m_Context.Amd64Context.R8;
        return S_OK;
    case AMD64_R9:
        Val->I64 = m_Context.Amd64Context.R9;
        return S_OK;
    case AMD64_R10:
        Val->I64 = m_Context.Amd64Context.R10;
        return S_OK;
    case AMD64_R11:
        Val->I64 = m_Context.Amd64Context.R11;
        return S_OK;
    case AMD64_R12:
        Val->I64 = m_Context.Amd64Context.R12;
        return S_OK;
    case AMD64_R13:
        Val->I64 = m_Context.Amd64Context.R13;
        return S_OK;
    case AMD64_R14:
        Val->I64 = m_Context.Amd64Context.R14;
        return S_OK;
    case AMD64_R15:
        Val->I64 = m_Context.Amd64Context.R15;
        return S_OK;
    case AMD64_RIP:
        Val->I64 = m_Context.Amd64Context.Rip;
        return S_OK;
    case AMD64_EFL:
        Val->Type = REGVAL_INT32;
        Val->I64 = m_Context.Amd64Context.EFlags;
        return S_OK;
    case AMD64_CS:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegCs;
        return S_OK;
    case AMD64_DS:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegDs;
        return S_OK;
    case AMD64_ES:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegEs;
        return S_OK;
    case AMD64_FS:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegFs;
        return S_OK;
    case AMD64_GS:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegGs;
        return S_OK;
    case AMD64_SS:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.SegSs;
        return S_OK;
    case AMD64_DR0:
        Val->I64 = m_Context.Amd64Context.Dr0;
        return S_OK;
    case AMD64_DR1:
        Val->I64 = m_Context.Amd64Context.Dr1;
        return S_OK;
    case AMD64_DR2:
        Val->I64 = m_Context.Amd64Context.Dr2;
        return S_OK;
    case AMD64_DR3:
        Val->I64 = m_Context.Amd64Context.Dr3;
        return S_OK;
    case AMD64_DR6:
        Val->I64 = m_Context.Amd64Context.Dr6;
        return S_OK;
    case AMD64_DR7:
        Val->I64 = m_Context.Amd64Context.Dr7;
        return S_OK;
    case AMD64_FPCW:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.FltSave.ControlWord;
        return S_OK;
    case AMD64_FPSW:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.FltSave.StatusWord;
        return S_OK;
    case AMD64_FPTW:
        Val->Type = REGVAL_INT16;
        Val->I64 = m_Context.Amd64Context.FltSave.TagWord;
        return S_OK;
    case AMD64_MXCSR:
        Val->Type = REGVAL_INT32;
        Val->I64 = m_Context.Amd64Context.MxCsr;
        return S_OK;
    }

    if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
    {
        Val->Type = REGVAL_VECTOR64;
        Val->I64 = *(ULONG64 UNALIGNED*)&m_Context.Amd64Context.FltSave.
            FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
                                           GetReg32(AMD64_FPSW)) * 10];
        return S_OK;
    }
    else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
    {
        Val->Type = REGVAL_VECTOR128;
        memcpy(Val->Bytes, (PUCHAR)&m_Context.Amd64Context.Xmm0 +
               (RegNum - AMD64_XMM_FIRST) * 16, 16);
        return S_OK;
    }
    else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
    {
        Val->Type = REGVAL_FLOAT10;
        memcpy(Val->F10, &m_Context.Amd64Context.FltSave.
               FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
               sizeof(Val->F10));
        return S_OK;
    }

    if (IS_KERNEL_TARGET(m_Target))
    {
        switch(RegNum)
        {
        case AMD64_CR0:
            Val->I64 = m_Special.Amd64Special.Cr0;
            return S_OK;
        case AMD64_CR2:
            Val->I64 = m_Special.Amd64Special.Cr2;
            return S_OK;
        case AMD64_CR3:
            Val->I64 = m_Special.Amd64Special.Cr3;
            return S_OK;
        case AMD64_CR4:
            Val->I64 = m_Special.Amd64Special.Cr4;
            return S_OK;
        case AMD64_CR8:
            Val->I64 = m_Special.Amd64Special.Cr8;
            return S_OK;
        case AMD64_GDTR:
            Val->I64 = m_Special.Amd64Special.Gdtr.Base;
            return S_OK;
        case AMD64_GDTL:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Special.Amd64Special.Gdtr.Limit;
            return S_OK;
        case AMD64_IDTR:
            Val->I64 = m_Special.Amd64Special.Idtr.Base;
            return S_OK;
        case AMD64_IDTL:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Special.Amd64Special.Idtr.Limit;
            return S_OK;
        case AMD64_TR:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Special.Amd64Special.Tr;
            return S_OK;
        case AMD64_LDTR:
            Val->Type = REGVAL_INT16;
            Val->I64 = m_Special.Amd64Special.Ldtr;
            return S_OK;
        case AMD64_KMXCSR:
            Val->Type = REGVAL_INT32;
            Val->I64 = m_Special.Amd64Special.MxCsr;
            return S_OK;
        case AMD64_KDR0:
            Val->I64 = m_Special.Amd64Special.KernelDr0;
            return S_OK;
        case AMD64_KDR1:
            Val->I64 = m_Special.Amd64Special.KernelDr1;
            return S_OK;
        case AMD64_KDR2:
            Val->I64 = m_Special.Amd64Special.KernelDr2;
            return S_OK;
        case AMD64_KDR3:
            Val->I64 = m_Special.Amd64Special.KernelDr3;
            return S_OK;
        case AMD64_KDR6:
            Val->I64 = m_Special.Amd64Special.KernelDr6;
            return S_OK;
        case AMD64_KDR7:
            Val->I64 = m_Special.Amd64Special.KernelDr7;
            return S_OK;
        }
    }

    ErrOut("Amd64MachineInfo::GetVal: "
           "unknown register %lx requested\n", RegNum);
    return E_INVALIDARG;
}

  1583. HRESULT
  1584. Amd64MachineInfo::SetVal(ULONG RegNum, REGVAL* Val)
  1585. {
  1586. HRESULT Status;
  1587. if (m_ContextIsReadOnly)
  1588. {
  1589. return HRESULT_FROM_WIN32(ERROR_WRITE_FAULT);
  1590. }
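// Sub-registers (al, ax, eax, r8d and friends) are views of their
// parent registers, presumably composed at read time; writes must
// go through the full register, so reject them here.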
  1591. if (RegNum >= AMD64_SUBREG_BASE)
  1592. {
  1593. return E_INVALIDARG;
  1594. }
  1595. // Optimize away some common cases where registers are
  1596. // set to their current value.
  1597. if ((m_ContextState >= MCTX_PC && RegNum == AMD64_RIP &&
  1598. Val->I64 == m_Context.Amd64Context.Rip) ||
  1599. (((m_ContextState >= MCTX_DR67_REPORT &&
  1600. m_ContextState <= MCTX_REPORT) ||
  1601. m_ContextState >= MCTX_FULL) && RegNum == AMD64_KDR7 &&
  1602. Val->I64 == m_Special.Amd64Special.KernelDr7))
  1603. {
  1604. return S_OK;
  1605. }
  1606. if ((Status = GetContextState(MCTX_DIRTY)) != S_OK)
  1607. {
  1608. return Status;
  1609. }
  1610. if (RegNum >= AMD64_MM_FIRST && RegNum <= AMD64_MM_LAST)
  1611. {
  1612. *(ULONG64 UNALIGNED*)&m_Context.Amd64Context.FltSave.
  1613. FloatRegisters[GetMmxRegOffset(RegNum - AMD64_MM_FIRST,
  1614. GetReg32(AMD64_FPSW)) * 10] =
  1615. Val->I64;
  1616. goto Notify;
  1617. }
  1618. else if (RegNum >= AMD64_XMM_FIRST && RegNum <= AMD64_XMM_LAST)
  1619. {
  1620. memcpy((PUCHAR)&m_Context.Amd64Context.Xmm0 +
  1621. (RegNum - AMD64_XMM_FIRST) * 16, Val->Bytes, 16);
  1622. goto Notify;
  1623. }
  1624. else if (RegNum >= AMD64_ST_FIRST && RegNum <= AMD64_ST_LAST)
  1625. {
  1626. memcpy(&m_Context.Amd64Context.FltSave.
  1627. FloatRegisters[(RegNum - AMD64_ST_FIRST) * 10],
  1628. Val->F10, sizeof(Val->F10));
  1629. goto Notify;
  1630. }
  1631. BOOL Recognized;
  1632. Recognized = TRUE;
  1633. switch(RegNum)
  1634. {
  1635. case AMD64_RAX:
  1636. m_Context.Amd64Context.Rax = Val->I64;
  1637. break;
  1638. case AMD64_RCX:
  1639. m_Context.Amd64Context.Rcx = Val->I64;
  1640. break;
  1641. case AMD64_RDX:
  1642. m_Context.Amd64Context.Rdx = Val->I64;
  1643. break;
  1644. case AMD64_RBX:
  1645. m_Context.Amd64Context.Rbx = Val->I64;
  1646. break;
  1647. case AMD64_RSP:
  1648. m_Context.Amd64Context.Rsp = Val->I64;
  1649. break;
  1650. case AMD64_RBP:
  1651. m_Context.Amd64Context.Rbp = Val->I64;
  1652. break;
  1653. case AMD64_RSI:
  1654. m_Context.Amd64Context.Rsi = Val->I64;
  1655. break;
  1656. case AMD64_RDI:
  1657. m_Context.Amd64Context.Rdi = Val->I64;
  1658. break;
  1659. case AMD64_R8:
  1660. m_Context.Amd64Context.R8 = Val->I64;
  1661. break;
  1662. case AMD64_R9:
  1663. m_Context.Amd64Context.R9 = Val->I64;
  1664. break;
  1665. case AMD64_R10:
  1666. m_Context.Amd64Context.R10 = Val->I64;
  1667. break;
  1668. case AMD64_R11:
  1669. m_Context.Amd64Context.R11 = Val->I64;
  1670. break;
  1671. case AMD64_R12:
  1672. m_Context.Amd64Context.R12 = Val->I64;
  1673. break;
  1674. case AMD64_R13:
  1675. m_Context.Amd64Context.R13 = Val->I64;
  1676. break;
  1677. case AMD64_R14:
  1678. m_Context.Amd64Context.R14 = Val->I64;
  1679. break;
  1680. case AMD64_R15:
  1681. m_Context.Amd64Context.R15 = Val->I64;
  1682. break;
  1683. case AMD64_RIP:
  1684. m_Context.Amd64Context.Rip = Val->I64;
  1685. break;
  1686. case AMD64_EFL:
  1687. if (IS_KERNEL_TARGET(m_Target))
  1688. {
  1689. // leave TF clear
  1690. m_Context.Amd64Context.EFlags = Val->I32 & ~0x100;
  1691. }
  1692. else
  1693. {
  1694. // allow TF set
  1695. m_Context.Amd64Context.EFlags = Val->I32;
  1696. }
  1697. break;
  1698. case AMD64_CS:
  1699. m_Context.Amd64Context.SegCs = Val->I16;
  1700. m_SegRegDesc[SEGREG_CODE].Flags = SEGDESC_INVALID;
  1701. break;
  1702. case AMD64_DS:
  1703. m_Context.Amd64Context.SegDs = Val->I16;
  1704. m_SegRegDesc[SEGREG_DATA].Flags = SEGDESC_INVALID;
  1705. break;
  1706. case AMD64_ES:
  1707. m_Context.Amd64Context.SegEs = Val->I16;
  1708. m_SegRegDesc[SEGREG_ES].Flags = SEGDESC_INVALID;
  1709. break;
  1710. case AMD64_FS:
  1711. m_Context.Amd64Context.SegFs = Val->I16;
  1712. m_SegRegDesc[SEGREG_FS].Flags = SEGDESC_INVALID;
  1713. break;
  1714. case AMD64_GS:
  1715. m_Context.Amd64Context.SegGs = Val->I16;
  1716. m_SegRegDesc[SEGREG_GS].Flags = SEGDESC_INVALID;
  1717. break;
  1718. case AMD64_SS:
  1719. m_Context.Amd64Context.SegSs = Val->I16;
  1720. m_SegRegDesc[SEGREG_STACK].Flags = SEGDESC_INVALID;
  1721. break;
  1722. case AMD64_DR0:
  1723. m_Context.Amd64Context.Dr0 = Val->I64;
  1724. break;
  1725. case AMD64_DR1:
  1726. m_Context.Amd64Context.Dr1 = Val->I64;
  1727. break;
  1728. case AMD64_DR2:
  1729. m_Context.Amd64Context.Dr2 = Val->I64;
  1730. break;
  1731. case AMD64_DR3:
  1732. m_Context.Amd64Context.Dr3 = Val->I64;
  1733. break;
  1734. case AMD64_DR6:
  1735. m_Context.Amd64Context.Dr6 = Val->I64;
  1736. break;
  1737. case AMD64_DR7:
  1738. m_Context.Amd64Context.Dr7 = Val->I64;
  1739. break;
  1740. case AMD64_FPCW:
  1741. m_Context.Amd64Context.FltSave.ControlWord = Val->I16;
  1742. break;
  1743. case AMD64_FPSW:
  1744. m_Context.Amd64Context.FltSave.StatusWord = Val->I16;
  1745. break;
  1746. case AMD64_FPTW:
  1747. m_Context.Amd64Context.FltSave.TagWord = Val->I16;
  1748. break;
  1749. case AMD64_MXCSR:
  1750. m_Context.Amd64Context.MxCsr = Val->I32;
  1751. break;
  1752. default:
  1753. Recognized = FALSE;
  1754. break;
  1755. }
  1756. if (!Recognized && IS_KERNEL_TARGET(m_Target))
  1757. {
  1758. Recognized = TRUE;
  1759. switch(RegNum)
  1760. {
  1761. case AMD64_CR0:
  1762. m_Special.Amd64Special.Cr0 = Val->I64;
  1763. break;
  1764. case AMD64_CR2:
  1765. m_Special.Amd64Special.Cr2 = Val->I64;
  1766. break;
  1767. case AMD64_CR3:
  1768. m_Special.Amd64Special.Cr3 = Val->I64;
  1769. break;
  1770. case AMD64_CR4:
  1771. m_Special.Amd64Special.Cr4 = Val->I64;
  1772. break;
  1773. case AMD64_CR8:
  1774. m_Special.Amd64Special.Cr8 = Val->I64;
  1775. break;
  1776. case AMD64_GDTR:
  1777. m_Special.Amd64Special.Gdtr.Base = Val->I64;
  1778. break;
  1779. case AMD64_GDTL:
  1780. m_Special.Amd64Special.Gdtr.Limit = Val->I16;
  1781. break;
  1782. case AMD64_IDTR:
  1783. m_Special.Amd64Special.Idtr.Base = Val->I64;
  1784. break;
  1785. case AMD64_IDTL:
  1786. m_Special.Amd64Special.Idtr.Limit = Val->I16;
  1787. break;
  1788. case AMD64_TR:
  1789. m_Special.Amd64Special.Tr = Val->I16;
  1790. break;
  1791. case AMD64_LDTR:
  1792. m_Special.Amd64Special.Ldtr = Val->I16;
  1793. break;
  1794. case AMD64_KMXCSR:
  1795. m_Special.Amd64Special.MxCsr = Val->I32;
  1796. break;
  1797. case AMD64_KDR0:
  1798. m_Special.Amd64Special.KernelDr0 = Val->I64;
  1799. break;
  1800. case AMD64_KDR1:
  1801. m_Special.Amd64Special.KernelDr1 = Val->I64;
  1802. break;
  1803. case AMD64_KDR2:
  1804. m_Special.Amd64Special.KernelDr2 = Val->I64;
  1805. break;
  1806. case AMD64_KDR3:
  1807. m_Special.Amd64Special.KernelDr3 = Val->I64;
  1808. break;
  1809. case AMD64_KDR6:
  1810. m_Special.Amd64Special.KernelDr6 = Val->I64;
  1811. break;
  1812. case AMD64_KDR7:
  1813. m_Special.Amd64Special.KernelDr7 = Val->I64;
  1814. break;
  1815. default:
  1816. Recognized = FALSE;
  1817. break;
  1818. }
  1819. }
  1820. if (!Recognized)
  1821. {
  1822. ErrOut("Amd64MachineInfo::SetVal: "
  1823. "unknown register %lx requested\n", RegNum);
  1824. return E_INVALIDARG;
  1825. }
  1826. Notify:
  1827. NotifyChangeDebuggeeState(DEBUG_CDS_REGISTERS,
  1828. RegCountFromIndex(RegNum));
  1829. return S_OK;
  1830. }
  1831. void
  1832. Amd64MachineInfo::GetPC(PADDR Address)
  1833. {
  1834. // Right now assume that user-mode is always flat 64-bit.
  1835. // This may need to change depending on what WOW support exists.
  1836. if (IS_USER_TARGET(m_Target))
  1837. {
  1838. ADDRFLAT(Address, GetReg64(AMD64_RIP));
  1839. }
  1840. else
  1841. {
  1842. FormAddr(SEGREG_CODE, GetReg64(AMD64_RIP),
  1843. FORM_CODE | FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)),
  1844. Address);
  1845. }
  1846. }
  1847. void
  1848. Amd64MachineInfo::SetPC(PADDR paddr)
  1849. {
  1850. // We set RIP to the offset (the non-translated value),
  1851. // because we may not be in "flat" mode.
  1852. SetReg64(AMD64_RIP, Off(*paddr));
  1853. }
  1854. void
  1855. Amd64MachineInfo::GetFP(PADDR Addr)
  1856. {
  1857. // Right now assume that user-mode is always flat 64-bit.
  1858. // This may need to change depending on what WOW support exists.
  1859. if (IS_USER_TARGET(m_Target))
  1860. {
  1861. ADDRFLAT(Addr, GetReg64(AMD64_RBP));
  1862. }
  1863. else
  1864. {
  1865. FormAddr(SEGREG_STACK, GetReg64(AMD64_RBP),
  1866. FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)), Addr);
  1867. }
  1868. }
  1869. void
  1870. Amd64MachineInfo::GetSP(PADDR Addr)
  1871. {
  1872. // Right now assume that user-mode is always flat 64-bit.
  1873. // This may need to change depending on what WOW support exists.
  1874. if (IS_USER_TARGET(m_Target))
  1875. {
  1876. ADDRFLAT(Addr, GetReg64(AMD64_RSP));
  1877. }
  1878. else
  1879. {
  1880. FormAddr(SEGREG_STACK, GetReg64(AMD64_RSP),
  1881. FORM_SEGREG | X86_FORM_VM86(GetReg32(AMD64_EFL)), Addr);
  1882. }
  1883. }
  1884. ULONG64
  1885. Amd64MachineInfo::GetArgReg(void)
  1886. {
  1887. return GetReg64(AMD64_RCX);
  1888. }
  1889. ULONG64
  1890. Amd64MachineInfo::GetRetReg(void)
  1891. {
  1892. return GetReg64(AMD64_RAX);
  1893. }
  1894. ULONG
  1895. Amd64MachineInfo::GetSegRegNum(ULONG SegReg)
  1896. {
  1897. //
  1898. // BUGBUG forrestf: the following is here as a workaround for segment
  1899. // decoding that isn't working correctly yet.
  1900. //
  1901. if (IS_USER_TARGET(m_Target))
  1902. {
  1903. return 0;
  1904. }
  1905. switch(SegReg)
  1906. {
  1907. case SEGREG_CODE:
  1908. return AMD64_CS;
  1909. case SEGREG_DATA:
  1910. return AMD64_DS;
  1911. case SEGREG_STACK:
  1912. return AMD64_SS;
  1913. case SEGREG_ES:
  1914. return AMD64_ES;
  1915. case SEGREG_FS:
  1916. return AMD64_FS;
  1917. case SEGREG_GS:
  1918. return AMD64_GS;
  1919. case SEGREG_LDT:
  1920. return AMD64_LDTR;
  1921. }
  1922. return 0;
  1923. }
  1924. HRESULT
  1925. Amd64MachineInfo::GetSegRegDescriptor(ULONG SegReg, PDESCRIPTOR64 Desc)
  1926. {
  1927. if (SegReg == SEGREG_GDT)
  1928. {
  1929. Desc->Base = GetReg64(AMD64_GDTR);
  1930. Desc->Limit = GetReg32(AMD64_GDTL);
  1931. Desc->Flags = 0;
  1932. return S_OK;
  1933. }
  1934. // Check and see if we already have a cached descriptor.
  1935. if (m_SegRegDesc[SegReg].Flags != SEGDESC_INVALID)
  1936. {
  1937. *Desc = m_SegRegDesc[SegReg];
  1938. return S_OK;
  1939. }
  1940. HRESULT Status;
  1941. // Attempt to retrieve segment descriptors directly.
  1942. if ((Status = GetContextState(MCTX_FULL)) != S_OK)
  1943. {
  1944. return Status;
  1945. }
  1946. // Check and see if we now have a cached descriptor.
  1947. if (m_SegRegDesc[SegReg].Flags != SEGDESC_INVALID)
  1948. {
  1949. *Desc = m_SegRegDesc[SegReg];
  1950. return S_OK;
  1951. }
  1952. //
  1953. // Direct information is not available so look things up
  1954. // in the descriptor tables.
  1955. //
  1956. ULONG RegNum = GetSegRegNum(SegReg);
  1957. if (RegNum == 0)
  1958. {
  1959. return E_INVALIDARG;
  1960. }
  1961. // Do a quick sanity test to prevent bad values
  1962. // from causing problems.
  1963. ULONG Selector = GetReg32(RegNum);
  1964. if (SegReg == SEGREG_LDT && (Selector & 4))
  1965. {
1966. // The selector in LDTR has the table-indicator bit set,
1967. // claiming it comes from the LDT. That is invalid: the
1968. // LDTR selector must always reference the GDT.
  1969. ErrOut("Invalid LDTR contents: %04X\n", Selector);
  1970. return E_FAIL;
  1971. }
  1972. return m_Target->GetSelDescriptor(m_Target->m_RegContextThread, this,
  1973. Selector, Desc);
  1974. }
  1975. void
  1976. Amd64MachineInfo::OutputAll(ULONG Mask, ULONG OutMask)
  1977. {
  1978. if (GetContextState(MCTX_FULL) != S_OK)
  1979. {
  1980. ErrOut("Unable to retrieve register information\n");
  1981. return;
  1982. }
  1983. if (Mask & (REGALL_INT32 | REGALL_INT64))
  1984. {
  1985. ULONG Efl;
  1986. MaskOut(OutMask, "rax=%016I64x rbx=%016I64x rcx=%016I64x\n",
  1987. GetReg64(AMD64_RAX), GetReg64(AMD64_RBX),
  1988. GetReg64(AMD64_RCX));
  1989. MaskOut(OutMask, "rdx=%016I64x rsi=%016I64x rdi=%016I64x\n",
  1990. GetReg64(AMD64_RDX), GetReg64(AMD64_RSI),
  1991. GetReg64(AMD64_RDI));
  1992. MaskOut(OutMask, "rip=%016I64x rsp=%016I64x rbp=%016I64x\n",
  1993. GetReg64(AMD64_RIP), GetReg64(AMD64_RSP),
  1994. GetReg64(AMD64_RBP));
  1995. MaskOut(OutMask, " r8=%016I64x r9=%016I64x r10=%016I64x\n",
  1996. GetReg64(AMD64_R8), GetReg64(AMD64_R9),
  1997. GetReg64(AMD64_R10));
  1998. MaskOut(OutMask, "r11=%016I64x r12=%016I64x r13=%016I64x\n",
  1999. GetReg64(AMD64_R11), GetReg64(AMD64_R12),
  2000. GetReg64(AMD64_R13));
  2001. MaskOut(OutMask, "r14=%016I64x r15=%016I64x\n",
  2002. GetReg64(AMD64_R14), GetReg64(AMD64_R15));
  2003. Efl = GetReg32(AMD64_EFL);
  2004. MaskOut(OutMask, "iopl=%1lx %s %s %s %s %s %s %s %s %s %s\n",
  2005. ((Efl >> X86_SHIFT_FLAGIOPL) & X86_BIT_FLAGIOPL),
  2006. (Efl & X86_BIT_FLAGVIP) ? "vip" : " ",
  2007. (Efl & X86_BIT_FLAGVIF) ? "vif" : " ",
  2008. (Efl & X86_BIT_FLAGOF) ? "ov" : "nv",
  2009. (Efl & X86_BIT_FLAGDF) ? "dn" : "up",
  2010. (Efl & X86_BIT_FLAGIF) ? "ei" : "di",
  2011. (Efl & X86_BIT_FLAGSF) ? "ng" : "pl",
  2012. (Efl & X86_BIT_FLAGZF) ? "zr" : "nz",
  2013. (Efl & X86_BIT_FLAGAF) ? "ac" : "na",
  2014. (Efl & X86_BIT_FLAGPF) ? "po" : "pe",
  2015. (Efl & X86_BIT_FLAGCF) ? "cy" : "nc");
  2016. }
  2017. if (Mask & REGALL_SEGREG)
  2018. {
  2019. MaskOut(OutMask, "cs=%04lx ss=%04lx ds=%04lx es=%04lx fs=%04lx "
  2020. "gs=%04lx efl=%08lx\n",
  2021. GetReg32(AMD64_CS),
  2022. GetReg32(AMD64_SS),
  2023. GetReg32(AMD64_DS),
  2024. GetReg32(AMD64_ES),
  2025. GetReg32(AMD64_FS),
  2026. GetReg32(AMD64_GS),
  2027. GetReg32(AMD64_EFL));
  2028. }
  2029. if (Mask & REGALL_FLOAT)
  2030. {
  2031. ULONG i;
  2032. REGVAL Val;
  2033. char Buf[32];
  2034. MaskOut(OutMask, "fpcw=%04X fpsw=%04X fptw=%04X\n",
  2035. GetReg32(AMD64_FPCW),
  2036. GetReg32(AMD64_FPSW),
  2037. GetReg32(AMD64_FPTW));
  2038. for (i = AMD64_ST_FIRST; i <= AMD64_ST_LAST; i++)
  2039. {
  2040. GetVal(i, &Val);
  2041. _uldtoa((_ULDOUBLE *)&Val.F10, sizeof(Buf), Buf);
  2042. MaskOut(OutMask, "st%d=%s ", i - AMD64_ST_FIRST, Buf);
  2043. i++;
  2044. GetVal(i, &Val);
  2045. _uldtoa((_ULDOUBLE *)&Val.F10, sizeof(Buf), Buf);
  2046. MaskOut(OutMask, "st%d=%s\n", i - AMD64_ST_FIRST, Buf);
  2047. }
  2048. }
  2049. if (Mask & REGALL_MMXREG)
  2050. {
  2051. ULONG i;
  2052. REGVAL Val;
  2053. for (i = AMD64_MM_FIRST; i <= AMD64_MM_LAST; i++)
  2054. {
  2055. GetVal(i, &Val);
  2056. MaskOut(OutMask, "mm%d=%016I64x ", i - AMD64_MM_FIRST, Val.I64);
  2057. i++;
  2058. GetVal(i, &Val);
  2059. MaskOut(OutMask, "mm%d=%016I64x\n", i - AMD64_MM_FIRST, Val.I64);
  2060. }
  2061. }
  2062. if (Mask & REGALL_XMMREG)
  2063. {
  2064. ULONG i;
  2065. REGVAL Val;
  2066. for (i = AMD64_XMM_FIRST; i <= AMD64_XMM_LAST; i++)
  2067. {
  2068. GetVal(i, &Val);
  2069. MaskOut(OutMask, "xmm%d=%hg %hg %hg %hg\n", i - AMD64_XMM_FIRST,
  2070. *(float *)&Val.Bytes[3 * sizeof(float)],
  2071. *(float *)&Val.Bytes[2 * sizeof(float)],
  2072. *(float *)&Val.Bytes[1 * sizeof(float)],
  2073. *(float *)&Val.Bytes[0 * sizeof(float)]);
  2074. }
  2075. }
  2076. if (Mask & REGALL_CREG)
  2077. {
  2078. MaskOut(OutMask, "cr0=%016I64x cr2=%016I64x cr3=%016I64x\n",
  2079. GetReg64(AMD64_CR0),
  2080. GetReg64(AMD64_CR2),
  2081. GetReg64(AMD64_CR3));
  2082. MaskOut(OutMask, "cr8=%016I64x\n",
  2083. GetReg64(AMD64_CR8));
  2084. }
  2085. if (Mask & REGALL_DREG)
  2086. {
  2087. MaskOut(OutMask, "dr0=%016I64x dr1=%016I64x dr2=%016I64x\n",
  2088. GetReg64(AMD64_DR0),
  2089. GetReg64(AMD64_DR1),
  2090. GetReg64(AMD64_DR2));
  2091. MaskOut(OutMask, "dr3=%016I64x dr6=%016I64x dr7=%016I64x",
  2092. GetReg64(AMD64_DR3),
  2093. GetReg64(AMD64_DR6),
  2094. GetReg64(AMD64_DR7));
  2095. if (IS_USER_TARGET(m_Target))
  2096. {
  2097. MaskOut(OutMask, "\n");
  2098. }
  2099. else
  2100. {
  2101. MaskOut(OutMask, " cr4=%016I64x\n", GetReg64(AMD64_CR4));
  2102. MaskOut(OutMask, "kdr0=%016I64x kdr1=%016I64x kdr2=%016I64x\n",
  2103. GetReg64(AMD64_KDR0),
  2104. GetReg64(AMD64_KDR1),
  2105. GetReg64(AMD64_KDR2));
  2106. MaskOut(OutMask, "kdr3=%016I64x kdr6=%016I64x kdr7=%016I64x",
  2107. GetReg64(AMD64_KDR3),
  2108. GetReg64(AMD64_KDR6),
  2109. GetReg64(AMD64_KDR7));
  2110. }
  2111. }
  2112. if (Mask & REGALL_DESC)
  2113. {
  2114. MaskOut(OutMask, "gdtr=%016I64x gdtl=%04lx idtr=%016I64x "
  2115. "idtl=%04lx tr=%04lx ldtr=%04x\n",
  2116. GetReg64(AMD64_GDTR),
  2117. GetReg32(AMD64_GDTL),
  2118. GetReg64(AMD64_IDTR),
  2119. GetReg32(AMD64_IDTL),
  2120. GetReg32(AMD64_TR),
  2121. GetReg32(AMD64_LDTR));
  2122. }
  2123. }
  2124. HRESULT
  2125. Amd64MachineInfo::SetAndOutputTrapFrame(ULONG64 TrapBase,
  2126. PCROSS_PLATFORM_CONTEXT Context)
  2127. {
  2128. return SetAndOutputContext(Context, TRUE, REGALL_INT64);
  2129. }
  2130. TRACEMODE
  2131. Amd64MachineInfo::GetTraceMode (void)
  2132. {
  2133. if (IS_KERNEL_TARGET(m_Target))
  2134. {
  2135. return m_TraceMode;
  2136. }
  2137. else
  2138. {
  2139. return ((GetReg32(AMD64_EFL) & X86_BIT_FLAGTF) != 0) ?
  2140. TRACE_INSTRUCTION : TRACE_NONE;
  2141. }
  2142. }
  2143. void
  2144. Amd64MachineInfo::SetTraceMode (TRACEMODE Mode)
  2145. {
  2146. // (XXX olegk - review for TRACE_TAKEN_BRANCH)
  2147. DBG_ASSERT(Mode != TRACE_TAKEN_BRANCH);
  2148. if (IS_KERNEL_TARGET(m_Target))
  2149. {
  2150. m_TraceMode = Mode;
  2151. }
  2152. else
  2153. {
  2154. ULONG Efl = GetReg32(AMD64_EFL);
  2155. switch (Mode)
  2156. {
  2157. case TRACE_NONE:
  2158. Efl &= ~X86_BIT_FLAGTF;
  2159. break;
  2160. case TRACE_INSTRUCTION:
  2161. Efl |= X86_BIT_FLAGTF;
  2162. break;
  2163. }
  2164. SetReg32(AMD64_EFL, Efl);
  2165. }
  2166. }
  2167. BOOL
  2168. Amd64MachineInfo::IsStepStatusSupported(ULONG Status)
  2169. {
  2170. switch (Status)
  2171. {
  2172. case DEBUG_STATUS_STEP_INTO:
  2173. case DEBUG_STATUS_STEP_OVER:
  2174. return TRUE;
  2175. default:
  2176. return FALSE;
  2177. }
  2178. }
  2179. void
  2180. Amd64MachineInfo::KdUpdateControlSet
  2181. (PDBGKD_ANY_CONTROL_SET ControlSet)
  2182. {
  2183. ControlSet->Amd64ControlSet.TraceFlag =
  2184. (GetTraceMode() == TRACE_INSTRUCTION);
  2185. ControlSet->Amd64ControlSet.Dr7 = GetReg64(AMD64_KDR7);
  2186. BpOut("UpdateControlSet(%d) trace %d, DR7 %I64X\n",
  2187. m_Target->m_RegContextProcessor,
  2188. ControlSet->Amd64ControlSet.TraceFlag,
  2189. ControlSet->Amd64ControlSet.Dr7);
  2190. if (!g_WatchFunctions.IsStarted() && g_WatchBeginCurFunc != 1)
  2191. {
  2192. ControlSet->Amd64ControlSet.CurrentSymbolStart = 0;
  2193. ControlSet->Amd64ControlSet.CurrentSymbolEnd = 0;
  2194. }
  2195. else
  2196. {
  2197. ControlSet->Amd64ControlSet.CurrentSymbolStart = g_WatchBeginCurFunc;
  2198. ControlSet->Amd64ControlSet.CurrentSymbolEnd = g_WatchEndCurFunc;
  2199. }
  2200. }
  2201. ULONG
  2202. Amd64MachineInfo::ExecutingMachine(void)
  2203. {
  2204. if (IS_USER_TARGET(m_Target) &&
  2205. IsIa32CodeSegment())
  2206. {
  2207. return IMAGE_FILE_MACHINE_I386;
  2208. }
  2209. return IMAGE_FILE_MACHINE_AMD64;
  2210. }
  2211. HRESULT
  2212. Amd64MachineInfo::SetPageDirectory(ThreadInfo* Thread,
  2213. ULONG Idx, ULONG64 PageDir,
  2214. PULONG NextIdx)
  2215. {
  2216. HRESULT Status;
  2217. *NextIdx = PAGE_DIR_COUNT;
  2218. if (PageDir == 0)
  2219. {
  2220. if ((Status = m_Target->ReadImplicitProcessInfoPointer
  2221. (Thread,
  2222. m_Target->m_KdDebuggerData.OffsetEprocessDirectoryTableBase,
  2223. &PageDir)) != S_OK)
  2224. {
  2225. return Status;
  2226. }
  2227. }
  2228. // Sanitize the value.
  2229. PageDir &= AMD64_PDBR_MASK;
  2230. // There is only one page directory so update all the slots.
  2231. m_PageDirectories[PAGE_DIR_USER] = PageDir;
  2232. m_PageDirectories[PAGE_DIR_SESSION] = PageDir;
  2233. m_PageDirectories[PAGE_DIR_KERNEL] = PageDir;
  2234. return S_OK;
  2235. }
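// Software (paged-out) PTE layout assumed by the macros below:
// the paging-file index sits at bit 28 (masked by
// MAX_PAGING_FILE_MASK) and the upper 32 bits hold the page-granular
// offset within that file.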
  2236. #define AMD64_PAGE_FILE_INDEX(Entry) \
  2237. (((ULONG)(Entry) >> 28) & MAX_PAGING_FILE_MASK)
  2238. #define AMD64_PAGE_FILE_OFFSET(Entry) \
  2239. (((Entry) >> 32) << AMD64_PAGE_SHIFT)
  2240. HRESULT
  2241. Amd64MachineInfo::GetVirtualTranslationPhysicalOffsets(ThreadInfo* Thread,
  2242. ULONG64 Virt,
  2243. PULONG64 Offsets,
  2244. ULONG OffsetsSize,
  2245. PULONG Levels,
  2246. PULONG PfIndex,
  2247. PULONG64 LastVal)
  2248. {
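// Walk the four-level AMD64 page tables (PML4 -> PDP -> PD -> PT).
// At each level the entry is either present (next table read from
// physical memory), paged out (next table read via ReadPageFile),
// or zero (translation fails). Every physical offset visited is
// optionally recorded in the caller's Offsets array.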
  2249. HRESULT Status;
  2250. *Levels = 0;
  2251. if (m_Translating)
  2252. {
  2253. return E_UNEXPECTED;
  2254. }
  2255. m_Translating = TRUE;
  2256. //
  2257. // Reset the page directory in case it was 0
  2258. //
  2259. if (m_PageDirectories[PAGE_DIR_SINGLE] == 0)
  2260. {
  2261. if ((Status = SetDefaultPageDirectories(Thread,
  2262. 1 << PAGE_DIR_SINGLE)) != S_OK)
  2263. {
  2264. m_Translating = FALSE;
  2265. return Status;
  2266. }
  2267. }
  2268. KdOut("Amd64VtoP: Virt %s, pagedir %s\n",
  2269. FormatMachineAddr64(this, Virt),
  2270. FormatDisp64(m_PageDirectories[PAGE_DIR_SINGLE]));
  2271. (*Levels)++;
  2272. if (Offsets != NULL && OffsetsSize > 0)
  2273. {
  2274. *Offsets++ = m_PageDirectories[PAGE_DIR_SINGLE];
  2275. OffsetsSize--;
  2276. }
  2277. //
  2278. // Certain ranges of the system are mapped directly.
  2279. //
  2280. if ((Virt >= AMD64_PHYSICAL_START) && (Virt <= AMD64_PHYSICAL_END))
  2281. {
  2282. *LastVal = Virt - AMD64_PHYSICAL_START;
  2283. KdOut("Amd64VtoP: Direct phys %s\n",
  2284. FormatDisp64(*LastVal));
  2285. (*Levels)++;
  2286. if (Offsets != NULL && OffsetsSize > 0)
  2287. {
  2288. *Offsets++ = *LastVal;
  2289. OffsetsSize--;
  2290. }
  2291. m_Translating = FALSE;
  2292. return S_OK;
  2293. }
  2294. ULONG64 Addr;
  2295. ULONG64 Entry;
  2296. // Read the Page Map Level 4 entry.
  2297. Addr = (((Virt >> AMD64_PML4E_SHIFT) & AMD64_PML4E_MASK) *
  2298. sizeof(Entry)) + m_PageDirectories[PAGE_DIR_SINGLE];
  2299. KdOut("Amd64VtoP: PML4E %s\n", FormatDisp64(Addr));
  2300. (*Levels)++;
  2301. if (Offsets != NULL && OffsetsSize > 0)
  2302. {
  2303. *Offsets++ = Addr;
  2304. OffsetsSize--;
  2305. }
  2306. if ((Status = m_Target->
  2307. ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
  2308. {
  2309. KdOut("Amd64VtoP: PML4E read error 0x%X\n", Status);
  2310. m_Translating = FALSE;
  2311. return Status;
  2312. }
  2313. // Read the Page Directory Pointer entry.
  2314. if (Entry == 0)
  2315. {
  2316. KdOut("Amd64VtoP: zero PML4E\n");
  2317. m_Translating = FALSE;
  2318. return HR_PAGE_NOT_AVAILABLE;
  2319. }
  2320. else if (!(Entry & 1))
  2321. {
  2322. Addr = (((Virt >> AMD64_PDPE_SHIFT) & AMD64_PDPE_MASK) *
  2323. sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);
  2324. KdOut("Amd64VtoP: pagefile PDPE %d:%s\n",
  2325. AMD64_PAGE_FILE_INDEX(Entry), FormatDisp64(Addr));
  2326. if ((Status = m_Target->
  2327. ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
  2328. &Entry, sizeof(Entry))) != S_OK)
  2329. {
  2330. KdOut("Amd64VtoP: PML4E not present, 0x%X\n", Status);
  2331. m_Translating = FALSE;
  2332. return Status;
  2333. }
  2334. }
  2335. else
  2336. {
  2337. Addr = (((Virt >> AMD64_PDPE_SHIFT) & AMD64_PDPE_MASK) *
  2338. sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);
  2339. KdOut("Amd64VtoP: PDPE %s\n", FormatDisp64(Addr));
  2340. (*Levels)++;
  2341. if (Offsets != NULL && OffsetsSize > 0)
  2342. {
  2343. *Offsets++ = Addr;
  2344. OffsetsSize--;
  2345. }
  2346. if ((Status = m_Target->
  2347. ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
  2348. {
  2349. KdOut("Amd64VtoP: PDPE read error 0x%X\n", Status);
  2350. m_Translating = FALSE;
  2351. return Status;
  2352. }
  2353. }
  2354. // Read the Page Directory entry.
  2355. if (Entry == 0)
  2356. {
  2357. KdOut("Amd64VtoP: zero PDPE\n");
  2358. m_Translating = FALSE;
  2359. return HR_PAGE_NOT_AVAILABLE;
  2360. }
  2361. else if (!(Entry & 1))
  2362. {
  2363. Addr = (((Virt >> AMD64_PDE_SHIFT) & AMD64_PDE_MASK) *
  2364. sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);
  2365. KdOut("Amd64VtoP: pagefile PDE %d:%s\n",
  2366. AMD64_PAGE_FILE_INDEX(Entry), FormatDisp64(Addr));
  2367. if ((Status = m_Target->
  2368. ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
  2369. &Entry, sizeof(Entry))) != S_OK)
  2370. {
  2371. KdOut("Amd64VtoP: PDPE not present, 0x%X\n", Status);
  2372. m_Translating = FALSE;
  2373. return Status;
  2374. }
  2375. }
  2376. else
  2377. {
  2378. Addr = (((Virt >> AMD64_PDE_SHIFT) & AMD64_PDE_MASK) *
  2379. sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);
  2380. KdOut("Amd64VtoP: PDE %s\n", FormatDisp64(Addr));
  2381. (*Levels)++;
  2382. if (Offsets != NULL && OffsetsSize > 0)
  2383. {
  2384. *Offsets++ = Addr;
  2385. OffsetsSize--;
  2386. }
  2387. if ((Status = m_Target->
  2388. ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
  2389. {
  2390. KdOut("Amd64VtoP: PDE read error 0x%X\n", Status);
  2391. m_Translating = FALSE;
  2392. return Status;
  2393. }
  2394. }
2395. // Check for a large page. Large pages are never paged out,
2396. // so the present bit must be set along with the large-page bit.
  2397. if ((Entry & (AMD64_LARGE_PAGE_MASK | 1)) == (AMD64_LARGE_PAGE_MASK | 1))
  2398. {
  2399. *LastVal = ((Entry & ~(AMD64_LARGE_PAGE_SIZE - 1)) |
  2400. (Virt & (AMD64_LARGE_PAGE_SIZE - 1)));
  2401. KdOut("Amd64VtoP: Large page mapped phys %s\n",
  2402. FormatDisp64(*LastVal));
  2403. (*Levels)++;
  2404. if (Offsets != NULL && OffsetsSize > 0)
  2405. {
  2406. *Offsets++ = *LastVal;
  2407. OffsetsSize--;
  2408. }
  2409. m_Translating = FALSE;
  2410. return S_OK;
  2411. }
  2412. // Read the Page Table entry.
  2413. if (Entry == 0)
  2414. {
  2415. KdOut("Amd64VtoP: zero PDE\n");
  2416. m_Translating = FALSE;
  2417. return HR_PAGE_NOT_AVAILABLE;
  2418. }
  2419. else if (!(Entry & 1))
  2420. {
  2421. Addr = (((Virt >> AMD64_PTE_SHIFT) & AMD64_PTE_MASK) *
  2422. sizeof(Entry)) + AMD64_PAGE_FILE_OFFSET(Entry);
  2423. KdOut("Amd64VtoP: pagefile PTE %d:%s\n",
  2424. AMD64_PAGE_FILE_INDEX(Entry), FormatDisp64(Addr));
  2425. if ((Status = m_Target->
  2426. ReadPageFile(AMD64_PAGE_FILE_INDEX(Entry), Addr,
  2427. &Entry, sizeof(Entry))) != S_OK)
  2428. {
  2429. KdOut("Amd64VtoP: PDE not present, 0x%X\n", Status);
  2430. m_Translating = FALSE;
  2431. return Status;
  2432. }
  2433. }
  2434. else
  2435. {
  2436. Addr = (((Virt >> AMD64_PTE_SHIFT) & AMD64_PTE_MASK) *
  2437. sizeof(Entry)) + (Entry & AMD64_VALID_PFN_MASK);
  2438. KdOut("Amd64VtoP: PTE %s\n", FormatDisp64(Addr));
  2439. (*Levels)++;
  2440. if (Offsets != NULL && OffsetsSize > 0)
  2441. {
  2442. *Offsets++ = Addr;
  2443. OffsetsSize--;
  2444. }
  2445. if ((Status = m_Target->
  2446. ReadAllPhysical(Addr, &Entry, sizeof(Entry))) != S_OK)
  2447. {
  2448. KdOut("Amd64VtoP: PTE read error 0x%X\n", Status);
  2449. m_Translating = FALSE;
  2450. return Status;
  2451. }
  2452. }
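// The PTE is not valid. Distinguish the terminal cases: a zero PTE
// has no backing at all, a prototype PTE cannot be resolved from
// here, and a pagefile PTE yields a paging-file index and offset.
// A transition PTE still carries a valid PFN and falls through to
// the mapped case below.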
  2453. if (!(Entry & 0x1) &&
  2454. ((Entry & AMD64_MM_PTE_PROTOTYPE_MASK) ||
  2455. !(Entry & AMD64_MM_PTE_TRANSITION_MASK)))
  2456. {
  2457. if (Entry == 0)
  2458. {
  2459. KdOut("Amd64VtoP: zero PTE\n");
  2460. Status = HR_PAGE_NOT_AVAILABLE;
  2461. }
  2462. else if (Entry & AMD64_MM_PTE_PROTOTYPE_MASK)
  2463. {
  2464. KdOut("Amd64VtoP: prototype PTE\n");
  2465. Status = HR_PAGE_NOT_AVAILABLE;
  2466. }
  2467. else
  2468. {
  2469. *PfIndex = AMD64_PAGE_FILE_INDEX(Entry);
  2470. *LastVal = (Virt & (AMD64_PAGE_SIZE - 1)) +
  2471. AMD64_PAGE_FILE_OFFSET(Entry);
  2472. KdOut("Amd64VtoP: PTE not present, pagefile %d:%s\n",
  2473. *PfIndex, FormatDisp64(*LastVal));
  2474. Status = HR_PAGE_IN_PAGE_FILE;
  2475. }
  2476. m_Translating = FALSE;
  2477. return Status;
  2478. }
  2479. *LastVal = ((Entry & AMD64_VALID_PFN_MASK) |
  2480. (Virt & (AMD64_PAGE_SIZE - 1)));
  2481. KdOut("Amd64VtoP: Mapped phys %s\n", FormatDisp64(*LastVal));
  2482. (*Levels)++;
  2483. if (Offsets != NULL && OffsetsSize > 0)
  2484. {
  2485. *Offsets++ = *LastVal;
  2486. OffsetsSize--;
  2487. }
  2488. m_Translating = FALSE;
  2489. return S_OK;
  2490. }
  2491. HRESULT
  2492. Amd64MachineInfo::GetBaseTranslationVirtualOffset(PULONG64 Offset)
  2493. {
  2494. *Offset = AMD64_BASE_VIRT;
  2495. return S_OK;
  2496. }
  2497. void
  2498. Amd64MachineInfo::DecodePte(ULONG64 Pte, PULONG64 PageFrameNumber,
  2499. PULONG Flags)
  2500. {
  2501. *PageFrameNumber = (Pte & AMD64_VALID_PFN_MASK) >> AMD64_PAGE_SHIFT;
  2502. *Flags = (Pte & 1) ? MPTE_FLAG_VALID : 0;
  2503. }
  2504. void
  2505. Amd64MachineInfo::OutputFunctionEntry(PVOID RawEntry)
  2506. {
  2507. _PIMAGE_RUNTIME_FUNCTION_ENTRY Entry =
  2508. (_PIMAGE_RUNTIME_FUNCTION_ENTRY)RawEntry;
  2509. dprintf("BeginAddress = %s\n",
  2510. FormatMachineAddr64(this, Entry->BeginAddress));
  2511. dprintf("EndAddress = %s\n",
  2512. FormatMachineAddr64(this, Entry->EndAddress));
  2513. dprintf("UnwindInfoAddress = %s\n",
  2514. FormatMachineAddr64(this, Entry->UnwindInfoAddress));
  2515. }
  2516. HRESULT
  2517. Amd64MachineInfo::ReadDynamicFunctionTable(ProcessInfo* Process,
  2518. ULONG64 Table,
  2519. PULONG64 NextTable,
  2520. PULONG64 MinAddress,
  2521. PULONG64 MaxAddress,
  2522. PULONG64 BaseAddress,
  2523. PULONG64 TableData,
  2524. PULONG TableSize,
  2525. PWSTR OutOfProcessDll,
  2526. PCROSS_PLATFORM_DYNAMIC_FUNCTION_TABLE RawTable)
  2527. {
  2528. HRESULT Status;
  2529. if ((Status = m_Target->
  2530. ReadAllVirtual(Process, Table, &RawTable->Amd64Table,
  2531. sizeof(RawTable->Amd64Table))) != S_OK)
  2532. {
  2533. return Status;
  2534. }
  2535. *NextTable = RawTable->Amd64Table.ListEntry.Flink;
  2536. *MinAddress = RawTable->Amd64Table.MinimumAddress;
  2537. *MaxAddress = RawTable->Amd64Table.MaximumAddress;
  2538. *BaseAddress = RawTable->Amd64Table.BaseAddress;
  2539. if (RawTable->Amd64Table.Type == AMD64_RF_CALLBACK)
  2540. {
  2541. ULONG Done;
  2542. *TableData = 0;
  2543. *TableSize = 0;
  2544. if ((Status = m_Target->
  2545. ReadVirtual(Process, RawTable->Amd64Table.OutOfProcessCallbackDll,
  2546. OutOfProcessDll, (MAX_PATH - 1) * sizeof(WCHAR),
  2547. &Done)) != S_OK)
  2548. {
  2549. return Status;
  2550. }
  2551. OutOfProcessDll[Done / sizeof(WCHAR)] = 0;
  2552. }
  2553. else
  2554. {
  2555. *TableData = RawTable->Amd64Table.FunctionTable;
  2556. *TableSize = RawTable->Amd64Table.EntryCount *
  2557. sizeof(_IMAGE_RUNTIME_FUNCTION_ENTRY);
  2558. OutOfProcessDll[0] = 0;
  2559. }
  2560. return S_OK;
  2561. }
  2562. PVOID
  2563. Amd64MachineInfo::FindDynamicFunctionEntry(PCROSS_PLATFORM_DYNAMIC_FUNCTION_TABLE Table,
  2564. ULONG64 Address,
  2565. PVOID TableData,
  2566. ULONG TableSize)
  2567. {
  2568. ULONG i;
  2569. _PIMAGE_RUNTIME_FUNCTION_ENTRY Func;
  2570. static _IMAGE_RUNTIME_FUNCTION_ENTRY s_RetFunc;
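// NOTE: returning the address of a function-local static means the
// result is only valid until the next call and is not thread-safe.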
  2571. Func = (_PIMAGE_RUNTIME_FUNCTION_ENTRY)TableData;
  2572. for (i = 0; i < TableSize / sizeof(_IMAGE_RUNTIME_FUNCTION_ENTRY); i++)
  2573. {
  2574. if (Address >= Table->Amd64Table.BaseAddress + Func->BeginAddress &&
  2575. Address < Table->Amd64Table.BaseAddress + Func->EndAddress)
  2576. {
  2577. // The table data is temporary so copy the data into
  2578. // a static buffer for longer-term storage.
  2579. s_RetFunc.BeginAddress = Func->BeginAddress;
  2580. s_RetFunc.EndAddress = Func->EndAddress;
  2581. s_RetFunc.UnwindInfoAddress = Func->UnwindInfoAddress;
  2582. return (PVOID)&s_RetFunc;
  2583. }
  2584. Func++;
  2585. }
  2586. return NULL;
  2587. }
  2588. HRESULT
  2589. Amd64MachineInfo::GetUnwindInfoBounds(ProcessInfo* Process,
  2590. ULONG64 TableBase,
  2591. PVOID RawTableEntries,
  2592. ULONG EntryIndex,
  2593. PULONG64 Start,
  2594. PULONG Size)
  2595. {
  2596. HRESULT Status;
  2597. _PIMAGE_RUNTIME_FUNCTION_ENTRY FuncEnt =
  2598. (_PIMAGE_RUNTIME_FUNCTION_ENTRY)RawTableEntries + EntryIndex;
  2599. AMD64_UNWIND_INFO Info;
  2600. *Start = TableBase + FuncEnt->UnwindInfoAddress;
  2601. if ((Status = m_Target->
  2602. ReadAllVirtual(Process, *Start, &Info, sizeof(Info))) != S_OK)
  2603. {
  2604. return Status;
  2605. }
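// AMD64_UNWIND_INFO is assumed to declare a single embedded
// AMD64_UNWIND_CODE, so only CountOfCodes - 1 extra slots are added.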
  2606. *Size = sizeof(Info) + (Info.CountOfCodes - 1) * sizeof(AMD64_UNWIND_CODE);
  2607. // An extra alignment code and pointer may be added on to handle
  2608. // the chained info case where the chain pointer is just
  2609. // beyond the end of the normal code array.
  2610. if ((Info.Flags & AMD64_UNW_FLAG_CHAININFO) != 0)
  2611. {
  2612. if ((Info.CountOfCodes & 1) != 0)
  2613. {
  2614. (*Size) += sizeof(AMD64_UNWIND_CODE);
  2615. }
  2616. (*Size) += sizeof(ULONG64);
  2617. }
  2618. return S_OK;
  2619. }
  2620. HRESULT
  2621. Amd64MachineInfo::ReadKernelProcessorId
  2622. (ULONG Processor, PDEBUG_PROCESSOR_IDENTIFICATION_ALL Id)
  2623. {
  2624. HRESULT Status;
  2625. ULONG64 Prcb;
  2626. ULONG Data;
  2627. if ((Status = m_Target->
  2628. GetProcessorSystemDataOffset(Processor, DEBUG_DATA_KPRCB_OFFSET,
  2629. &Prcb)) != S_OK)
  2630. {
  2631. return Status;
  2632. }
  2633. if ((Status = m_Target->
  2634. ReadAllVirtual(m_Target->m_ProcessHead,
  2635. Prcb + m_Target->m_KdDebuggerData.OffsetPrcbCpuType,
  2636. &Data, sizeof(Data))) != S_OK)
  2637. {
  2638. return Status;
  2639. }
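// The PRCB CpuType field packs the CPUID values as decoded below:
// family in the low byte, stepping in bits 16-19, model in bits 24-27.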
  2640. Id->Amd64.Family = Data & 0xf;
  2641. Id->Amd64.Model = (Data >> 24) & 0xf;
  2642. Id->Amd64.Stepping = (Data >> 16) & 0xf;
  2643. if ((Status = m_Target->
  2644. ReadAllVirtual(m_Target->m_ProcessHead,
  2645. Prcb +
  2646. m_Target->m_KdDebuggerData.OffsetPrcbVendorString,
  2647. Id->Amd64.VendorString,
  2648. sizeof(Id->Amd64.VendorString))) != S_OK)
  2649. {
  2650. return Status;
  2651. }
  2652. return S_OK;
  2653. }
  2654. HRESULT
  2655. Amd64MachineInfo::StaticGetExdiContext(IUnknown* Exdi, PEXDI_CONTEXT Context,
  2656. EXDI_CONTEXT_TYPE CtxType)
  2657. {
  2658. DBG_ASSERT(CtxType == EXDI_CTX_AMD64);
  2659. // Always ask for everything.
  2660. Context->Amd64Context.RegGroupSelection.fSegmentRegs = TRUE;
  2661. Context->Amd64Context.RegGroupSelection.fControlRegs = TRUE;
  2662. Context->Amd64Context.RegGroupSelection.fIntegerRegs = TRUE;
  2663. Context->Amd64Context.RegGroupSelection.fFloatingPointRegs = TRUE;
  2664. Context->Amd64Context.RegGroupSelection.fDebugRegs = TRUE;
  2665. Context->Amd64Context.RegGroupSelection.fSegmentDescriptors = TRUE;
  2666. Context->Amd64Context.RegGroupSelection.fSSERegisters = TRUE;
  2667. Context->Amd64Context.RegGroupSelection.fSystemRegisters = TRUE;
  2668. return ((IeXdiX86_64Context*)Exdi)->GetContext(&Context->Amd64Context);
  2669. }
  2670. //----------------------------------------------------------------------------
  2671. //
  2672. // X86OnAmd64MachineInfo.
  2673. //
  2674. //----------------------------------------------------------------------------
  2675. X86OnAmd64MachineInfo::X86OnAmd64MachineInfo(TargetInfo* Target)
  2676. : X86MachineInfo(Target)
  2677. {
  2678. // Nothing right now.
  2679. }
  2680. HRESULT
  2681. X86OnAmd64MachineInfo::UdGetContextState(ULONG State)
  2682. {
  2683. HRESULT Status;
  2684. if ((Status = m_Target->m_Machines[MACHIDX_AMD64]->
  2685. UdGetContextState(MCTX_FULL)) != S_OK)
  2686. {
  2687. return Status;
  2688. }
  2689. Amd64ContextToX86(&m_Target->m_Machines[MACHIDX_AMD64]->
  2690. m_Context.Amd64Context,
  2691. &m_Context.X86Nt5Context);
  2692. m_ContextState = MCTX_FULL;
  2693. return S_OK;
  2694. }
  2695. HRESULT
  2696. X86OnAmd64MachineInfo::UdSetContext(void)
  2697. {
  2698. m_Target->m_Machines[MACHIDX_AMD64]->
  2699. InitializeContextFlags(&m_Target->m_Machines[MACHIDX_AMD64]->m_Context,
  2700. m_Target->m_SystemVersion);
  2701. X86ContextToAmd64(&m_Context.X86Nt5Context,
  2702. &m_Target->m_Machines[MACHIDX_AMD64]->
  2703. m_Context.Amd64Context);
  2704. return m_Target->m_Machines[MACHIDX_AMD64]->UdSetContext();
  2705. }
  2706. HRESULT
  2707. X86OnAmd64MachineInfo::KdGetContextState(ULONG State)
  2708. {
  2709. HRESULT Status;
  2710. dprintf("The context is partially valid. "
  2711. "Only x86 user-mode context is available.\n");
  2712. if ((Status = m_Target->m_Machines[MACHIDX_AMD64]->
  2713. KdGetContextState(MCTX_FULL)) != S_OK)
  2714. {
  2715. return Status;
  2716. }
  2717. Amd64ContextToX86(&m_Target->m_Machines[MACHIDX_AMD64]->
  2718. m_Context.Amd64Context,
  2719. &m_Context.X86Nt5Context);
  2720. m_ContextState = MCTX_FULL;
  2721. return S_OK;
  2722. }
  2723. HRESULT
  2724. X86OnAmd64MachineInfo::KdSetContext(void)
  2725. {
  2726. dprintf("The context is partially valid. "
  2727. "Only x86 user-mode context is available.\n");
  2728. m_Target->m_Machines[MACHIDX_AMD64]->
  2729. InitializeContextFlags(&m_Target->m_Machines[MACHIDX_AMD64]->m_Context,
  2730. m_Target->m_SystemVersion);
  2731. X86ContextToAmd64(&m_Context.X86Nt5Context,
  2732. &m_Target->m_Machines[MACHIDX_AMD64]->
  2733. m_Context.Amd64Context);
  2734. return m_Target->m_Machines[MACHIDX_AMD64]->KdSetContext();
  2735. }
  2736. HRESULT
  2737. X86OnAmd64MachineInfo::GetSegRegDescriptor(ULONG SegReg, PDESCRIPTOR64 Desc)
  2738. {
  2739. ULONG RegNum = GetSegRegNum(SegReg);
  2740. if (RegNum == 0)
  2741. {
  2742. return E_INVALIDARG;
  2743. }
  2744. return m_Target->
  2745. EmulateNtAmd64SelDescriptor(m_Target->m_RegContextThread,
  2746. m_Target->m_Machines[MACHIDX_AMD64],
  2747. GetIntReg(RegNum),
  2748. Desc);
  2749. }
  2750. void
  2751. X86OnAmd64MachineInfo::Amd64ContextToX86(PAMD64_CONTEXT ContextAmd64,
  2752. PX86_NT5_CONTEXT ContextX86)
  2753. {
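// Narrow a 64-bit context into the 32-bit x86 view used for WOW
// debugging. 64-bit register values are truncated to their low
// 32 bits; only the groups selected by ContextFlags are copied.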
  2754. ULONG Ia32ContextFlags = ContextX86->ContextFlags;
  2755. ULONG i;
  2756. ULONG Tid = GetCurrentThreadId();
  2757. DebugClient* Client;
  2758. if ((Ia32ContextFlags & VDMCONTEXT_CONTROL) == VDMCONTEXT_CONTROL)
  2759. {
  2760. //
  2761. // And the control stuff
  2762. //
  2763. ContextX86->Ebp = (ULONG)ContextAmd64->Rbp;
  2764. ContextX86->SegCs = ContextAmd64->SegCs;
  2765. ContextX86->Eip = (ULONG)ContextAmd64->Rip;
  2766. ContextX86->SegSs = ContextAmd64->SegSs;
  2767. ContextX86->Esp = (ULONG)ContextAmd64->Rsp;
  2768. ContextX86->EFlags = ContextAmd64->EFlags;
  2769. }
  2770. if ((Ia32ContextFlags & VDMCONTEXT_INTEGER) == VDMCONTEXT_INTEGER)
  2771. {
  2772. //
  2773. // Now for the integer state...
  2774. //
  2775. ContextX86->Edi = (ULONG)ContextAmd64->Rdi;
  2776. ContextX86->Esi = (ULONG)ContextAmd64->Rsi;
  2777. ContextX86->Ebx = (ULONG)ContextAmd64->Rbx;
  2778. ContextX86->Edx = (ULONG)ContextAmd64->Rdx;
  2779. ContextX86->Ecx = (ULONG)ContextAmd64->Rcx;
  2780. ContextX86->Eax = (ULONG)ContextAmd64->Rax;
  2781. }
  2782. if ((Ia32ContextFlags & VDMCONTEXT_SEGMENTS) == VDMCONTEXT_SEGMENTS)
  2783. {
  2784. ContextX86->SegGs = ContextAmd64->SegGs;
  2785. ContextX86->SegEs = ContextAmd64->SegEs;
  2786. ContextX86->SegDs = ContextAmd64->SegDs;
  2787. ContextX86->SegSs = ContextAmd64->SegSs;
  2788. ContextX86->SegFs = ContextAmd64->SegFs;
  2789. ContextX86->SegCs = ContextAmd64->SegCs;
  2790. }
  2791. if ((Ia32ContextFlags & VDMCONTEXT_EXTENDED_REGISTERS) ==
  2792. VDMCONTEXT_EXTENDED_REGISTERS)
  2793. {
  2794. PX86_FXSAVE_FORMAT FxSave =
  2795. (PX86_FXSAVE_FORMAT)ContextX86->ExtendedRegisters;
  2796. FxSave->ControlWord = ContextAmd64->FltSave.ControlWord;
  2797. FxSave->StatusWord = ContextAmd64->FltSave.StatusWord;
  2798. FxSave->TagWord = ContextAmd64->FltSave.TagWord;
  2799. FxSave->ErrorOpcode = ContextAmd64->FltSave.ErrorOpcode;
  2800. FxSave->ErrorOffset = ContextAmd64->FltSave.ErrorOffset;
  2801. FxSave->ErrorSelector = ContextAmd64->FltSave.ErrorSelector;
  2802. FxSave->DataOffset = ContextAmd64->FltSave.DataOffset;
  2803. FxSave->DataSelector = ContextAmd64->FltSave.DataSelector;
  2804. FxSave->MXCsr = ContextAmd64->MxCsr;
  2805. for (i = 0; i < NUMBER_OF_387_REGS; i++)
  2806. {
  2807. memcpy(FxSave->RegisterArea + 16 * i,
  2808. ContextAmd64->FltSave.FloatRegisters + 10 * i,
  2809. 10);
  2810. }
  2811. for (i = 0; i < NUMBER_OF_XMMI_REGS; i++)
  2812. {
  2813. memcpy(FxSave->Reserved3 + 16 * i,
  2814. &ContextAmd64->Xmm0 + 16 * i,
  2815. 16);
  2816. }
  2817. }
  2818. if ((Ia32ContextFlags & VDMCONTEXT_FLOATING_POINT) ==
  2819. VDMCONTEXT_FLOATING_POINT)
  2820. {
  2821. //
  2822. // Copy over the floating point status/control stuff
  2823. //
  2824. ContextX86->FloatSave.ControlWord = ContextAmd64->FltSave.ControlWord;
  2825. ContextX86->FloatSave.StatusWord = ContextAmd64->FltSave.StatusWord;
  2826. ContextX86->FloatSave.TagWord = ContextAmd64->FltSave.TagWord;
  2827. ContextX86->FloatSave.ErrorOffset = ContextAmd64->FltSave.ErrorOffset;
  2828. ContextX86->FloatSave.ErrorSelector = ContextAmd64->FltSave.ErrorSelector;
  2829. ContextX86->FloatSave.DataOffset = ContextAmd64->FltSave.DataOffset;
  2830. ContextX86->FloatSave.DataSelector = ContextAmd64->FltSave.DataSelector;
  2831. for (i = 0; i < NUMBER_OF_387_REGS; i++)
  2832. {
  2833. memcpy(ContextX86->FloatSave.RegisterArea + 10 * i,
  2834. ContextAmd64->FltSave.FloatRegisters + 10 * i,
  2835. 10);
  2836. }
  2837. }
  2838. if ((Ia32ContextFlags & VDMCONTEXT_DEBUG_REGISTERS) ==
  2839. VDMCONTEXT_DEBUG_REGISTERS)
  2840. {
  2841. ContextX86->Dr0 = (ULONG)ContextAmd64->Dr0;
  2842. ContextX86->Dr1 = (ULONG)ContextAmd64->Dr1;
  2843. ContextX86->Dr2 = (ULONG)ContextAmd64->Dr2;
  2844. ContextX86->Dr3 = (ULONG)ContextAmd64->Dr3;
  2845. ContextX86->Dr6 = (ULONG)ContextAmd64->Dr6;
  2846. ContextX86->Dr7 = (ULONG)ContextAmd64->Dr7;
  2847. }
  2848. for (Client = g_Clients; Client != NULL; Client = Client->m_Next)
  2849. {
  2850. if (Client->m_ThreadId == Tid)
  2851. {
  2852. break;
  2853. }
  2854. }
  2855. DBG_ASSERT(Client != NULL);
  2856. if (!((Amd64MachineInfo*)m_Target->m_Machines[MACHIDX_AMD64])->
  2857. IsIa32CodeSegment())
  2858. {
  2859. if (g_Wow64exts == NULL)
  2860. {
  2861. dprintf("Need to load wow64exts.dll to retrieve context!\n");
  2862. return;
  2863. }
  2864. (*g_Wow64exts)(WOW64EXTS_GET_CONTEXT,
  2865. (ULONG64)Client,
  2866. (ULONG64)ContextX86,
  2867. (ULONG64)NULL);
  2868. return;
  2869. }
  2870. }
  2871. void
  2872. X86OnAmd64MachineInfo::X86ContextToAmd64(PX86_NT5_CONTEXT ContextX86,
  2873. PAMD64_CONTEXT ContextAmd64)
  2874. {
  2875. ULONG Ia32ContextFlags = ContextX86->ContextFlags;
  2876. ULONG i;
  2877. ULONG Tid = GetCurrentThreadId();
  2878. DebugClient* Client;
  2879. for (Client = g_Clients; Client != NULL; Client = Client->m_Next)
  2880. {
  2881. if (Client->m_ThreadId == Tid)
  2882. {
  2883. break;
  2884. }
  2885. }
  2886. DBG_ASSERT(Client != NULL);
  2887. if (!((Amd64MachineInfo*)m_Target->m_Machines[MACHIDX_AMD64])->
  2888. IsIa32CodeSegment())
  2889. {
  2890. if (g_Wow64exts == NULL)
  2891. {
  2892. dprintf("Need to load wow64exts.dll to retrieve context!\n");
  2893. return;
  2894. }
  2895. (*g_Wow64exts)(WOW64EXTS_SET_CONTEXT,
  2896. (ULONG64)Client,
  2897. (ULONG64)ContextX86,
  2898. (ULONG64)NULL);
  2899. return;
  2900. }
  2901. if ((Ia32ContextFlags & VDMCONTEXT_CONTROL) == VDMCONTEXT_CONTROL)
  2902. {
  2903. //
  2904. // And the control stuff
  2905. //
  2906. ContextAmd64->Rbp = ContextX86->Ebp;
  2907. ContextAmd64->Rip = ContextX86->Eip;
  2908. ContextAmd64->SegCs = (USHORT)ContextX86->SegCs;
  2909. ContextAmd64->Rsp = ContextX86->Esp;
  2910. ContextAmd64->SegSs = (USHORT)ContextX86->SegSs;
  2911. ContextAmd64->EFlags = ContextX86->EFlags;
  2912. }
  2913. if ((Ia32ContextFlags & VDMCONTEXT_INTEGER) == VDMCONTEXT_INTEGER)
  2914. {
  2915. //
  2916. // Now for the integer state...
  2917. //
  2918. ContextAmd64->Rdi = ContextX86->Edi;
  2919. ContextAmd64->Rsi = ContextX86->Esi;
  2920. ContextAmd64->Rbx = ContextX86->Ebx;
  2921. ContextAmd64->Rdx = ContextX86->Edx;
  2922. ContextAmd64->Rcx = ContextX86->Ecx;
  2923. ContextAmd64->Rax = ContextX86->Eax;
  2924. }
  2925. if ((Ia32ContextFlags & VDMCONTEXT_SEGMENTS) == VDMCONTEXT_SEGMENTS)
  2926. {
  2927. ContextAmd64->SegGs = (USHORT)ContextX86->SegGs;
  2928. ContextAmd64->SegEs = (USHORT)ContextX86->SegEs;
  2929. ContextAmd64->SegDs = (USHORT)ContextX86->SegDs;
  2930. ContextAmd64->SegSs = (USHORT)ContextX86->SegSs;
  2931. ContextAmd64->SegFs = (USHORT)ContextX86->SegFs;
  2932. ContextAmd64->SegCs = (USHORT)ContextX86->SegCs;
  2933. }
  2934. if ((Ia32ContextFlags & VDMCONTEXT_EXTENDED_REGISTERS) ==
  2935. VDMCONTEXT_EXTENDED_REGISTERS)
  2936. {
  2937. PX86_FXSAVE_FORMAT FxSave =
  2938. (PX86_FXSAVE_FORMAT)ContextX86->ExtendedRegisters;
  2939. //
  2940. // And copy over the floating point status/control stuff
  2941. //
  2942. ContextAmd64->FltSave.ControlWord = FxSave->ControlWord;
  2943. ContextAmd64->FltSave.StatusWord = FxSave->StatusWord;
  2944. ContextAmd64->FltSave.TagWord = FxSave->TagWord;
  2945. ContextAmd64->FltSave.ErrorOpcode = FxSave->ErrorOpcode;
  2946. ContextAmd64->FltSave.ErrorOffset = FxSave->ErrorOffset;
  2947. ContextAmd64->FltSave.ErrorSelector = (USHORT)FxSave->ErrorSelector;
  2948. ContextAmd64->FltSave.DataOffset = FxSave->DataOffset;
  2949. ContextAmd64->FltSave.DataSelector = (USHORT)FxSave->DataSelector;
  2950. ContextAmd64->MxCsr = FxSave->MXCsr;
  2951. for (i = 0; i < NUMBER_OF_387_REGS; i++)
  2952. {
  2953. memcpy(ContextAmd64->FltSave.FloatRegisters + 10 * i,
  2954. FxSave->RegisterArea + 16 * i,
  2955. 10);
  2956. }
  2957. for (i = 0; i < NUMBER_OF_XMMI_REGS; i++)
  2958. {
  2959. memcpy(&ContextAmd64->Xmm0 + 16 * i,
  2960. FxSave->Reserved3 + 16 * i,
  2961. 16);
  2962. }
  2963. }
  2964. if ((Ia32ContextFlags & VDMCONTEXT_FLOATING_POINT) ==
  2965. VDMCONTEXT_FLOATING_POINT)
  2966. {
  2967. //
  2968. // Copy over the floating point status/control stuff
  2969. // Leave the MXCSR stuff alone
  2970. //
  2971. ContextAmd64->FltSave.ControlWord = (USHORT)ContextX86->FloatSave.ControlWord;
  2972. ContextAmd64->FltSave.StatusWord = (USHORT)ContextX86->FloatSave.StatusWord;
  2973. ContextAmd64->FltSave.TagWord = (USHORT)ContextX86->FloatSave.TagWord;
  2974. ContextAmd64->FltSave.ErrorOffset = ContextX86->FloatSave.ErrorOffset;
  2975. ContextAmd64->FltSave.ErrorSelector = (USHORT)ContextX86->FloatSave.ErrorSelector;
  2976. ContextAmd64->FltSave.DataOffset = ContextX86->FloatSave.DataOffset;
  2977. ContextAmd64->FltSave.DataSelector = (USHORT)ContextX86->FloatSave.DataSelector;
  2978. for (i = 0; i < NUMBER_OF_387_REGS; i++)
  2979. {
  2980. memcpy(ContextAmd64->FltSave.FloatRegisters + 10 * i,
  2981. ContextX86->FloatSave.RegisterArea + 10 * i,
  2982. 10);
  2983. }
  2984. }
  2985. if ((Ia32ContextFlags & VDMCONTEXT_DEBUG_REGISTERS) ==
  2986. VDMCONTEXT_DEBUG_REGISTERS)
  2987. {
  2988. ContextAmd64->Dr0 = ContextX86->Dr0;
  2989. ContextAmd64->Dr1 = ContextX86->Dr1;
  2990. ContextAmd64->Dr2 = ContextX86->Dr2;
  2991. ContextAmd64->Dr3 = ContextX86->Dr3;
  2992. ContextAmd64->Dr6 = ContextX86->Dr6;
  2993. ContextAmd64->Dr7 = ContextX86->Dr7;
  2994. }
  2995. }