Changeset 47394 in vbox
- Timestamp:
- Jul 25, 2013 1:18:51 PM (12 years ago)
- svn:sync-xref-src-repo-rev:
- 87544
- Location:
- trunk/src/VBox/VMM
- Files:
- 3 edited
Legend:
- Unmodified (context lines)
- Added (lines marked "+" below)
- Removed (lines marked "-" below)
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r47389 → r47394

 #define IEM_MC_FETCH_MREG_U64(a_u64Value, a_iMReg) \
     do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx; } while (0)
+#define IEM_MC_FETCH_MREG_U32(a_u32Value, a_iMReg) \
+    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].au32[0]; } while (0)
 #define IEM_MC_STORE_MREG_U64(a_iMReg, a_u64Value) \
     do { pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx = (a_u64Value); } while (0)
…
 #define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) \
     do { (a_u128Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm; } while (0)
+#define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg) \
+    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0]; } while (0)
+#define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg) \
+    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au32[0]; } while (0)
 #define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) \
     do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm = (a_u128Value); } while (0)
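All three new fetch macros reduce to a plain union read out of the guest FPU/SSE register file: the low dword (au32[0]) or low qword (au64[0]) of the register in question. Below is a minimal standalone sketch of that access pattern, using simplified stand-in unions rather than VirtualBox's real guest-context types:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-ins only; the real register file lives in the
     * guest context (pIemCpu->CTX_SUFF(pCtx)->fpu) with VBox's own types. */
    typedef union
    {
        uint64_t mmx;       /* the full 64-bit MMX value (IEM_MC_FETCH_MREG_U64) */
        uint32_t au32[2];   /* same bits as two dwords   (IEM_MC_FETCH_MREG_U32) */
    } MMXREG;

    typedef union
    {
        uint64_t au64[2];   /* 128-bit XMM value as two qwords (IEM_MC_FETCH_XREG_U64) */
        uint32_t au32[4];   /* ... or as four dwords           (IEM_MC_FETCH_XREG_U32) */
    } XMMREG;

    int main(void)
    {
        MMXREG mm0  = { .mmx  = UINT64_C(0x8877665544332211) };
        XMMREG xmm0 = { .au64 = { UINT64_C(0x1122334455667788), 0 } };

        /* Index 0 is the low element on a little-endian host, which is the
         * element the new macros fetch. */
        printf("mreg u32 = %#" PRIx32 "\n", mm0.au32[0]);    /* 0x44332211 */
        printf("xreg u64 = %#" PRIx64 "\n", xmm0.au64[0]);   /* 0x1122334455667788 */
        printf("xreg u32 = %#" PRIx32 "\n", xmm0.au32[0]);   /* 0x55667788 */
        return 0;
    }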
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r47389 → r47394

 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-
     switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
     {
-        case 0: /* MMX */
-        case IEM_OP_PRF_SIZE_OP: /* SSE */
-            break;
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
-
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-    {
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_BEGIN(0, 1);
-        if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP)
-        {
-            /* XMM, greg*/
-            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-            if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-            {
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
-                IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
-            }
-            else
-            {
-                IEM_MC_LOCAL(uint32_t, u32Tmp);
-                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
-                IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
-            }
-        }
-        else
-        {
-            /* MMX, greg */
-            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-            IEM_MC_LOCAL(uint64_t, u64Tmp);
-            if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
-            else
-                IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
-            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
-        }
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    else
-    {
-        /* memory source operand. */
-        IEM_MC_BEGIN(0, 2);
-        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-        if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP)
-        {
-            /* XMM, [mem] */
-            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-            if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-            {
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
-            }
-            else
-            {
-                IEM_MC_LOCAL(uint32_t, u32Tmp);
-                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
-            }
-        }
-        else
-        {
-            /* MMX, [mem] */
-            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-            if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-            {
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
-            }
-            else
-            {
-                IEM_MC_LOCAL(uint32_t, u32Tmp);
-                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
-            }
-        }
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-
-    return VINF_SUCCESS;
+        case IEM_OP_PRF_SIZE_OP: /* SSE */
+            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /* XMM, greg*/
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /* XMM, [mem] */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        case 0: /* MMX */
+            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /* MMX, greg */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_LOCAL(uint64_t, u64Tmp);
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                else
+                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /* MMX, [mem] */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        default:
+            return IEMOP_RAISE_INVALID_OPCODE();
+    }
 }
…
 /** Opcode 0x0f 0x7d. */
 FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
+
+
 /** Opcode 0x0f 0x7e. */
-FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
+FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
+    {
+        case IEM_OP_PRF_SIZE_OP: /* SSE */
+            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /* greg, XMM */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /* [mem], XMM */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        case 0: /* MMX */
+            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /* greg, MMX */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /* [mem], MMX */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+                {
+                    IEM_MC_LOCAL(uint64_t, u64Tmp);
+                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
+                }
+                else
+                {
+                    IEM_MC_LOCAL(uint32_t, u32Tmp);
+                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
+                }
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        default:
+            return IEMOP_RAISE_INVALID_OPCODE();
+    }
+}
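Both the restructured 0x0f 0x6e handler and the new 0x0f 0x7e handler make the same width decision: REX.W selects a full 64-bit move (movq), otherwise only the low dword moves (movd), and the dword form zero-extends when the destination is wider than 32 bits. A toy model of just that decision; movd_q_value is a hypothetical helper for illustration, not part of the IEM API:

    #include <inttypes.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helper: the value an MMX/XMM low qword contributes to a
     * 64-bit destination under movd/movq rules. */
    static uint64_t movd_q_value(uint64_t uSrcLowQword, bool fRexW)
    {
        if (fRexW)
            return uSrcLowQword;                    /* movq: all 64 bits */
        return (uint64_t)(uint32_t)uSrcLowQword;    /* movd: low dword, zero-extended */
    }

    int main(void)
    {
        uint64_t const mm1 = UINT64_C(0xAABBCCDD11223344);
        printf("movq -> %#" PRIx64 "\n", movd_q_value(mm1, true));   /* 0xaabbccdd11223344 */
        printf("movd -> %#" PRIx64 "\n", movd_q_value(mm1, false));  /* 0x11223344 */
        return 0;
    }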
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r47389 → r47394

 #define IEM_MC_FETCH_MREG_U64(a_u64Value, a_iMReg)      do { (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); } while (0)
+#define IEM_MC_FETCH_MREG_U32(a_u32Value, a_iMReg)      do { (a_u32Value) = 0; CHK_TYPE(uint32_t, a_u32Value); } while (0)
 #define IEM_MC_STORE_MREG_U64(a_iMReg, a_u64Value)      do { CHK_TYPE(uint64_t, a_u64Value); } while (0)
 #define IEM_MC_STORE_MREG_U32_ZX_U64(a_iMReg, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); } while (0)
…
 #define IEM_MC_REF_MREG_U32_CONST(a_pu32Dst, a_iMReg)   do { (a_pu32Dst) = (uint32_t const *)((uintptr_t)0); CHK_PTYPE(uint32_t const *, a_pu32Dst); } while (0)

-#define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) do { uint128_t u128Zero = {0,0}; (a_u128Value) = u128Zero; CHK_TYPE(uint128_t, a_u128Value); } while (0)
-#define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) do { CHK_TYPE(uint128_t, a_u128Value); } while (0)
-#define IEM_MC_STORE_XREG_U64_ZX_U128(a_iXReg, a_u64Value) do { CHK_TYPE(uint64_t, a_u64Value); } while (0)
-#define IEM_MC_STORE_XREG_U32_ZX_U128(a_iXReg, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); } while (0)
+#define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg)    do { uint128_t u128Zero = {0,0}; (a_u128Value) = u128Zero; CHK_TYPE(uint128_t, a_u128Value); } while (0)
+#define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg)      do { (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); } while (0)
+#define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg)      do { (a_u32Value) = 0; CHK_TYPE(uint32_t, a_u32Value); } while (0)
+#define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value)    do { CHK_TYPE(uint128_t, a_u128Value); } while (0)
+#define IEM_MC_STORE_XREG_U64_ZX_U128(a_iXReg, a_u64Value) do { CHK_TYPE(uint64_t, a_u64Value); } while (0)
+#define IEM_MC_STORE_XREG_U32_ZX_U128(a_iXReg, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); } while (0)
 #define IEM_MC_REF_XREG_U128(a_pu128Dst, a_iXReg)       do { (a_pu128Dst) = (uint128_t *)((uintptr_t)0); CHK_PTYPE(uint128_t *, a_pu128Dst); } while (0)
 #define IEM_MC_REF_XREG_U128_CONST(a_pu128Dst, a_iXReg) do { (a_pu128Dst) = (uint128_t const *)((uintptr_t)0); CHK_PTYPE(uint128_t const *, a_pu128Dst); } while (0)
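tstIEMCheckMc.cpp compiles every instruction body against these no-op macro redefinitions, so each new IEM_MC_FETCH_* macro needs a matching stub whose CHK_TYPE call pins the expected operand type at compile time. A minimal sketch of how such a type pin works; this CHK_TYPE is an approximation of the testcase's real macro, not a copy of it:

    #include <stdint.h>

    /* Approximation: taking the argument's address into a pointer of the
     * expected type makes a mismatched variable type fail to compile
     * (an error in C++, at least a constraint warning in C). */
    #define CHK_TYPE(a_ExpectedType, a_Param) \
        do { a_ExpectedType const *pCheckType = &(a_Param); (void)pCheckType; } while (0)

    /* The stub does no emulation work; it only fixes the output type. */
    #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg) \
        do { (a_u32Value) = 0; CHK_TYPE(uint32_t, a_u32Value); } while (0)

    int main(void)
    {
        uint32_t u32Tmp;
        IEM_MC_FETCH_XREG_U32(u32Tmp, 2);   /* correct type: compiles */
        /* uint16_t u16Bad;
           IEM_MC_FETCH_XREG_U32(u16Bad, 2);   -- rejected: uint16_t * is not
                                                  convertible to uint32_t const * */
        return (int)u32Tmp;
    }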