Changeset 47389 in vbox for trunk/src/VBox
- Timestamp: Jul 25, 2013, 12:32:33 PM
- Location: trunk/src/VBox/VMM
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r47385 → r47389

@@ -5993 +5993 @@ new 128-bit store helpers, inserted ahead of the descriptor-register store helper

+/**
+ * Stores a data dqword.
+ *
+ * @returns Strict VBox status code.
+ * @param   pIemCpu     The IEM per CPU data.
+ * @param   iSegReg     The index of the segment register to use for
+ *                      this access.  The base and limits are checked.
+ * @param   GCPtrMem    The address of the guest memory.
+ * @param   u128Value   The value to store.
+ */
+static VBOXSTRICTRC iemMemStoreDataU128(PIEMCPU pIemCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint128_t u128Value)
+{
+    /* The lazy approach for now... */
+    uint128_t *pu128Dst;
+    VBOXSTRICTRC rc = iemMemMap(pIemCpu, (void **)&pu128Dst, sizeof(*pu128Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+    if (rc == VINF_SUCCESS)
+    {
+        *pu128Dst = u128Value;
+        rc = iemMemCommitAndUnmap(pIemCpu, pu128Dst, IEM_ACCESS_DATA_W);
+    }
+    return rc;
+}
+
+
+/**
+ * Stores a data dqword, aligned.
+ *
+ * @returns Strict VBox status code.
+ * @param   pIemCpu     The IEM per CPU data.
+ * @param   iSegReg     The index of the segment register to use for
+ *                      this access.  The base and limits are checked.
+ * @param   GCPtrMem    The address of the guest memory.
+ * @param   u128Value   The value to store.
+ */
+static VBOXSTRICTRC iemMemStoreDataU128Aligned(PIEMCPU pIemCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint128_t u128Value)
+{
+    /* The lazy approach for now... */
+    if (GCPtrMem & 15)
+        return iemRaiseGeneralProtectionFault0(pIemCpu);
+
+    uint128_t *pu128Dst;
+    VBOXSTRICTRC rc = iemMemMap(pIemCpu, (void **)&pu128Dst, sizeof(*pu128Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+    if (rc == VINF_SUCCESS)
+    {
+        *pu128Dst = u128Value;
+        rc = iemMemCommitAndUnmap(pIemCpu, pu128Dst, IEM_ACCESS_DATA_W);
+    }
+    return rc;
+}
+
+
 /**
  * Stores a descriptor register (sgdt, sidt).

@@ -6973 +7024 @@ new MMX register fetch microcode macro

+#define IEM_MC_FETCH_MREG_U64(a_u64Value, a_iMReg) \
+    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx; } while (0)
 #define IEM_MC_STORE_MREG_U64(a_iMReg, a_u64Value) \
     do { pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx = (a_u64Value); } while (0)

@@ -6984 +7037 @@ new XMM register fetch/store microcode macros

     (a_pu32Dst) = ((uint32_t const *)&pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx)

+#define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) \
+    do { (a_u128Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm; } while (0)
+#define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) \
+    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm = (a_u128Value); } while (0)
 #define IEM_MC_STORE_XREG_U64_ZX_U128(a_iXReg, a_u64Value) \
     do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0] = (a_u64Value); \

@@ -7147 +7204 @@ new 128-bit memory store microcode macros

         (a_pr80Dst)->au16[4] = UINT16_C(0xffff); \
     } while (0)

+#define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU128(pIemCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value)))
+#define IEM_MC_STORE_MEM_U128_ALIGN(a_iSeg, a_GCPtrMem, a_u128Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU128Aligned(pIemCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value)))
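Both new helpers follow the same map/store/commit pattern; the aligned variant differs only in rejecting any guest address whose low four bits are set, since a dqword store must be 16-byte aligned. Below is a minimal standalone sketch of that alignment check. The demo names and status values are made up for illustration and are not the real IEM types or APIs.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for VBOXSTRICTRC results; not the real VBox API. */
typedef int DEMOSTATUS;
#define DEMO_SUCCESS    0
#define DEMO_RAISE_GP0  (-1)    /* models iemRaiseGeneralProtectionFault0() */

/* Mirrors the shape of iemMemStoreDataU128Aligned: a 16-byte-aligned store
   must raise #GP(0) when the guest address has any of its low four bits set. */
static DEMOSTATUS demoStoreU128Aligned(uint64_t GCPtrMem)
{
    if (GCPtrMem & 15)          /* dqword stores require 16-byte alignment */
        return DEMO_RAISE_GP0;
    /* ...mapping, storing and committing would go here... */
    return DEMO_SUCCESS;
}

int main(void)
{
    printf("0x1000 -> %d (aligned, ok)\n",      demoStoreU128Aligned(0x1000));
    printf("0x1008 -> %d (misaligned, #GP)\n",  demoStoreU128Aligned(0x1008));
    return 0;
}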
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r47385 → r47389

@@ -1849 +1849 @@ raise the SSE2 exception, not the MMX one, for the SSE form

     IEM_MC_ARG(uint128_t *,            pDst, 0);
     IEM_MC_ARG(uint64_t const *,       pSrc, 1);
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
     IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

@@ -1855 +1855 @@ drop the misplaced exception check after IEM_MC_END

     IEM_MC_ADVANCE_RIP();
     IEM_MC_END();
-    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 }
 else

@@ -1870 +1869 @@

     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

@@ -1994 +1993 @@

     IEM_MC_ARG(uint128_t *,            pDst, 0);
     IEM_MC_ARG(uint128_t const *,      pSrc, 1);
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
     IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

@@ -2000 +1999 @@

     IEM_MC_ADVANCE_RIP();
     IEM_MC_END();
-    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 }
 else

@@ -2015 +2013 @@

     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_FETCH_MEM_U128_ALIGN(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only write the high qword. */

@@ -2221 +2220 @@ opcode 0x0f 0x6f implemented (was a stub)

 /** Opcode 0x0f 0x6f. */
-FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq); // NEXT
+FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
+{
+    bool fAligned = false;
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
+    {
+        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
+            fAligned = true;
+        case IEM_OP_PRF_REPZ: /* SSE unaligned */
+            if (fAligned)
+                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
+            else
+                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_LOCAL(uint128_t, u128Tmp);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(uint128_t, u128Tmp);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                if (fAligned)
+                    IEM_MC_FETCH_MEM_U128_ALIGN(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                else
+                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        case 0: /* MMX */
+            IEMOP_MNEMONIC("movq Pq,Qq");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_LOCAL(uint64_t, u64Tmp);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
+                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(uint64_t, u64Tmp);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        default:
+            return IEMOP_RAISE_INVALID_OPCODE();
+    }
+}
+
 /** Opcode 0x0f 0x70. */
 FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib); // NEXT

@@ -2412 +2503 @@ opcode 0x0f 0x7f implemented (was a stub)

 /** Opcode 0x0f 0x7e. */
 FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
+
+
 /** Opcode 0x0f 0x7f. */
-FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
+FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    bool fAligned = false;
+    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
+    {
+        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
+            fAligned = true;
+        case IEM_OP_PRF_REPZ: /* SSE unaligned */
+            if (fAligned)
+                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
+            else
+                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_LOCAL(uint128_t, u128Tmp);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(uint128_t, u128Tmp);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                if (fAligned)
+                    IEM_MC_STORE_MEM_U128_ALIGN(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
+                else
+                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        case 0: /* MMX */
+            IEMOP_MNEMONIC("movq Qq,Pq");
+
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(0, 1);
+                IEM_MC_LOCAL(uint64_t, u64Tmp);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(0, 2);
+                IEM_MC_LOCAL(uint64_t, u64Tmp);
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        default:
+            return IEMOP_RAISE_INVALID_OPCODE();
+    }
+}
+

@@ -5825 +6011 @@

     IEM_MC_ARG(uint128_t *,            pDst, 0);
     IEM_MC_ARG(uint128_t const *,      pSrc, 1);
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
     IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

@@ -5831 +6017 @@

     IEM_MC_ADVANCE_RIP();
     IEM_MC_END();
-    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 }
 else

@@ -5846 +6031 @@

     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
     IEM_MC_FETCH_MEM_U128_ALIGN(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r47385 → r47389

@@ -454 +454 @@ checking stubs for the new MMX/XMM register macros

 #define IEM_MC_CLEAR_FSW_EX()   do { } while (0)

+#define IEM_MC_FETCH_MREG_U64(a_u64Value, a_iMReg)       do { (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); } while (0)
 #define IEM_MC_STORE_MREG_U64(a_iMReg, a_u64Value)       do { CHK_TYPE(uint64_t, a_u64Value); } while (0)
 #define IEM_MC_STORE_MREG_U32_ZX_U64(a_iMReg, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); } while (0)

@@ -460 +462 @@

 #define IEM_MC_REF_MREG_U32_CONST(a_pu32Dst, a_iMReg)    do { (a_pu32Dst) = (uint32_t const *)((uintptr_t)0); CHK_PTYPE(uint32_t const *, a_pu32Dst); } while (0)

+#define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg)     do { uint128_t u128Zero = {0,0}; (a_u128Value) = u128Zero; CHK_TYPE(uint128_t, a_u128Value); } while (0)
+#define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value)     do { CHK_TYPE(uint128_t, a_u128Value); } while (0)
 #define IEM_MC_STORE_XREG_U64_ZX_U128(a_iXReg, a_u64Value) do { CHK_TYPE(uint64_t, a_u64Value); } while (0)
 #define IEM_MC_STORE_XREG_U32_ZX_U128(a_iXReg, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); } while (0)

@@ -485 +489 @@ float/128-bit fetch checks moved below the zero/sign-extending fetches

 #define IEM_MC_FETCH_MEM_U64_DISP(a_u64Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
     do { CHK_GCPTR(a_GCPtrMem); CHK_CONST(uint8_t, a_offDisp); CHK_TYPE(uint64_t, a_u64Dst); } while (0)
-
-#define IEM_MC_FETCH_MEM_R32(a_r32Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT32U, a_r32Dst); } while (0)
-#define IEM_MC_FETCH_MEM_R64(a_r64Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT64U, a_r64Dst); } while (0)
-#define IEM_MC_FETCH_MEM_R80(a_r80Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT80U, a_r80Dst); } while (0)
-
-#define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem)       do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); } while (0)
-#define IEM_MC_FETCH_MEM_U128_ALIGN(a_u128Dst, a_iSeg, a_GCPtrMem) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); } while (0)

 #define IEM_MC_FETCH_MEM_U8_ZX_U16(a_u16Dst, a_iSeg, a_GCPtrMem)   do { CHK_GCPTR(a_GCPtrMem); } while (0)

@@ -505 +502 @@

 #define IEM_MC_FETCH_MEM_U16_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem)  do { CHK_GCPTR(a_GCPtrMem); } while (0)
 #define IEM_MC_FETCH_MEM_U32_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem)  do { CHK_GCPTR(a_GCPtrMem); } while (0)
-#define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint8_t, a_u8Value); CHK_SEG_IDX(a_iSeg); } while (0)
+#define IEM_MC_FETCH_MEM_R32(a_r32Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT32U, a_r32Dst); } while (0)
+#define IEM_MC_FETCH_MEM_R64(a_r64Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT64U, a_r64Dst); } while (0)
+#define IEM_MC_FETCH_MEM_R80(a_r80Dst, a_iSeg, a_GCPtrMem)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT80U, a_r80Dst); } while (0)
+#define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem)       do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); } while (0)
+#define IEM_MC_FETCH_MEM_U128_ALIGN(a_u128Dst, a_iSeg, a_GCPtrMem) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); } while (0)
+
+#define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value)         do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint8_t, a_u8Value); CHK_SEG_IDX(a_iSeg); } while (0)
 #define IEM_MC_STORE_MEM_U16(a_iSeg, a_GCPtrMem, a_u16Value)       do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint16_t, a_u16Value); } while (0)
 #define IEM_MC_STORE_MEM_U32(a_iSeg, a_GCPtrMem, a_u32Value)       do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint32_t, a_u32Value); } while (0)

@@ -520 +523 @@ checking stubs for the new 128-bit memory stores

 #define IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(a_pr64Dst)            do { CHK_TYPE(PRTFLOAT64U, a_pr64Dst); } while (0)
 #define IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(a_pr80Dst)            do { CHK_TYPE(PRTFLOAT80U, a_pr80Dst); } while (0)
+#define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Dst)       do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); CHK_SEG_IDX(a_iSeg); } while (0)
+#define IEM_MC_STORE_MEM_U128_ALIGN(a_iSeg, a_GCPtrMem, a_u128Dst) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); CHK_SEG_IDX(a_iSeg); } while (0)

 #define IEM_MC_PUSH_U16(a_u16Value)   do {} while (0)
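tstIEMCheckMc.cpp never executes these macros; each body exists only so the compiler verifies that every IEM_MC_* invocation in the instruction tables passes arguments of the declared type. The following is a rough standalone illustration of the idea behind CHK_TYPE; the real macro in the testcase may be defined differently.

#include <stdint.h>

/* Guess at the CHK_TYPE pattern: taking the argument's address and assigning
   it to a pointer of the expected type makes the compiler reject a wrongly
   typed macro argument, while producing no meaningful code at run time. */
#define DEMO_CHK_TYPE(a_ExpectedType, a_Param) \
    do { a_ExpectedType const *pCheck = &(a_Param); (void)pCheck; } while (0)

/* Demo counterpart of IEM_MC_STORE_MREG_U64's checking stub. */
#define DEMO_STORE_MREG_U64(a_iMReg, a_u64Value) \
    do { DEMO_CHK_TYPE(uint64_t, a_u64Value); (void)(a_iMReg); } while (0)

int main(void)
{
    uint64_t u64 = 42;
    DEMO_STORE_MREG_U64(0, u64);   /* fine: the argument really is a uint64_t */
#if 0
    uint32_t u32 = 42;
    DEMO_STORE_MREG_U64(0, u32);   /* rejected: uint32_t* -> uint64_t const*  */
#endif
    return 0;
}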