Changeset 47407 in vbox for trunk/src/VBox/VMM
Timestamp: Jul 25, 2013 7:37:36 PM
Location: trunk/src/VBox/VMM
Files: 5 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r47399 → r47407

 /** Function table for the PXOR instruction */
 static const IEMOPMEDIAF2 g_iemAImpl_pxor = { iemAImpl_pxor_u64, iemAImpl_pxor_u128 };
+/** Function table for the PCMPEQB instruction */
+static const IEMOPMEDIAF2 g_iemAImpl_pcmpeqb = { iemAImpl_pcmpeqb_u64, iemAImpl_pcmpeqb_u128 };
+/** Function table for the PCMPEQW instruction */
+static const IEMOPMEDIAF2 g_iemAImpl_pcmpeqw = { iemAImpl_pcmpeqw_u64, iemAImpl_pcmpeqw_u128 };
+/** Function table for the PCMPEQD instruction */
+static const IEMOPMEDIAF2 g_iemAImpl_pcmpeqd = { iemAImpl_pcmpeqd_u64, iemAImpl_pcmpeqd_u128 };
…
  * @param   GCPtrMem            The address of the guest memory.
  */
-static VBOXSTRICTRC iemMemFetchDataU128Aligned(PIEMCPU pIemCpu, uint128_t *pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem)
+static VBOXSTRICTRC iemMemFetchDataU128AlignedSse(PIEMCPU pIemCpu, uint128_t *pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem)
 {
     /* The lazy approach for now... */
     /** @todo testcase: Ordering of \#SS(0) vs \#GP() vs \#PF on SSE stuff. */
-    if (RT_UNLIKELY(GCPtrMem & 15))
+    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->fpu.MXCSR & X86_MSXCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
         return iemRaiseGeneralProtectionFault0(pIemCpu);
…
 /**
- * Stores a data dqword, aligned.
+ * Stores a data dqword, SSE aligned.
  *
  * @returns Strict VBox status code.
…
  * @param   u64Value            The value to store.
  */
-static VBOXSTRICTRC iemMemStoreDataU128Aligned(PIEMCPU pIemCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint128_t u128Value)
+static VBOXSTRICTRC iemMemStoreDataU128AlignedSse(PIEMCPU pIemCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint128_t u128Value)
 {
     /* The lazy approach for now... */
-    if (GCPtrMem & 15)
+    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->fpu.MXCSR & X86_MSXCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
         return iemRaiseGeneralProtectionFault0(pIemCpu);
…
 #define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem) \
     IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128(pIemCpu, &(a_u128Dst), (a_iSeg), (a_GCPtrMem)))
-#define IEM_MC_FETCH_MEM_U128_ALIGN(a_u128Dst, a_iSeg, a_GCPtrMem) \
-    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128Aligned(pIemCpu, &(a_u128Dst), (a_iSeg), (a_GCPtrMem)))
+#define IEM_MC_FETCH_MEM_U128_ALIGN_SSE(a_u128Dst, a_iSeg, a_GCPtrMem) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128AlignedSse(pIemCpu, &(a_u128Dst), (a_iSeg), (a_GCPtrMem)))
…
 #define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Value) \
     IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU128(pIemCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value)))
-#define IEM_MC_STORE_MEM_U128_ALIGN(a_iSeg, a_GCPtrMem, a_u128Value) \
-    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU128Aligned(pIemCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value)))
+#define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU128AlignedSse(pIemCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value)))
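Aside: besides the new PCMPEQx function tables, the functional change in this file is the relaxed alignment check. A misaligned 16-byte SSE access still raises #GP(0), but only while MXCSR.MM is clear; bit 17 of MXCSR is the misaligned-exception mask added by AMD's MISALIGNSSE feature. A minimal stand-alone model of the new predicate, using hypothetical names rather than the changeset's own:

    #include <stdint.h>
    #include <stdbool.h>

    #define MXCSR_MM  (1u << 17)  /* misaligned-exception mask (AMD MISALIGNSSE) */

    /* Models the reworked check in iemMemFetchDataU128AlignedSse /
     * iemMemStoreDataU128AlignedSse: fault only if the guest address is
     * misaligned AND MXCSR.MM is clear. */
    static bool SseAlignedAccessFaults(uint64_t GCPtrMem, uint32_t fMxcsr)
    {
        return (GCPtrMem & 15) != 0 && !(fMxcsr & MXCSR_MM);
    }

As the remaining @todo notes, the check is applied to the effective address before the segment base is added, which may not match real hardware.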
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r47399 → r47407

 IEMIMPL_MEDIA_F2 pxor
+IEMIMPL_MEDIA_F2 pcmpeqb
+IEMIMPL_MEDIA_F2 pcmpeqw
+IEMIMPL_MEDIA_F2 pcmpeqd
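Aside: IEMIMPL_MEDIA_F2 is the assembler template that already generated the pxor_u64/pxor_u128 workers; instantiating it for pcmpeqb/pcmpeqw/pcmpeqd produces workers that execute the named instruction on host registers. For reference, the lane semantics of PCMPEQB (each byte lane becomes all-ones on equality, all-zeros otherwise) can be modelled in C roughly as follows; this is an illustrative sketch, not the changeset's implementation:

    #include <stdint.h>

    /* 64-bit (MMX) PCMPEQB: compare eight byte lanes for equality. */
    static uint64_t PcmpeqbU64(uint64_t uDst, uint64_t uSrc)
    {
        uint64_t uResult = 0;
        for (unsigned iByte = 0; iByte < 8; iByte++)
        {
            uint64_t uMask = UINT64_C(0xff) << (iByte * 8);
            if ((uDst & uMask) == (uSrc & uMask))
                uResult |= uMask;  /* equal lane -> 0xff */
        }
        return uResult;
    }

PCMPEQW and PCMPEQD are the same idea with 16- and 32-bit lanes, and the _u128 variants extend it across the full xmm register.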
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r47399 → r47407

         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-        IEM_MC_FETCH_MEM_U128_ALIGN(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */

         IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
…
         IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
         if (fAligned)
-            IEM_MC_FETCH_MEM_U128_ALIGN(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
         else
             IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
…
             pfnAImpl = iemAImpl_pshufhw;
             break;
+        IEM_NOT_REACHED_DEFAULT_CASE_RET();
     }
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
…
         IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

-        IEM_MC_FETCH_MEM_U128_ALIGN(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
         IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
         IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
…
+/**
+ * Common worker for SSE2 and MMX instructions on the forms:
+ *      pxxx    mm1, mm2/mem64
+ *      pxxx    xmm1, xmm2/mem128
+ *
+ * Proper alignment of the 128-bit operand is enforced.
+ * Exceptions type 4.
+ */
+FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
+    {
+        case IEM_OP_PRF_SIZE_OP: /* SSE */
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(2, 0);
+                IEM_MC_ARG(uint128_t *,          pDst, 0);
+                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
+                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(2, 2);
+                IEM_MC_ARG(uint128_t *,                 pDst,       0);
+                IEM_MC_LOCAL(uint128_t,                 uSrc);
+                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
+                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
+
+                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
+                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        case 0: /* MMX */
+            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+            {
+                /*
+                 * Register, register.
+                 */
+                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_BEGIN(2, 0);
+                IEM_MC_ARG(uint64_t *,          pDst, 0);
+                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
+                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            else
+            {
+                /*
+                 * Register, memory.
+                 */
+                IEM_MC_BEGIN(2, 2);
+                IEM_MC_ARG(uint64_t *,                  pDst,       0);
+                IEM_MC_LOCAL(uint64_t,                  uSrc);
+                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
+                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
+
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
+
+                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+
+                IEM_MC_ADVANCE_RIP();
+                IEM_MC_END();
+            }
+            return VINF_SUCCESS;
+
+        default:
+            return IEMOP_RAISE_INVALID_OPCODE();
+    }
+}
+
+
 /** Opcode 0x0f 0x74. */
-FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq); // NEXT
+FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC("pcmpeqb");
+    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
+}
+
+
 /** Opcode 0x0f 0x75. */
-FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC("pcmpeqw");
+    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
+}
+
+
 /** Opcode 0x0f 0x76. */
-FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC("pcmpeqd");
+    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
+}
+
+
 /** Opcode 0x0f 0x77. */
 FNIEMOP_STUB(iemOp_emms);
…
         IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
         if (fAligned)
-            IEM_MC_STORE_MEM_U128_ALIGN(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
+            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
         else
             IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
…
-/**
- * Common worker for SSE2 and MMX instructions on the form:
- *      pxxxx    [x]mmreg, [x]mmreg/mem[128|64]
- *
- * The 128-bit accesses must be aligned, i.e. exceptions type 4.
- */
-FNIEMOP_DEF_1(iemOpCommonMmxSse_PqVdq_QqWdq, PCIEMOPMEDIAF2, pImpl)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE */
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(2, 0);
-                IEM_MC_ARG(uint128_t *,          pDst, 0);
-                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
-                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
-                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(2, 2);
-                IEM_MC_ARG(uint128_t *,                 pDst,       0);
-                IEM_MC_LOCAL(uint128_t,                 uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_FETCH_MEM_U128_ALIGN(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
-
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
-                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        case 0: /* MMX */
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
-                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(2, 0);
-                IEM_MC_ARG(uint64_t *,          pDst, 0);
-                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
-                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(2, 2);
-                IEM_MC_ARG(uint64_t *,                  pDst,       0);
-                IEM_MC_LOCAL(uint64_t,                  uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
-
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
-}
-
-
 /** Opcode 0x0f 0xef. */
 FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
 {
     IEMOP_MNEMONIC("pxor");
-    return FNIEMOP_CALL_1(iemOpCommonMmxSse_PqVdq_QqWdq, &g_iemAImpl_pxor);
+    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, &g_iemAImpl_pxor);
 }
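Aside: the common worker (renamed from iemOpCommonMmxSse_PqVdq_QqWdq to iemOpCommonMmxSse_FullFull_To_Full and moved next to its users) keys the decode off the legacy prefixes: the operand-size prefix (66h) selects the 128-bit SSE form, no prefix selects the 64-bit MMX form, and the repeat prefixes (F2h/F3h) have no encoding for these opcodes, so they raise #UD. A compact, runnable model of that dispatch, with hypothetical constants standing in for the IEM_OP_PRF_* flags:

    #include <stdint.h>
    #include <stdio.h>

    #define PRF_SIZE_OP 0x100  /* 66h operand-size prefix */
    #define PRF_REPNZ   0x200  /* f2h */
    #define PRF_REPZ    0x400  /* f3h */

    /* Mirrors the worker's switch: 66h -> SSE, none -> MMX, f2h/f3h -> #UD. */
    static const char *DecodePcmpeqForm(uint32_t fPrefixes)
    {
        switch (fPrefixes & (PRF_SIZE_OP | PRF_REPNZ | PRF_REPZ))
        {
            case PRF_SIZE_OP: return "pcmpeqX xmm1, xmm2/m128 (SSE2)";
            case 0:           return "pcmpeqX mm1, mm2/m64 (MMX)";
            default:          return "#UD";
        }
    }

    int main(void)
    {
        printf("66h  -> %s\n", DecodePcmpeqForm(PRF_SIZE_OP));
        printf("none -> %s\n", DecodePcmpeqForm(0));
        printf("f3h  -> %s\n", DecodePcmpeqForm(PRF_REPZ));
        return 0;
    }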
trunk/src/VBox/VMM/include/IEMInternal.h
r47399 → r47407

 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U128,(PCX86FXSTATE pFpuState, uint128_t *pu128Dst, uint128_t const *pu128Src));
 typedef FNIEMAIMPLMEDIAF2U128 *PFNIEMAIMPLMEDIAF2U128;
-FNIEMAIMPLMEDIAF2U64  iemAImpl_pxor_u64;
-FNIEMAIMPLMEDIAF2U128 iemAImpl_pxor_u128;
+FNIEMAIMPLMEDIAF2U64  iemAImpl_pxor_u64,  iemAImpl_pcmpeqb_u64,  iemAImpl_pcmpeqw_u64,  iemAImpl_pcmpeqd_u64;
+FNIEMAIMPLMEDIAF2U128 iemAImpl_pxor_u128, iemAImpl_pcmpeqb_u128, iemAImpl_pcmpeqw_u128, iemAImpl_pcmpeqd_u128;
 /** @} */
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r47400 → r47407

 IEMOPMEDIAF1H1  g_iemAImpl_punpckhqdq;
 IEMOPMEDIAF2    g_iemAImpl_pxor;
+IEMOPMEDIAF2    g_iemAImpl_pcmpeqb;
+IEMOPMEDIAF2    g_iemAImpl_pcmpeqw;
+IEMOPMEDIAF2    g_iemAImpl_pcmpeqd;
…
 #define IEM_MC_FETCH_MEM_R80(a_r80Dst, a_iSeg, a_GCPtrMem)              do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTFLOAT80U, a_r80Dst);} while (0)
 #define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem)            do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst);} while (0)
-#define IEM_MC_FETCH_MEM_U128_ALIGN(a_u128Dst, a_iSeg, a_GCPtrMem)      do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst);} while (0)
+#define IEM_MC_FETCH_MEM_U128_ALIGN_SSE(a_u128Dst, a_iSeg, a_GCPtrMem)  do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst);} while (0)

 #define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value)              do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint8_t, a_u8Value); CHK_SEG_IDX(a_iSeg); } while (0)
…
 #define IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(a_pr80Dst)                 do { CHK_TYPE(PRTFLOAT80U, a_pr80Dst); } while (0)
 #define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Dst)            do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); CHK_SEG_IDX(a_iSeg);} while (0)
-#define IEM_MC_STORE_MEM_U128_ALIGN(a_iSeg, a_GCPtrMem, a_u128Dst)      do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); CHK_SEG_IDX(a_iSeg);} while (0)
+#define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Dst)  do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint128_t, a_u128Dst); CHK_SEG_IDX(a_iSeg);} while (0)

 #define IEM_MC_PUSH_U16(a_u16Value)                                     do {} while (0)