Changeset 65750 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp:
- Feb 13, 2017 8:22:13 AM (8 years ago)
- svn:sync-xref-src-repo-rev:
- 113465
Files changed:
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r65749 r65750 3017 3017 3018 3018 3019 3020 /** Opcode 0x0f 0x6e. */ 3021 FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey) 3019 /** Opcode 0x0f 0x6e - movd/q Pd, Ey */ 3020 FNIEMOP_DEF(iemOp_movd_q_Pd_Ey) 3022 3021 { 3023 3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3024 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) 3025 { 3026 case IEM_OP_PRF_SIZE_OP: /* SSE */ 3027 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3028 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq"); 3029 else 3030 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed"); 3031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3032 { 3033 /* XMM, greg*/ 3034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3035 IEM_MC_BEGIN(0, 1); 3036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3038 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3039 { 3040 IEM_MC_LOCAL(uint64_t, u64Tmp); 3041 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3042 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3043 } 3044 else 3045 { 3046 IEM_MC_LOCAL(uint32_t, u32Tmp); 3047 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3048 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3049 } 3050 IEM_MC_ADVANCE_RIP(); 3051 IEM_MC_END(); 3052 } 3053 else 3054 { 3055 /* XMM, [mem] */ 3056 IEM_MC_BEGIN(0, 2); 3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3061 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3062 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3063 { 3064 IEM_MC_LOCAL(uint64_t, u64Tmp); 3065 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3066 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> 
X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3067 } 3068 else 3069 { 3070 IEM_MC_LOCAL(uint32_t, u32Tmp); 3071 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3072 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3073 } 3074 IEM_MC_ADVANCE_RIP(); 3075 IEM_MC_END(); 3076 } 3077 return VINF_SUCCESS; 3078 3079 case 0: /* MMX */ 3080 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3081 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq"); 3082 else 3083 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed"); 3084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3085 { 3086 /* MMX, greg */ 3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3088 IEM_MC_BEGIN(0, 1); 3089 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3090 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3091 IEM_MC_LOCAL(uint64_t, u64Tmp); 3092 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3093 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3094 else 3095 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3096 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3097 IEM_MC_ADVANCE_RIP(); 3098 IEM_MC_END(); 3099 } 3100 else 3101 { 3102 /* MMX, [mem] */ 3103 IEM_MC_BEGIN(0, 2); 3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3105 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3108 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3110 { 3111 IEM_MC_LOCAL(uint64_t, u64Tmp); 3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3113 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3114 } 3115 else 3116 { 3117 IEM_MC_LOCAL(uint32_t, u32Tmp); 3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3119 
IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp); 3120 } 3121 IEM_MC_ADVANCE_RIP(); 3122 IEM_MC_END(); 3123 } 3124 return VINF_SUCCESS; 3125 3126 default: 3127 return IEMOP_RAISE_INVALID_OPCODE(); 3128 } 3129 } 3023 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3024 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq"); 3025 else 3026 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed"); 3027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3028 { 3029 /* MMX, greg */ 3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3031 IEM_MC_BEGIN(0, 1); 3032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3034 IEM_MC_LOCAL(uint64_t, u64Tmp); 3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3036 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3037 else 3038 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3039 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3040 IEM_MC_ADVANCE_RIP(); 3041 IEM_MC_END(); 3042 } 3043 else 3044 { 3045 /* MMX, [mem] */ 3046 IEM_MC_BEGIN(0, 2); 3047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3048 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3051 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3052 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3053 { 3054 IEM_MC_LOCAL(uint64_t, u64Tmp); 3055 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3056 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3057 } 3058 else 3059 { 3060 IEM_MC_LOCAL(uint32_t, u32Tmp); 3061 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3062 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp); 3063 } 3064 IEM_MC_ADVANCE_RIP(); 3065 IEM_MC_END(); 3066 } 3067 return VINF_SUCCESS; 3068 } 3069 3070 /** Opcode 0x66 0x0f 0x6e - 
vmovd/q Vy, Ey */ 3071 FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey) 3072 { 3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3075 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq"); 3076 else 3077 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed"); 3078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3079 { 3080 /* XMM, greg*/ 3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3082 IEM_MC_BEGIN(0, 1); 3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3086 { 3087 IEM_MC_LOCAL(uint64_t, u64Tmp); 3088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3089 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3090 } 3091 else 3092 { 3093 IEM_MC_LOCAL(uint32_t, u32Tmp); 3094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3095 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3096 } 3097 IEM_MC_ADVANCE_RIP(); 3098 IEM_MC_END(); 3099 } 3100 else 3101 { 3102 /* XMM, [mem] */ 3103 IEM_MC_BEGIN(0, 2); 3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3110 { 3111 IEM_MC_LOCAL(uint64_t, u64Tmp); 3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3114 } 3115 else 3116 { 3117 IEM_MC_LOCAL(uint32_t, u32Tmp); 3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3119 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | 
pVCpu->iem.s.uRexReg, u32Tmp); 3120 } 3121 IEM_MC_ADVANCE_RIP(); 3122 IEM_MC_END(); 3123 } 3124 return VINF_SUCCESS; 3125 } 3126 3127 /* Opcode 0xf3 0x0f 0x6e - invalid */ 3130 3128 3131 3129 … … 8126 8124 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8127 8125 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8128 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),8126 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8129 8127 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq), 8130 8128
Note: See TracChangeset for help on using the changeset viewer.