Changeset 66977 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: May 19, 2017 12:30:05 PM (8 years ago)
- svn:sync-xref-src-repo-rev: 115522
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 2 edited
Legend:
- Unmarked lines are unchanged context; lines prefixed with '-' were removed and lines prefixed with '+' were added in r66977.
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
(diff from r66957 to r66977)

@@ -11933,10 +11933,10 @@
 # define IEM_MC_STORE_MEM_U256(a_iSeg, a_GCPtrMem, a_u256Value) \
     IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU256(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value)))
-# define IEM_MC_STORE_MEM_U256_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u256Value) \
-    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU256AlignedSse(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value)))
+# define IEM_MC_STORE_MEM_U256_ALIGN_AVX(a_iSeg, a_GCPtrMem, a_u256Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU256AlignedAvx(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value)))
 #else
 # define IEM_MC_STORE_MEM_U256(a_iSeg, a_GCPtrMem, a_u256Value) \
     iemMemStoreDataU256Jmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
-# define IEM_MC_STORE_MEM_U256_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u256Value) \
-    iemMemStoreDataU256AlignedSseJmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
+# define IEM_MC_STORE_MEM_U256_ALIGN_AVX(a_iSeg, a_GCPtrMem, a_u256Value) \
+    iemMemStoreDataU256AlignedAvxJmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
 #endif
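The IEMAll.cpp change renames the 256-bit aligned-store microcode macro and the helper it forwards to from the SSE-named pair to an AVX-named pair (iemMemStoreDataU256AlignedAvx / iemMemStoreDataU256AlignedAvxJmp). The helper bodies are not part of this changeset; the sketch below only illustrates the architectural rule such an aligned AVX store enforces (a VEX.256 aligned move whose memory operand is not 32-byte aligned raises #GP(0)). The names and types here are illustrative stand-ins, not VirtualBox APIs.

#include <stdint.h>
#include <string.h>

/* Illustrative stand-ins; the real code uses RTUINT256U and VBox status codes. */
typedef struct { uint64_t au64[4]; } U256;
#define SKETCH_SUCCESS    0
#define SKETCH_RAISE_GP0  (-1)  /* stand-in for raising #GP(0) to the guest */

/* Hypothetical aligned 256-bit store: VMOVAPS/VMOVAPD-class accesses must be
   aligned to the operand size, so a VEX.256 store checks 32-byte alignment of
   the guest effective address before writing the data. */
static int sketchStoreU256AlignedAvx(void *pvDst, uint64_t GCPtrMem, const U256 *puSrc)
{
    if (GCPtrMem & 31)                        /* not 32-byte aligned -> fault */
        return SKETCH_RAISE_GP0;
    memcpy(pvDst, puSrc, sizeof(*puSrc));     /* pvDst: host mapping of the guest memory */
    return SKETCH_SUCCESS;
}

The 128-bit macro keeps its _ALIGN_SSE name; a VEX.128 aligned move checks 16-byte alignment the same way the legacy SSE form does, so the new vmovaps code below reuses IEM_MC_STORE_MEM_U128_ALIGN_SSE for its 128-bit path.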
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
(diff from r66976 to r66977)

@@ -1115,5 +1115,5 @@
 
         IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
         if (pVCpu->iem.s.uVexLength == 0)
             IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
@@ -1139,5 +1139,5 @@
             IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
             IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
 
             IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
@@ -1156,5 +1156,5 @@
             IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
             IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
 
             IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
@@ -1193,5 +1193,5 @@
 
         IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
         if (pVCpu->iem.s.uVexLength == 0)
             IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
@@ -1217,5 +1217,5 @@
             IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
             IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
 
             IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
@@ -1234,5 +1234,5 @@
             IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
             IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
-            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
 
             IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
@@ -1266,46 +1266,80 @@
  */
 
-/** Opcode VEX.0F 0x29 - vmovaps Wps, Vps */
-FNIEMOP_STUB(iemOp_vmovaps_Wps_Vps);
-//FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
-//{
-//    IEMOP_MNEMONIC(vmovaps_Wps_Vps, "vmovaps Wps,Vps");
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_BEGIN(0, 0);
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
-//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Memory, register.
-//         */
-//        IEM_MC_BEGIN(0, 2);
-//        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
-//        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-//
-//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+/**
+ * @opcode      0x29
+ * @oppfx       none
+ * @opcpuid     avx
+ * @opgroup     og_avx_pcksclr_datamove
+ * @opxcpttype  1
+ * @optest      op1=1 op2=2 -> op1=2
+ * @optest      op1=0 op2=-42 -> op1=-42
+ * @note        Almost identical to vmovapd.
+ */
+FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
+{
+    IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    Assert(pVCpu->iem.s.uVexLength <= 1);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
+        IEM_MC_BEGIN(1, 0);
+
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
+        if (pVCpu->iem.s.uVexLength == 0)
+            IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        else
+            IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        if (pVCpu->iem.s.uVexLength == 0)
+        {
+            IEM_MC_BEGIN(0, 2);
+            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
+            IEM_MC_LOCAL(RTUINT128U,              uSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+            IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(0, 2);
+            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
+            IEM_MC_LOCAL(RTUINT256U,              uSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+            IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+    }
+    return VINF_SUCCESS;
+}
 
 /** Opcode VEX.66.0F 0x29 - vmovapd Wpd,Vpd */
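In the new register-to-register path, IEM_MC_COPY_YREG_U128_ZX_VLMAX and IEM_MC_COPY_YREG_U256_ZX_VLMAX reflect the VEX rule that a 128-bit encoded operation zeroes the destination register above bit 127, while the 256-bit form copies the whole register. The following is only a rough sketch of that semantic on a toy array-backed register file; the real guest state lives in the CPUM context, and the function names here are illustrative.

#include <stdint.h>
#include <string.h>

/* Toy YMM register file: 16 registers of 256 bits, four 64-bit lanes each. */
typedef struct { uint64_t au64[4]; } YMMREG;
static YMMREG g_aYmm[16];

/* VEX.128 register copy: move the low 128 bits and zero bits 255:128 of the
   destination (zero-extend up to the maximum vector length). */
static void sketchCopyYregU128ZxVlmax(unsigned iYRegDst, unsigned iYRegSrc)
{
    g_aYmm[iYRegDst].au64[0] = g_aYmm[iYRegSrc].au64[0];
    g_aYmm[iYRegDst].au64[1] = g_aYmm[iYRegSrc].au64[1];
    g_aYmm[iYRegDst].au64[2] = 0;
    g_aYmm[iYRegDst].au64[3] = 0;
}

/* VEX.256 register copy: all 256 bits move, so nothing above needs zeroing. */
static void sketchCopyYregU256ZxVlmax(unsigned iYRegDst, unsigned iYRegSrc)
{
    memcpy(&g_aYmm[iYRegDst], &g_aYmm[iYRegSrc], sizeof(YMMREG));
}

The register-to-memory paths, by contrast, only read guest register state (the destination is memory), which is why they actualize the AVX state for read rather than for change.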