Changeset 66921 in vbox for trunk/src/VBox/VMM/VMMAll
Timestamp: May 16, 2017, 7:31:36 PM (8 years ago)
svn:sync-xref-src-repo-rev: 115448
Location: trunk/src/VBox/VMM/VMMAll
Files: 2 edited
Legend:
  (no prefix)  unmodified
  +            added
  -            removed
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (r66920 → r66921)

     'Uss_WO':       ( 'IDX_UseModRM', 'rm', '%Uss', 'Uss', ),
     'Usd':          ( 'IDX_UseModRM', 'rm', '%Usd', 'Usd', ),
+    'Usd_WO':       ( 'IDX_UseModRM', 'rm', '%Usd', 'Usd', ),
     'Nq':           ( 'IDX_UseModRM', 'rm', '%Qq',  'Nq', ),
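The new 'Usd_WO' row mirrors the existing 'Usd' entry; judging from the surrounding rows, the tuple columns are the decoder index, the ModRM field the operand is encoded in, the disassembly format string, and the operand name. A minimal Python sketch of how such a table might be consulted (the dict contents are copied from the diff, but the column meanings and the helper are illustrative assumptions, not the script's actual API):

    # Hypothetical mirror of a few rows of the operand-type table; the
    # column meanings are assumed from context, not taken from the script.
    g_kdOpTypes = {
        'Uss_WO': ('IDX_UseModRM', 'rm', '%Uss', 'Uss'),
        'Usd':    ('IDX_UseModRM', 'rm', '%Usd', 'Usd'),
        'Usd_WO': ('IDX_UseModRM', 'rm', '%Usd', 'Usd'),  # row added by r66921
        'Nq':     ('IDX_UseModRM', 'rm', '%Qq',  'Nq'),
    }

    def describeOperand(sOpType):
        """Summarize one operand-type row (illustrative helper)."""
        sIdx, sWhere, sFmt, sName = g_kdOpTypes[sOpType]
        return '%s: %s, encoded in ModRM.%s, disassembled as %s (%s)' \
             % (sOpType, sIdx, sWhere, sFmt, sName)

    print(describeOperand('Usd_WO'))   # the write-only scalar-double variant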
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h (r66920 → r66921)

-/**
- * @opcode      0x11
- * @oppfx       0xf2
- * @opcpuid     sse2
- * @opgroup     og_sse2_pcksclr_datamove
- * @opxcpttype  5
- * @optest      op1=1 op2=2 -> op1=2
- * @optest      op1=0 op2=-42 -> op1=-42
- */
-FNIEMOP_STUB(iemOp_vmovsd_Wsd_Hx_Vsd);
-//FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
-//{
-//    IEMOP_MNEMONIC2(MR, VMOVSD, vmovsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_BEGIN(0, 1);
-//        IEM_MC_LOCAL(uint64_t, uSrc);
-//
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-//        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Memory, register.
-//         */
-//        IEM_MC_BEGIN(0, 2);
-//        IEM_MC_LOCAL(uint64_t, uSrc);
-//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-//
-//        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
+{
+    Assert(pVCpu->iem.s.uVexLength <= 1);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /**
+         * @opcode      0x11
+         * @oppfx       0xf2
+         * @opcodesub   11 mr/reg
+         * @opcpuid     avx
+         * @opgroup     og_avx_simdfp_datamerge
+         * @opxcpttype  5
+         * @optest      op1=1 op2=0 op3=2 -> op1=2
+         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
+         * @optest      op1=3 op2=-1 op3=0x77 ->
+         *              op1=0xffffffffffffffff0000000000000077
+         */
+        IEMOP_MNEMONIC3(VEX_MVR, VMOVSD, vmovsd, Usd_WO, HdqCsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX();
+        IEM_MC_BEGIN(0, 0);
+
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
+        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
+                                           pVCpu->iem.s.uVex3rdReg /*Hss*/);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /**
+         * @opdone
+         * @opcode      0x11
+         * @oppfx       0xf2
+         * @opcodesub   !11 mr/reg
+         * @opcpuid     avx
+         * @opgroup     og_avx_simdfp_datamove
+         * @opxcpttype  5
+         * @opfunction  iemOp_vmovsd_Wsd_Hsd_Vsd
+         * @optest      op1=1 op2=2 -> op1=2
+         * @optest      op1=0 op2=-22 -> op1=-22
+         */
+        IEMOP_MNEMONIC2(VEX_MR, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+        IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+
+    return VINF_SUCCESS;
+}

…

 /* 0x10 */  iemOp_vmovups_Vps_Wps,             iemOp_vmovupd_Vpd_Wpd,   iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
-/* 0x11 */  iemOp_vmovups_Wps_Vps,             iemOp_vmovupd_Wpd_Vpd,   iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
+/* 0x11 */  iemOp_vmovups_Wps_Vps,             iemOp_vmovupd_Wpd_Vpd,   iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
 /* 0x12 */  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps,  iemOp_vmovlpd_Vq_Hq_Mq,  iemOp_vmovsldup_Vx_Wx,    iemOp_vmovddup_Vx_Wx,
 /* 0x13 */  iemOp_vmovlps_Mq_Vq,               iemOp_vmovlpd_Mq_Vq,     iemOp_InvalidNeedRM,      iemOp_InvalidNeedRM,
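Two details of the new function are worth unpacking: the mod-field test that splits the register and memory encodings, and the register-form merge that takes bits 63:0 from Vsd, bits 127:64 from the VEX.vvvv register (Hsd), and zeroes everything above. The Python sketch below models both; the constants mirror the X86_MODRM_* definitions, and merge_yreg_u64_u64_zx_vlmax is an illustrative model of what IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX computes, not VirtualBox's actual macro:

    # Illustrative model of the ModRM split and the VMOVSD register-form
    # merge; the helpers are assumptions for demonstration only.
    X86_MODRM_MOD_MASK  = 0xC0   # mod: bits 7:6
    X86_MODRM_MOD_SHIFT = 6
    X86_MODRM_REG_SHIFT = 3      # reg: bits 5:3
    X86_MODRM_REG_SMASK = 0x07
    X86_MODRM_RM_MASK   = 0x07   # rm:  bits 2:0

    def is_register_form(bRm):
        """mod == 11b selects the register-to-register encoding."""
        return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)

    def merge_yreg_u64_u64_zx_vlmax(aYRegs, iDst, iSrc64, iSrcHx):
        """Bits 63:0 from iSrc64, bits 127:64 from iSrcHx, and the rest
        of the 256-bit destination zeroed (the ZX_VLMAX part)."""
        uLo = aYRegs[iSrc64] & ((1 << 64) - 1)
        uHi = aYRegs[iSrcHx] & (((1 << 64) - 1) << 64)
        aYRegs[iDst] = uHi | uLo     # bits 255:128 implicitly zero

    aYRegs = [0] * 16
    aYRegs[1] = 0x2222222222222222                    # Vsd: low-qword source
    aYRegs[2] = (0x1111111111111111 << 64) | 0xdead   # Hsd: high-qword source
    bRm = 0xC8                                        # mod=11b, reg=1, rm=0
    assert is_register_form(bRm)
    merge_yreg_u64_u64_zx_vlmax(aYRegs, bRm & X86_MODRM_RM_MASK,
                                (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK,
                                2)                    # 2 plays the vvvv register
    assert aYRegs[0] == (0x1111111111111111 << 64) | 0x2222222222222222

This merge behavior is why the register form lands in og_avx_simdfp_datamerge while the memory form, a plain 64-bit store, stays in og_avx_simdfp_datamove.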