Changeset 66932 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: May 17, 2017 11:02:08 AM (8 years ago)
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
Legend: unmodified lines are shown unprefixed; added lines are prefixed with '+', removed lines with '-'.
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r66920 → r66932

         IEM_MC_INT_CLEAR_ZMM_256_UP(pXStateTmp, a_iYRegDst); \
     } while (0)
+#define IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(a_iYRegDst, a_iYRegSrc64, a_iYRegSrcHx) /* for vmovhlps */ \
+    do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
+         uintptr_t const iYRegDstTmp   = (a_iYRegDst); \
+         uintptr_t const iYRegSrc64Tmp = (a_iYRegSrc64); \
+         uintptr_t const iYRegSrcHxTmp = (a_iYRegSrcHx); \
+         pXStateTmp->x87.aXMM[iYRegDstTmp].au64[0] = pXStateTmp->x87.aXMM[iYRegSrc64Tmp].au64[1]; \
+         pXStateTmp->x87.aXMM[iYRegDstTmp].au64[1] = pXStateTmp->x87.aXMM[iYRegSrcHxTmp].au64[1]; \
+         pXStateTmp->u.YmmHi.aYmmHi[iYRegDstTmp].au64[0] = 0; \
+         pXStateTmp->u.YmmHi.aYmmHi[iYRegDstTmp].au64[1] = 0; \
+         IEM_MC_INT_CLEAR_ZMM_256_UP(pXStateTmp, a_iYRegDst); \
+    } while (0)
+#define IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(a_iYRegDst, a_u64Local, a_iYRegSrcHx) \
+    do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
+         uintptr_t const iYRegDstTmp   = (a_iYRegDst); \
+         uintptr_t const iYRegSrcHxTmp = (a_iYRegSrcHx); \
+         pXStateTmp->x87.aXMM[iYRegDstTmp].au64[0] = (a_u64Local); \
+         pXStateTmp->x87.aXMM[iYRegDstTmp].au64[1] = pXStateTmp->x87.aXMM[iYRegSrcHxTmp].au64[1]; \
+         pXStateTmp->u.YmmHi.aYmmHi[iYRegDstTmp].au64[0] = 0; \
+         pXStateTmp->u.YmmHi.aYmmHi[iYRegDstTmp].au64[1] = 0; \
+         IEM_MC_INT_CLEAR_ZMM_256_UP(pXStateTmp, a_iYRegDst); \
+    } while (0)

 #ifndef IEM_WITH_SETJMP
…
                            & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                       && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )) \
+        { /* likely */ } \
+        else \
+            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
+    } while (0)
+
+/**
+ * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
+ * repnz or size prefixes are present, or if in real or v8086 mode.
+ */
+#define IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_L0() \
+    do \
+    { \
+        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
+                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
+                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
+                      && pVCpu->iem.s.uVexLength == 0)) \
         { /* likely */ } \
         else \
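The two new merge helpers capture the VEX.128 store semantics that VMOVHLPS and VMOVLPS need: the destination's low quadword is replaced, the high quadword is taken from the VEX.vvvv operand, and everything above bit 127 is zeroed (VLMAX zero-extension). A minimal standalone sketch of the same data flow follows; the struct and function names here are illustrative stand-ins, not IEM's actual types:

#include <stdint.h>

/* Illustrative stand-in for one 256-bit guest register as four qwords. */
typedef struct { uint64_t au64[4]; } YREG;

/* Models IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX (the vmovhlps path):
   dst.lo = src64.hi, dst.hi = srcHx.hi, YMM bits 255:128 zeroed. */
void MergeU64HiU64ZxVlmax(YREG *pDst, const YREG *pSrc64, const YREG *pSrcHx)
{
    pDst->au64[0] = pSrc64->au64[1];  /* high qword of the ModR/M.rm source  */
    pDst->au64[1] = pSrcHx->au64[1];  /* high qword of the VEX.vvvv operand  */
    pDst->au64[2] = 0;                /* VEX encodings zero the upper half   */
    pDst->au64[3] = 0;
}

/* Models IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX (the vmovlps path):
   the new low qword is a value just fetched from memory. */
void MergeU64LocalU64ZxVlmax(YREG *pDst, uint64_t u64Local, const YREG *pSrcHx)
{
    pDst->au64[0] = u64Local;
    pDst->au64[1] = pSrcHx->au64[1];
    pDst->au64[2] = 0;
    pDst->au64[3] = 0;
}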
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
r66921 → r66932

     'HdqCss':       ( 'IDX_UseModRM', 'vvvv', '%Hx', 'HdqCss', ),
     'HdqCsd':       ( 'IDX_UseModRM', 'vvvv', '%Hx', 'HdqCsd', ),
+    'HdqCq':        ( 'IDX_UseModRM', 'vvvv', '%Hq', 'HdqCq',  ),

     # Immediate values.
…
     'VEX_RM_REG':   ( 'VEX.ModR/M', [ 'reg', 'rm' ], ),
     'VEX_RM_MEM':   ( 'VEX.ModR/M', [ 'reg', 'rm' ], ),
-    'VEX_XM':       ( 'VEX.ModR/M', [ 'reg', 'rm' ], ), # same as VEX_RM_MEM
+    'VEX_XM':       ( 'VEX.ModR/M', [ 'reg', 'rm' ], ), # same as VEX_RM_MEM, typo?
     'VEX_MR':       ( 'VEX.ModR/M', [ 'rm', 'reg' ], ),
     'VEX_MR_REG':   ( 'VEX.ModR/M', [ 'rm', 'reg' ], ),
…
     'VEX_R':        ( 'VEX.ModR/M', [ 'reg', ], ),
     'VEX_RVM':      ( 'VEX.ModR/M', [ 'reg', 'vvvv', 'rm'], ),
+    'VEX_RVM_REG':  ( 'VEX.ModR/M', [ 'reg', 'vvvv', 'rm'], ),
+    'VEX_RVM_MEM':  ( 'VEX.ModR/M', [ 'reg', 'vvvv', 'rm'], ),
     'VEX_MVR':      ( 'VEX.ModR/M', [ 'rm', 'vvvv', 'reg'], ),
+    'VEX_MVR_REG':  ( 'VEX.ModR/M', [ 'rm', 'vvvv', 'reg'], ),
+    'VEX_MVR_MEM':  ( 'VEX.ModR/M', [ 'rm', 'vvvv', 'reg'], ),

     'FIXED':        ( 'fixed', None, )
…
     '4UA':  [],
     '5':    [],
+    '5LZ':  [],  # LZ = VEX.L must be zero.
     '6':    [],
     '7':    [],
+    '7LZ':  [],
     '8':    [],
     '11':   [],
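These table rows teach the Python decoder generator about the VEX.vvvv-encoded second operand (HdqCq) and the new LZ exception classes ("VEX.L must be zero") used by the instruction below. For orientation, here is a sketch of where those fields live in the third byte of a 3-byte VEX prefix; the type and variable names are my own, not the IEM decoder's:

#include <stdint.h>

/* Byte 3 of a 3-byte VEX prefix is laid out W.vvvv.L.pp; names here are
   illustrative, not VirtualBox's. */
typedef struct { uint8_t iVvvv; uint8_t fVexL; uint8_t bPp; } VEXFIELDS;

VEXFIELDS DecodeVex3Byte3(uint8_t bByte3)
{
    VEXFIELDS Flds;
    Flds.iVvvv = (~bByte3 >> 3) & 0xf; /* vvvv is stored inverted; selects the
                                          extra register operand (e.g. HdqCq) */
    Flds.fVexL = (bByte3 >> 2) & 1;    /* VEX.L: 0 = 128-bit; 'LZ' encodings
                                          raise #UD when L is one             */
    Flds.bPp   = bByte3 & 3;           /* pp: implied 66/F3/F2 prefix         */
    return Flds;
}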
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
r66921 → r66932

-FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps);
-//FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
-//{
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /**
-//         * @ opcode      0x12
-//         * @ opcodesub   11 mr/reg
-//         * @ oppfx       none
-//         * @ opcpuid     sse
-//         * @ opgroup     og_sse_simdfp_datamove
-//         * @ opxcpttype  5
-//         * @ optest      op1=1 op2=2 -> op1=2
-//         * @ optest      op1=0 op2=-42 -> op1=-42
-//         */
-//        IEMOP_MNEMONIC2(RM_REG, VMOVHLPS, vmovhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
-//
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_BEGIN(0, 1);
-//        IEM_MC_LOCAL(uint64_t, uSrc);
-//
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-//        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-//        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /**
-//         * @ opdone
-//         * @ opcode      0x12
-//         * @ opcodesub   !11 mr/reg
-//         * @ oppfx       none
-//         * @ opcpuid     sse
-//         * @ opgroup     og_sse_simdfp_datamove
-//         * @ opxcpttype  5
-//         * @ optest      op1=1 op2=2 -> op1=2
-//         * @ optest      op1=0 op2=-42 -> op1=-42
-//         * @ opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
-//         */
-//        IEMOP_MNEMONIC2(RM_MEM, VMOVLPS, vmovlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
-//
-//        IEM_MC_BEGIN(0, 2);
-//        IEM_MC_LOCAL(uint64_t, uSrc);
-//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-//
-//        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-//        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /**
+         * @opcode      0x12
+         * @opcodesub   11 mr/reg
+         * @oppfx       none
+         * @opcpuid     avx
+         * @opgroup     og_avx_simdfp_datamerge
+         * @opxcpttype  7LZ
+         * @optest      op2=0x22002201220222032204220522062207
+         *              op3=0x3304330533063307
+         *              -> op1=0x22002201220222033304330533063307
+         * @optest      op2=-2 op3=-42 -> op1=-42
+         * @note        op3 is only a 8-byte high XMM register half.
+         */
+        IEMOP_MNEMONIC3(VEX_RVM, VMOVHLPS, vmovhlps, Vq_WO, HdqCq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_L0();
+        IEM_MC_BEGIN(0, 0);
+
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
+        IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
+                                             (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                             pVCpu->iem.s.uVex3rdReg /*Hq*/);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /**
+         * @opdone
+         * @opcode      0x12
+         * @opcodesub   !11 mr/reg
+         * @oppfx       none
+         * @opcpuid     avx
+         * @opgroup     og_avx_simdfp_datamove
+         * @opxcpttype  5LZ
+         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
+         * @optest      op1=1 op2=0 op3=0 -> op1=0
+         * @optest      op1=0 op2=-1 op3=-1 -> op1=-1
+         * @optest      op1=1 op2=0x20000000000000000 op3=3 -> op1=0x20000000000000003
+         * @optest      op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
+         */
+        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HdqCq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t,                  uSrc);
+        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_L0();
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
+
+        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
+                                                uSrc,
+                                                pVCpu->iem.s.uVex3rdReg /*Hq*/);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
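In the register form, ModR/M.reg (extended by VEX.R via uRexReg) selects the destination, ModR/M.rm (extended by VEX.B) the source whose high qword moves down, and VEX.vvvv (uVex3rdReg) the operand whose high qword is preserved. A compact sketch of that data flow, checked against the first @optest vector above; the types and names are illustrative, not IEM's:

#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t lo, hi; } XMMQ; /* illustrative 128-bit register */

/* VMOVHLPS xmm1, xmm2, xmm3: dst.lo = xmm3.hi, dst.hi = xmm2.hi; the VEX
   encoding additionally zeroes the destination's YMM bits 255:128. */
XMMQ VMovHlps(XMMQ Hx /* VEX.vvvv */, XMMQ UqHi /* ModR/M.rm */)
{
    XMMQ Res;
    Res.lo = UqHi.hi;
    Res.hi = Hx.hi;
    return Res;
}

int main(void)
{
    /* Mirrors @optest: op2 supplies the kept high qword, op3 is the high
       half of the rm register that lands in the low qword. */
    XMMQ Hx   = { 0x2204220522062207ULL, 0x2200220122022203ULL };
    XMMQ UqHi = { 0,                     0x3304330533063307ULL };
    XMMQ Res  = VMovHlps(Hx, UqHi);
    /* Prints 22002201220222033304330533063307, matching op1. */
    printf("%016llx%016llx\n", (unsigned long long)Res.hi, (unsigned long long)Res.lo);
    return 0;
}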