Changeset 103657 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Mar 4, 2024 9:53:49 AM
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 2 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h
(diff from r103648 to r103657)

…
 /**
- * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
+ * Body for instructions like TEST & CMP with a byte memory/registers as
  * operands.
  */
-#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
-    \
+#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
     /* \
      * If rm is denoting a register, no more instruction bytes. \
      */ \
-    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
     { \
         IEM_MC_BEGIN(3, 0, 0, 0); \
         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
         IEM_MC_ARG(uint8_t, u8Src, 1); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
             IEM_MC_LOCAL(uint8_t, u8Dst); \
-            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
…
         } IEM_MC_NATIVE_ELSE() { \
             IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
-            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
             IEM_MC_ARG(uint32_t *, pEFlags, 2); \
             IEM_MC_REF_EFLAGS(pEFlags); \
…
     IEM_MC_BEGIN(3, 3, 0, 0); \
     IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
-    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
     IEMOP_HLP_DONE_DECODING(); \
     IEM_MC_NATIVE_IF(0) { \
…
         IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
         IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
-        IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
         IEM_MC_LOCAL(uint32_t, uEFlags); \
…
         IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
         IEM_MC_ARG(uint8_t, u8Src, 1); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
         IEM_MC_FETCH_EFLAGS(EFlags); \
…
  * memory/register as the destination.
  */
-#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fNativeArchs) \
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
-    \
+#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fNativeArchs) \
     /* \
      * If rm is denoting a register, no more instruction bytes. \
      */ \
-    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
     { \
         switch (pVCpu->iem.s.enmEffOpSize) \
…
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint16_t, u16Src, 1); \
-                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                     IEM_MC_LOCAL(uint16_t, u16Dst); \
-                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
…
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
-                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
…
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint32_t, u32Src, 1); \
-                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                     IEM_MC_LOCAL(uint32_t, u32Dst); \
-                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
…
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
-                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
…
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint64_t, u64Src, 1); \
-                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                     IEM_MC_LOCAL(uint64_t, u64Dst); \
-                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
…
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
-                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
…
         IEM_MC_BEGIN(3, 3, 0, 0); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING(); \
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
…
             IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
-            IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
…
             IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_ARG(uint16_t, u16Src, 1); \
-            IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
…
         IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING(); \
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
…
             IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
-            IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
…
             IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_ARG(uint32_t, u32Src, 1); \
-            IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
…
         IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING(); \
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
…
             IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
-            IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
…
             IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
             IEM_MC_ARG(uint64_t, u64Src, 1); \
-            IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
             IEM_MC_FETCH_EFLAGS(EFlags); \
…
 {
     IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
-    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8, cmp, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, 0);
 }
…
 {
     IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
-    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, cmp, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RO(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, cmp, 0);
 }
…
     IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64);
+
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+
+    /*
+     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
+     * This block only makes a differences when emitting native code, where we'll save a register fetch.
+     */
+    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
+        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
+    {
+        IEM_MC_BEGIN(3, 0, 0, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_ARG(uint8_t, u8Src, 1);
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64) {
+            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
+            IEM_MC_LOCAL(uint32_t, uEFlags);
+            IEM_MC_FETCH_EFLAGS(uEFlags);
+            IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
+            IEM_MC_COMMIT_EFLAGS(uEFlags);
+        } IEM_MC_NATIVE_ELSE() {
+            IEM_MC_ARG(uint8_t *, pu8Dst, 0);
+            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
+            IEM_MC_ARG(uint32_t *, pEFlags, 2);
+            IEM_MC_REF_EFLAGS(pEFlags);
+            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
+        } IEM_MC_NATIVE_ENDIF();
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+
+    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64);
 }
…
     IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, test, RT_ARCH_VAL_AMD64);
+
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+
+    /*
+     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
+     * This block only makes a differences when emitting native code, where we'll save a register fetch.
+     */
+    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
+        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
+    {
+        switch (pVCpu->iem.s.enmEffOpSize)
+        {
+            case IEMMODE_16BIT:
+                IEM_MC_BEGIN(3, 0, 0, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_ARG(uint16_t, u16Src, 1);
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
+                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64) {
+                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
+                    IEM_MC_LOCAL(uint32_t, uEFlags);
+                    IEM_MC_FETCH_EFLAGS(uEFlags);
+                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
+                    IEM_MC_COMMIT_EFLAGS(uEFlags);
+                } IEM_MC_NATIVE_ELSE() {
+                    IEM_MC_ARG(uint16_t *, pu16Dst, 0);
+                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
+                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
+                    IEM_MC_REF_EFLAGS(pEFlags);
+                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
+                } IEM_MC_NATIVE_ENDIF();
+                IEM_MC_ADVANCE_RIP_AND_FINISH();
+                IEM_MC_END();
+                break;
+
+            case IEMMODE_32BIT:
+                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_ARG(uint32_t, u32Src, 1);
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
+                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64) {
+                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
+                    IEM_MC_LOCAL(uint32_t, uEFlags);
+                    IEM_MC_FETCH_EFLAGS(uEFlags);
+                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
+                    IEM_MC_COMMIT_EFLAGS(uEFlags);
+                } IEM_MC_NATIVE_ELSE() {
+                    IEM_MC_ARG(uint32_t *, pu32Dst, 0);
+                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
+                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
+                    IEM_MC_REF_EFLAGS(pEFlags);
+                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
+                } IEM_MC_NATIVE_ENDIF();
+                IEM_MC_ADVANCE_RIP_AND_FINISH();
+                IEM_MC_END();
+                break;
+
+            case IEMMODE_64BIT:
+                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                IEM_MC_ARG(uint64_t, u64Src, 1);
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
+                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64) {
+                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
+                    IEM_MC_LOCAL(uint32_t, uEFlags);
+                    IEM_MC_FETCH_EFLAGS(uEFlags);
+                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
+                    IEM_MC_COMMIT_EFLAGS(uEFlags);
+                } IEM_MC_NATIVE_ELSE() {
+                    IEM_MC_ARG(uint64_t *, pu64Dst, 0);
+                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
+                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
+                    IEM_MC_REF_EFLAGS(pEFlags);
+                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
+                } IEM_MC_NATIVE_ENDIF();
+                IEM_MC_ADVANCE_RIP_AND_FINISH();
+                IEM_MC_END();
+                break;
+
+            IEM_NOT_REACHED_DEFAULT_CASE_RET();
+        }
+    }
+
+    IEMOP_BODY_BINARY_rm_rv_RO(bRm, iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, test, RT_ARCH_VAL_AMD64);
 }
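The special-case check added to test_Eb_Gb/test_Ev_Gv relies on the ModRM byte layout: mod in bits 7:6, reg in bits 5:3, rm in bits 2:0. Comparing bRm >> X86_MODRM_REG_SHIFT against (bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT) holds exactly when mod == 3 (register operand form) and the reg and rm fields name the same register; the uRexReg == uRexB comparison extends that to the REX-extended registers. The following is a minimal standalone sketch of the same predicate, with locally defined constants standing in for the VBox X86_MODRM_*/X86_MOD_* definitions (not taken from the changeset):

    #include <stdint.h>
    #include <stdio.h>

    /* Locally defined ModRM field constants following the standard x86 encoding;
       the real code uses the X86_MODRM_*/X86_MOD_* macros from the VBox headers. */
    #define MODRM_MOD_SHIFT  6
    #define MODRM_REG_SHIFT  3
    #define MODRM_RM_MASK    0x07
    #define MOD_REG          3   /* mod == 3 selects the register operand form */

    /* Returns 1 when the ModRM byte encodes 'reg, same-reg' in register mode,
       i.e. the pattern produced by 'test al,al', 'test ebx,ebx', etc. */
    static int IsTestSameReg(uint8_t bRm, uint8_t fRexR, uint8_t fRexB)
    {
        return (bRm >> MODRM_MOD_SHIFT) == MOD_REG                      /* register operand */
            && ((bRm >> MODRM_REG_SHIFT) & 7) == (bRm & MODRM_RM_MASK)  /* reg field == rm field */
            && fRexR == fRexB;                                          /* REX extensions must match too */
    }

    int main(void)
    {
        printf("%d\n", IsTestSameReg(0xC0, 0, 0)); /* test al,al -> 1 */
        printf("%d\n", IsTestSameReg(0xC1, 0, 0)); /* test cl,al -> 0 */
        printf("%d\n", IsTestSameReg(0xDB, 0, 0)); /* test bl,bl -> 1 */
        return 0;
    }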
trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h
(diff from r103648 to r103657)

…
  * variants, and SF for 32-bit and 64-bit. */
     uint8_t const idxRegDst = iemNativeVarRegisterAcquire(pReNative, idxVarDst, &off, true /*fInitialized*/);
-    uint8_t const idxRegSrc = iemNativeVarRegisterAcquire(pReNative, idxVarSrc, &off, true /*fInitialized*/);
+    uint8_t const idxRegSrc = idxVarSrc == idxVarDst ? idxRegDst /* special case of 'test samereg,samereg' */
+                            : iemNativeVarRegisterAcquire(pReNative, idxVarSrc, &off, true /*fInitialized*/);
 #ifndef RT_ARCH_AMD64
     uint8_t const idxRegResult = iemNativeRegAllocTmp(pReNative, &off);
…
 #endif
     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-    iemNativeVarRegisterRelease(pReNative, idxVarSrc);
+    if (idxVarSrc != idxVarDst)
+        iemNativeVarRegisterRelease(pReNative, idxVarSrc);
     iemNativeVarRegisterRelease(pReNative, idxVarDst);
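This emitter change avoids acquiring the same variable's register twice when the recompiler is asked to TEST a register against itself (the fast path added in IEMAllInstOneByte.cpp.h passes the same variable as both source and destination). The pairing rule it follows: whoever skips the second acquire must also skip the second release. Below is a small illustration of that discipline with invented acquire/release helpers; the names are placeholders for this sketch, not VBox APIs:

    #include <stdint.h>
    #include <stdio.h>

    /* Toy stand-ins for register acquisition; the real code calls
       iemNativeVarRegisterAcquire/Release on the recompiler state. */
    static uint8_t AcquireReg(uint8_t idxVar) { printf("acquire var %u\n", idxVar); return idxVar; }
    static void    ReleaseReg(uint8_t idxVar) { printf("release var %u\n", idxVar); }

    static void EmitTest(uint8_t idxVarDst, uint8_t idxVarSrc)
    {
        uint8_t const idxRegDst = AcquireReg(idxVarDst);
        /* Alias the destination register instead of acquiring the same variable twice. */
        uint8_t const idxRegSrc = idxVarSrc == idxVarDst ? idxRegDst : AcquireReg(idxVarSrc);

        printf("emit: test r%u, r%u\n", idxRegDst, idxRegSrc);

        /* Release exactly as many times as acquired. */
        if (idxVarSrc != idxVarDst)
            ReleaseReg(idxVarSrc);
        ReleaseReg(idxVarDst);
    }

    int main(void)
    {
        EmitTest(1, 2); /* distinct variables: two acquires, two releases */
        EmitTest(3, 3); /* 'test samereg,samereg': single acquire/release */
        return 0;
    }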