Changeset 36857 in vbox
- Timestamp: Apr 27, 2011 2:54:49 PM
- Location: trunk
- Files: 8 edited
trunk/include/VBox/x86.h
(diff r36849 → r36857)

     /** 0x0c - Code selector. */
     uint16_t    CS;
-    uint16_t    Rsvrd1;
+    uint16_t    Rsrvd1;
     /** 0x10 - Data pointer. */
     uint32_t    FPUDP;
-
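The renamed field lives in the structure that mirrors the 512-byte FXSAVE image, which is why its name is purely cosmetic but worth fixing everywhere at once. For orientation, a sketch of the leading part of that image per the FXSAVE documentation; FXSAVEHDR is a hypothetical standalone name (the real type in x86.h is X86FXSTATE):

#include <stdint.h>

/* Leading part of the FXSAVE image (non-REX.W form); offsets per the
 * FXSAVE documentation.  The reserved word after CS at 0x0e is the
 * field renamed Rsvrd1 -> Rsrvd1 in this changeset. */
typedef struct FXSAVEHDR
{
    uint16_t FCW;        /* 0x00 - FPU control word. */
    uint16_t FSW;        /* 0x02 - FPU status word. */
    uint16_t FTW;        /* 0x04 - abridged FPU tag word in the low byte. */
    uint16_t FOP;        /* 0x06 - last FPU opcode. */
    uint32_t FPUIP;      /* 0x08 - instruction pointer. */
    uint16_t CS;         /* 0x0c - code selector. */
    uint16_t Rsrvd1;     /* 0x0e - reserved (the renamed field). */
    uint32_t FPUDP;      /* 0x10 - data pointer. */
    uint16_t DS;         /* 0x14 - data selector. */
    uint16_t Rsrvd2;     /* 0x16 - reserved. */
    uint32_t MXCSR;      /* 0x18 */
    uint32_t MXCSR_MASK; /* 0x1c */
} FXSAVEHDR;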
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r36851 r36857 358 358 iemAImpl_test_u32, NULL, 359 359 iemAImpl_test_u64, NULL 360 }; 361 362 /** Function table for the BT instruction. */ 363 static const IEMOPBINSIZES g_iemAImpl_bt = 364 { 365 NULL, NULL, 366 iemAImpl_bt_u16, NULL, 367 iemAImpl_bt_u32, NULL, 368 iemAImpl_bt_u64, NULL 369 }; 370 371 /** Function table for the BTC instruction. */ 372 static const IEMOPBINSIZES g_iemAImpl_btc = 373 { 374 NULL, NULL, 375 iemAImpl_btc_u16, iemAImpl_btc_u16_locked, 376 iemAImpl_btc_u32, iemAImpl_btc_u32_locked, 377 iemAImpl_btc_u64, iemAImpl_btc_u64_locked 378 }; 379 380 /** Function table for the BTR instruction. */ 381 static const IEMOPBINSIZES g_iemAImpl_btr = 382 { 383 NULL, NULL, 384 iemAImpl_btr_u16, iemAImpl_btr_u16_locked, 385 iemAImpl_btr_u32, iemAImpl_btr_u32_locked, 386 iemAImpl_btr_u64, iemAImpl_btr_u64_locked 387 }; 388 389 /** Function table for the BTS instruction. */ 390 static const IEMOPBINSIZES g_iemAImpl_bts = 391 { 392 NULL, NULL, 393 iemAImpl_bts_u16, iemAImpl_bts_u16_locked, 394 iemAImpl_bts_u32, iemAImpl_bts_u32_locked, 395 iemAImpl_bts_u64, iemAImpl_bts_u64_locked 396 }; 397 398 /** Function table for the BSF instruction. */ 399 static const IEMOPBINSIZES g_iemAImpl_bsf = 400 { 401 NULL, NULL, 402 iemAImpl_bsf_u16, NULL, 403 iemAImpl_bsf_u32, NULL, 404 iemAImpl_bsf_u64, NULL 405 }; 406 407 /** Function table for the BSR instruction. */ 408 static const IEMOPBINSIZES g_iemAImpl_bsr = 409 { 410 NULL, NULL, 411 iemAImpl_bsr_u16, NULL, 412 iemAImpl_bsr_u32, NULL, 413 iemAImpl_bsr_u64, NULL 360 414 }; 361 415 … … 3715 3769 #define IEM_MC_ADD_GREG_U32_TO_LOCAL(a_u32Value, a_iGReg) (a_u32Value) += iemGRegFetchU32(pIemCpu, (a_iGReg)) 3716 3770 #define IEM_MC_ADD_GREG_U64_TO_LOCAL(a_u64Value, a_iGReg) (a_u64Value) += iemGRegFetchU64(pIemCpu, (a_iGReg)) 3771 #define IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(a_EffAddr, a_i16) do { (a_EffAddr) += (a_i16); } while (0) 3772 #define IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(a_EffAddr, a_i32) do { (a_EffAddr) += (a_i32); } while (0) 3773 #define IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(a_EffAddr, a_i64) do { (a_EffAddr) += (a_i64); } while (0) 3774 3775 #define IEM_MC_AND_LOCAL_U16(a_u16Local, a_u16Mask) do { (a_u16Local) &= (a_u16Mask); } while (0) 3776 #define IEM_MC_AND_LOCAL_U32(a_u32Local, a_u32Mask) do { (a_u32Local) &= (a_u32Mask); } while (0) 3777 #define IEM_MC_AND_LOCAL_U64(a_u64Local, a_u64Mask) do { (a_u64Local) &= (a_u64Mask); } while (0) 3778 3779 #define IEM_MC_AND_ARG_U16(a_u16Arg, a_u16Mask) do { (a_u16Arg) &= (a_u16Mask); } while (0) 3780 #define IEM_MC_AND_ARG_U32(a_u32Arg, a_u32Mask) do { (a_u32Arg) &= (a_u32Mask); } while (0) 3781 #define IEM_MC_AND_ARG_U64(a_u64Arg, a_u64Mask) do { (a_u64Arg) &= (a_u64Mask); } while (0) 3782 3783 #define IEM_MC_SAR_LOCAL_S16(a_i16Local, a_cShift) do { (a_i16Local) >>= (a_cShift); } while (0) 3784 #define IEM_MC_SAR_LOCAL_S32(a_i32Local, a_cShift) do { (a_i32Local) >>= (a_cShift); } while (0) 3785 #define IEM_MC_SAR_LOCAL_S64(a_i64Local, a_cShift) do { (a_i64Local) >>= (a_cShift); } while (0) 3786 3787 #define IEM_MC_SHL_LOCAL_S16(a_i16Local, a_cShift) do { (a_i16Local) <<= (a_cShift); } while (0) 3788 #define IEM_MC_SHL_LOCAL_S32(a_i32Local, a_cShift) do { (a_i32Local) <<= (a_cShift); } while (0) 3789 #define IEM_MC_SHL_LOCAL_S64(a_i64Local, a_cShift) do { (a_i64Local) <<= (a_cShift); } while (0) 3717 3790 3718 3791 … … 4010 4083 #ifdef DEBUG 4011 4084 # define IEMOP_MNEMONIC(a_szMnemonic) \ 4012 Log2(("decode - %04x:%08RGv %s\n", pIemCpu->CTX_SUFF(pCtx)->cs, pIemCpu->CTX_SUFF(pCtx)->rip, 
a_szMnemonic)) 4085 Log2(("decode - %04x:%08RGv %s%s\n", pIemCpu->CTX_SUFF(pCtx)->cs, pIemCpu->CTX_SUFF(pCtx)->rip, \ 4086 pIemCpu->fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic)) 4013 4087 # define IEMOP_MNEMONIC2(a_szMnemonic, a_szOps) \ 4014 Log2(("decode - %04x:%08RGv %s %s\n", pIemCpu->CTX_SUFF(pCtx)->cs, pIemCpu->CTX_SUFF(pCtx)->rip, a_szMnemonic, a_szOps)) 4088 Log2(("decode - %04x:%08RGv %s%s %s\n", pIemCpu->CTX_SUFF(pCtx)->cs, pIemCpu->CTX_SUFF(pCtx)->rip, \ 4089 pIemCpu->fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, a_szOps)) 4015 4090 #else 4016 4091 # define IEMOP_MNEMONIC(a_szMnemonic) do { } while (0) … … 4420 4495 pIemCpu->cIOReads = 0; 4421 4496 pIemCpu->cIOWrites = 0; 4422 pIemCpu->fMulDivHack = false; 4423 pIemCpu->fShiftOfHack= false; 4497 pIemCpu->fUndefinedEFlags = 0; 4424 4498 4425 4499 if (IEM_VERIFICATION_ENABLED(pIemCpu)) … … 4620 4694 pIemCpu->cIOWrites++; 4621 4695 return VINF_SUCCESS; 4696 } 4697 4698 4699 /** 4700 * Used to add extra details about a stub case. 4701 * @param pIemCpu The IEM per CPU state. 4702 */ 4703 static void iemVerifyAssertMsg2(PIEMCPU pIemCpu) 4704 { 4705 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx); 4706 PVM pVM = IEMCPU_TO_VM(pIemCpu); 4707 PVMCPU pVCpu = IEMCPU_TO_VMCPU(pIemCpu); 4708 char szRegs[4096]; 4709 DBGFR3RegPrintf(pVM, pVCpu->idCpu, &szRegs[0], sizeof(szRegs), 4710 "rax=%016VR{rax} rbx=%016VR{rbx} rcx=%016VR{rcx} rdx=%016VR{rdx}\n" 4711 "rsi=%016VR{rsi} rdi=%016VR{rdi} r8 =%016VR{r8} r9 =%016VR{r9}\n" 4712 "r10=%016VR{r10} r11=%016VR{r11} r12=%016VR{r12} r13=%016VR{r13}\n" 4713 "r14=%016VR{r14} r15=%016VR{r15} %VRF{rflags}\n" 4714 "rip=%016VR{rip} rsp=%016VR{rsp} rbp=%016VR{rbp}\n" 4715 "cs={%04VR{cs} base=%016VR{cs_base} limit=%08VR{cs_lim} flags=%04VR{cs_attr}} cr0=%016VR{cr0}\n" 4716 "ds={%04VR{ds} base=%016VR{ds_base} limit=%08VR{ds_lim} flags=%04VR{ds_attr}} cr2=%016VR{cr2}\n" 4717 "es={%04VR{es} base=%016VR{es_base} limit=%08VR{es_lim} flags=%04VR{es_attr}} cr3=%016VR{cr3}\n" 4718 "fs={%04VR{fs} base=%016VR{fs_base} limit=%08VR{fs_lim} flags=%04VR{fs_attr}} cr4=%016VR{cr4}\n" 4719 "gs={%04VR{gs} base=%016VR{gs_base} limit=%08VR{gs_lim} flags=%04VR{gs_attr}} cr8=%016VR{cr8}\n" 4720 "ss={%04VR{ss} base=%016VR{ss_base} limit=%08VR{ss_lim} flags=%04VR{ss_attr}}\n" 4721 "dr0=%016VR{dr0} dr1=%016VR{dr1} dr2=%016VR{dr2} dr3=%016VR{dr3}\n" 4722 "dr6=%016VR{dr6} dr7=%016VR{dr7}\n" 4723 "gdtr=%016VR{gdtr_base}:%04VR{gdtr_lim} idtr=%016VR{idtr_base}:%04VR{idtr_lim} rflags=%08VR{rflags}\n" 4724 "ldtr={%04VR{ldtr} base=%016VR{ldtr_base} limit=%08VR{ldtr_lim} flags=%08VR{ldtr_attr}}\n" 4725 "tr ={%04VR{tr} base=%016VR{tr_base} limit=%08VR{tr_lim} flags=%08VR{tr_attr}}\n" 4726 " sysenter={cs=%04VR{sysenter_cs} eip=%08VR{sysenter_eip} esp=%08VR{sysenter_esp}}\n" 4727 " efer=%016VR{efer}\n" 4728 " pat=%016VR{pat}\n" 4729 " sf_mask=%016VR{sf_mask}\n" 4730 "krnl_gs_base=%016VR{krnl_gs_base}\n" 4731 " lstar=%016VR{lstar}\n" 4732 " star=%016VR{star} cstar=%016VR{cstar}\n" 4733 "fcw=%04VR{fcw} fsw=%04VR{fsw} ftw=%04VR{ftw} mxcsr=%04VR{mxcsr} mxcsr_mask=%04VR{mxcsr_mask}\n" 4734 ); 4735 4736 char szInstr1[256]; 4737 DBGFR3DisasInstrEx(pVM, pVCpu->idCpu, pCtx->cs, pCtx->rip - pIemCpu->offOpcode, 4738 DBGF_DISAS_FLAGS_DEFAULT_MODE, 4739 szInstr1, sizeof(szInstr1), NULL); 4740 char szInstr2[256]; 4741 DBGFR3DisasInstrEx(pVM, pVCpu->idCpu, 0, 0, 4742 DBGF_DISAS_FLAGS_CURRENT_GUEST | DBGF_DISAS_FLAGS_DEFAULT_MODE, 4743 szInstr2, sizeof(szInstr2), NULL); 4744 4745 RTAssertMsg2Weak("%s%s\n%s\n", szRegs, szInstr1, szInstr2); 4622 
4746 } 4623 4747 … … 4677 4801 iemVerifyAssertAddRecordDump(pEvtRec1); 4678 4802 iemVerifyAssertAddRecordDump(pEvtRec2); 4679 iem OpStubMsg2(pIemCpu);4803 iemVerifyAssertMsg2(pIemCpu); 4680 4804 RTAssertPanic(); 4681 4805 } … … 4694 4818 RTAssertMsg1(pszMsg, __LINE__, __FILE__, __PRETTY_FUNCTION__); 4695 4819 iemVerifyAssertAddRecordDump(pEvtRec); 4696 iem OpStubMsg2(pIemCpu);4820 iemVerifyAssertMsg2(pIemCpu); 4697 4821 RTAssertPanic(); 4698 4822 } … … 4731 4855 pEvtRec->u.RamWrite.cb, pEvtRec->u.RamWrite.ab); 4732 4856 iemVerifyAssertAddRecordDump(pEvtRec); 4733 iem OpStubMsg2(pIemCpu);4857 iemVerifyAssertMsg2(pIemCpu); 4734 4858 RTAssertPanic(); 4735 4859 } … … 4816 4940 RTAssertMsg2Weak(" the FPU state differs\n"); 4817 4941 cDiffs++; 4942 CHECK_FIELD(fpu.FCW); 4943 CHECK_FIELD(fpu.FSW); 4944 CHECK_FIELD(fpu.FTW); 4945 CHECK_FIELD(fpu.FOP); 4946 CHECK_FIELD(fpu.FPUIP); 4947 CHECK_FIELD(fpu.CS); 4948 CHECK_FIELD(fpu.Rsrvd1); 4949 CHECK_FIELD(fpu.FPUDP); 4950 CHECK_FIELD(fpu.DS); 4951 CHECK_FIELD(fpu.Rsrvd2); 4952 CHECK_FIELD(fpu.MXCSR); 4953 CHECK_FIELD(fpu.MXCSR_MASK); 4954 CHECK_FIELD(fpu.aRegs[0].au64[0]); CHECK_FIELD(fpu.aRegs[0].au64[1]); 4955 CHECK_FIELD(fpu.aRegs[1].au64[0]); CHECK_FIELD(fpu.aRegs[1].au64[1]); 4956 CHECK_FIELD(fpu.aRegs[2].au64[0]); CHECK_FIELD(fpu.aRegs[2].au64[1]); 4957 CHECK_FIELD(fpu.aRegs[3].au64[0]); CHECK_FIELD(fpu.aRegs[3].au64[1]); 4958 CHECK_FIELD(fpu.aRegs[4].au64[0]); CHECK_FIELD(fpu.aRegs[4].au64[1]); 4959 CHECK_FIELD(fpu.aRegs[5].au64[0]); CHECK_FIELD(fpu.aRegs[5].au64[1]); 4960 CHECK_FIELD(fpu.aRegs[6].au64[0]); CHECK_FIELD(fpu.aRegs[6].au64[1]); 4961 CHECK_FIELD(fpu.aRegs[7].au64[0]); CHECK_FIELD(fpu.aRegs[7].au64[1]); 4962 CHECK_FIELD(fpu.aXMM[ 0].au64[0]); CHECK_FIELD(fpu.aXMM[ 0].au64[1]); 4963 CHECK_FIELD(fpu.aXMM[ 1].au64[0]); CHECK_FIELD(fpu.aXMM[ 1].au64[1]); 4964 CHECK_FIELD(fpu.aXMM[ 2].au64[0]); CHECK_FIELD(fpu.aXMM[ 2].au64[1]); 4965 CHECK_FIELD(fpu.aXMM[ 3].au64[0]); CHECK_FIELD(fpu.aXMM[ 3].au64[1]); 4966 CHECK_FIELD(fpu.aXMM[ 4].au64[0]); CHECK_FIELD(fpu.aXMM[ 4].au64[1]); 4967 CHECK_FIELD(fpu.aXMM[ 5].au64[0]); CHECK_FIELD(fpu.aXMM[ 5].au64[1]); 4968 CHECK_FIELD(fpu.aXMM[ 6].au64[0]); CHECK_FIELD(fpu.aXMM[ 6].au64[1]); 4969 CHECK_FIELD(fpu.aXMM[ 7].au64[0]); CHECK_FIELD(fpu.aXMM[ 7].au64[1]); 4970 CHECK_FIELD(fpu.aXMM[ 8].au64[0]); CHECK_FIELD(fpu.aXMM[ 8].au64[1]); 4971 CHECK_FIELD(fpu.aXMM[ 9].au64[0]); CHECK_FIELD(fpu.aXMM[ 9].au64[1]); 4972 CHECK_FIELD(fpu.aXMM[10].au64[0]); CHECK_FIELD(fpu.aXMM[10].au64[1]); 4973 CHECK_FIELD(fpu.aXMM[11].au64[0]); CHECK_FIELD(fpu.aXMM[11].au64[1]); 4974 CHECK_FIELD(fpu.aXMM[12].au64[0]); CHECK_FIELD(fpu.aXMM[12].au64[1]); 4975 CHECK_FIELD(fpu.aXMM[13].au64[0]); CHECK_FIELD(fpu.aXMM[13].au64[1]); 4976 CHECK_FIELD(fpu.aXMM[14].au64[0]); CHECK_FIELD(fpu.aXMM[14].au64[1]); 4977 CHECK_FIELD(fpu.aXMM[15].au64[0]); CHECK_FIELD(fpu.aXMM[15].au64[1]); 4978 for (unsigned i = 0; i < RT_ELEMENTS(pOrgCtx->fpu.au32RsrvdRest); i++) 4979 CHECK_FIELD(fpu.au32RsrvdRest[i]); 4818 4980 } 4819 4981 else … … 4821 4983 } 4822 4984 CHECK_FIELD(rip); 4823 uint32_t fFlagsMask = UINT32_MAX; 4824 if (pIemCpu->fMulDivHack) 4825 fFlagsMask &= ~(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); 4826 if (pIemCpu->fShiftOfHack) 4827 fFlagsMask &= ~(X86_EFL_OF | X86_EFL_AF); 4985 uint32_t fFlagsMask = UINT32_MAX & ~pIemCpu->fUndefinedEFlags; 4828 4986 if ((pOrgCtx->rflags.u & fFlagsMask) != (pDebugCtx->rflags.u & fFlagsMask)) 4829 4987 { … … 4909 5067 { 4910 5068 RTAssertMsg1(NULL, __LINE__, __FILE__, 
__FUNCTION__); 4911 iem OpStubMsg2(pIemCpu);5069 iemVerifyAssertMsg2(pIemCpu); 4912 5070 RTAssertPanic(); 4913 5071 } … … 4996 5154 } 4997 5155 pIemCpu->CTX_SUFF(pCtx) = pOrgCtx; 5156 5157 /* 5158 * HACK ALERT! You don't normally want to verify a whole boot sequence. 5159 */ 5160 if (pIemCpu->cInstructions == 1) 5161 RTLogFlags(NULL, "disabled"); 4998 5162 } 4999 5163 -
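The verifier part of this hunk replaces the two special-purpose booleans (fMulDivHack, fShiftOfHack) with a single per-instruction fUndefinedEFlags mask: each opcode handler declares which flags the instruction leaves architecturally undefined, and the REM comparison masks those bits out, exactly as the new `fFlagsMask = UINT32_MAX & ~pIemCpu->fUndefinedEFlags` line does. A minimal standalone sketch of that comparison (hypothetical helper name, not part of the IEM API):

#include <stdint.h>
#include <stdbool.h>

#define EFL_AF 0x0010u   /* auxiliary carry; undefined after OR/XOR/AND/TEST */

/* Compare two EFLAGS values, ignoring the bits the executed instruction
 * leaves architecturally undefined. */
static bool eflagsMatch(uint32_t fEflIem, uint32_t fEflRem, uint32_t fUndefined)
{
    uint32_t fMask = UINT32_MAX & ~fUndefined;   /* same expression as the hunk */
    return (fEflIem & fMask) == (fEflRem & fMask);
}

/* Usage: after emulating OR, any AF difference is tolerated:
 *     bool fOk = eflagsMatch(fEflAfterIem, fEflAfterRem, EFL_AF);       */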
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r36851 r36857 204 204 mov T0_32, [%1] ; flags 205 205 and T0_32, ~(%2 | %3) ; clear the modified & undefined flags. 206 %ifndef IEM_VERIFICATION_MODE207 206 and T1_32, (%2 | %3) ; select the modified and undefined flags. 208 %else209 and T1_32, (%2) ; select the modified flags, leave the210 ; undefined cleared. This matches REM better.211 %endif212 207 or T0_32, T1_32 ; combine the flags. 213 208 mov [%1], T0_32 ; save the flags. … … 228 223 ; @param 1 The instruction mnemonic. 229 224 ; @param 2 Non-zero if there should be a locked version. 230 ; @param 3 If non-zero, load the affected flags prior to 231 ; execution (for dealing with undefined flags). 232 ; @param 4 The affected flags. 225 ; @param 3 The modified flags. 226 ; @param 4 The undefined flags. 233 227 ; 234 228 %macro IEMIMPL_BIN_OP 4 … … 328 322 IEMIMPL_BIN_OP sub, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0 329 323 IEMIMPL_BIN_OP sbb, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0 330 IEMIMPL_BIN_OP or, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF 331 IEMIMPL_BIN_OP xor, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF 332 IEMIMPL_BIN_OP and, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF 324 IEMIMPL_BIN_OP or, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF, 325 IEMIMPL_BIN_OP xor, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF, 326 IEMIMPL_BIN_OP and, 1, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF, 333 327 IEMIMPL_BIN_OP cmp, 0, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0 334 IEMIMPL_BIN_OP test, 0, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF 328 IEMIMPL_BIN_OP test, 0, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), X86_EFL_AF, 329 330 331 ;; 332 ; Macro for implementing a bit operator. 333 ; 334 ; This will generate code for the 16, 32 and 64 bit accesses with locked 335 ; variants, except on 32-bit system where the 64-bit accesses requires hand 336 ; coding. 337 ; 338 ; All the functions takes a pointer to the destination memory operand in A0, 339 ; the source register operand in A1 and a pointer to eflags in A2. 340 ; 341 ; @param 1 The instruction mnemonic. 342 ; @param 2 Non-zero if there should be a locked version. 343 ; @param 3 The modified flags. 344 ; @param 4 The undefined flags. 345 ; 346 %macro IEMIMPL_BIT_OP 4 347 BEGINPROC iemAImpl_ %+ %1 %+ _u16 348 PROLOGUE_3_ARGS 349 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 350 %1 word [A0], A1_16 351 IEM_SAVE_FLAGS A2, %3, %4 352 EPILOGUE_3_ARGS 353 ret 354 ENDPROC iemAImpl_ %+ %1 %+ _u16 355 356 BEGINPROC iemAImpl_ %+ %1 %+ _u32 357 PROLOGUE_3_ARGS 358 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 359 %1 dword [A0], A1_32 360 IEM_SAVE_FLAGS A2, %3, %4 361 EPILOGUE_3_ARGS 362 ret 363 ENDPROC iemAImpl_ %+ %1 %+ _u32 364 365 %ifdef RT_ARCH_AMD64 366 BEGINPROC iemAImpl_ %+ %1 %+ _u64 367 PROLOGUE_3_ARGS 368 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 369 %1 qword [A0], A1 370 IEM_SAVE_FLAGS A2, %3, %4 371 EPILOGUE_3_ARGS 372 ret 373 ENDPROC iemAImpl_ %+ %1 %+ _u64 374 %else ; stub it for now - later, replace with hand coded stuff. 375 BEGINPROC iemAImpl_ %+ %1 %+ _u64 376 int3 377 ret 378 ENDPROC iemAImpl_ %+ %1 %+ _u64 379 %endif ; !RT_ARCH_AMD64 380 381 %if %2 != 0 ; locked versions requested? 
382 383 BEGINPROC iemAImpl_ %+ %1 %+ _u16_locked 384 PROLOGUE_3_ARGS 385 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 386 lock %1 word [A0], A1_16 387 IEM_SAVE_FLAGS A2, %3, %4 388 EPILOGUE_3_ARGS 389 ret 390 ENDPROC iemAImpl_ %+ %1 %+ _u16_locked 391 392 BEGINPROC iemAImpl_ %+ %1 %+ _u32_locked 393 PROLOGUE_3_ARGS 394 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 395 lock %1 dword [A0], A1_32 396 IEM_SAVE_FLAGS A2, %3, %4 397 EPILOGUE_3_ARGS 398 ret 399 ENDPROC iemAImpl_ %+ %1 %+ _u32_locked 400 401 %ifdef RT_ARCH_AMD64 402 BEGINPROC iemAImpl_ %+ %1 %+ _u64_locked 403 PROLOGUE_3_ARGS 404 IEM_MAYBE_LOAD_FLAGS A2, %3, %4 405 lock %1 qword [A0], A1 406 IEM_SAVE_FLAGS A2, %3, %4 407 EPILOGUE_3_ARGS 408 ret 409 ENDPROC iemAImpl_ %+ %1 %+ _u64_locked 410 %else ; stub it for now - later, replace with hand coded stuff. 411 BEGINPROC iemAImpl_ %+ %1 %+ _u64_locked 412 int3 413 ret 414 ENDPROC iemAImpl_ %+ %1 %+ _u64_locked 415 %endif ; !RT_ARCH_AMD64 416 %endif ; locked 417 %endmacro 418 IEMIMPL_BIT_OP bt, 0, (X86_EFL_CF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF) 419 IEMIMPL_BIT_OP btc, 1, (X86_EFL_CF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF) 420 IEMIMPL_BIT_OP bts, 1, (X86_EFL_CF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF) 421 IEMIMPL_BIT_OP btr, 1, (X86_EFL_CF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF) 422 423 ;; 424 ; Macro for implementing a bit search operator. 425 ; 426 ; This will generate code for the 16, 32 and 64 bit accesses, except on 32-bit 427 ; system where the 64-bit accesses requires hand coding. 428 ; 429 ; All the functions takes a pointer to the destination memory operand in A0, 430 ; the source register operand in A1 and a pointer to eflags in A2. 431 ; 432 ; @param 1 The instruction mnemonic. 433 ; @param 2 The modified flags. 434 ; @param 3 The undefined flags. 435 ; 436 %macro IEMIMPL_BIT_OP 3 437 BEGINPROC iemAImpl_ %+ %1 %+ _u16 438 PROLOGUE_3_ARGS 439 IEM_MAYBE_LOAD_FLAGS A2, %2, %3 440 %1 T0_16, A1_16 441 mov [A0], T0_16 442 IEM_SAVE_FLAGS A2, %2, %3 443 EPILOGUE_3_ARGS 444 ret 445 ENDPROC iemAImpl_ %+ %1 %+ _u16 446 447 BEGINPROC iemAImpl_ %+ %1 %+ _u32 448 PROLOGUE_3_ARGS 449 IEM_MAYBE_LOAD_FLAGS A2, %2, %3 450 %1 T0_32, A1_32 451 mov [A0], T0_32 452 IEM_SAVE_FLAGS A2, %2, %3 453 EPILOGUE_3_ARGS 454 ret 455 ENDPROC iemAImpl_ %+ %1 %+ _u32 456 457 %ifdef RT_ARCH_AMD64 458 BEGINPROC iemAImpl_ %+ %1 %+ _u64 459 PROLOGUE_3_ARGS 460 IEM_MAYBE_LOAD_FLAGS A2, %2, %3 461 %1 T0, A1 462 mov [A0], T0 463 IEM_SAVE_FLAGS A2, %2, %3 464 EPILOGUE_3_ARGS 465 ret 466 ENDPROC iemAImpl_ %+ %1 %+ _u64 467 %else ; stub it for now - later, replace with hand coded stuff. 
468 BEGINPROC iemAImpl_ %+ %1 %+ _u64 469 int3 470 ret 471 ENDPROC iemAImpl_ %+ %1 %+ _u64 472 %endif ; !RT_ARCH_AMD64 473 %endmacro 474 IEMIMPL_BIT_OP bsf, (X86_EFL_ZF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF) 475 IEMIMPL_BIT_OP bsr, (X86_EFL_ZF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF) 335 476 336 477 … … 618 759 IEMIMPL_SHIFT_OP rcl, (X86_EFL_OF | X86_EFL_CF), 0 619 760 IEMIMPL_SHIFT_OP rcr, (X86_EFL_OF | X86_EFL_CF), 0 620 %ifndef IEM_VERIFICATION_MODE621 761 IEMIMPL_SHIFT_OP shl, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), (X86_EFL_AF) 622 762 IEMIMPL_SHIFT_OP shr, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), (X86_EFL_AF) 623 763 IEMIMPL_SHIFT_OP sar, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF), (X86_EFL_AF) 624 %else625 IEMIMPL_SHIFT_OP shl, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF), 0626 IEMIMPL_SHIFT_OP shr, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF), 0627 IEMIMPL_SHIFT_OP sar, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF), 0628 %endif629 764 630 765 -
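The IEM_SAVE_FLAGS change at the top of this file drops the verification-mode special case: both the modified and the undefined flag bits are now taken from the host instruction that was just executed, and mismatches in the undefined ones are handled by the verifier mask instead of being forced to zero. In C terms, the merge the macro performs looks roughly like this (a sketch; the real code operates on the stacked EFLAGS in assembly):

#include <stdint.h>

/* Merge freshly computed flags into the guest EFLAGS image: keep every
 * untouched bit, replace the modified and undefined ones. */
static uint32_t iemMergeFlags(uint32_t fEflGuest, uint32_t fEflHost,
                              uint32_t fModified, uint32_t fUndefined)
{
    uint32_t fTouched = fModified | fUndefined;
    return (fEflGuest & ~fTouched)   /* clear the modified & undefined flags */
         | (fEflHost  &  fTouched);  /* select them from the host result */
}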
trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp.h
(diff r36849 → r36857)

 
 /**
+ * Implements leave.
+ *
+ * We're doing this in C because messing with the stack registers is annoying
+ * since they depends on SS attributes.
+ *
+ * @param   enmEffOpSize    The effective operand size.
+ */
+IEM_CIMPL_DEF_1(iemCImpl_leave, IEMMODE, enmEffOpSize)
+{
+    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
+
+    /* Calculate the intermediate RSP from RBP and the stack attributes. */
+    RTUINT64U       NewRsp;
+    if (pCtx->ssHid.Attr.n.u1Long)
+    {
+        /** @todo Check that LEAVE actually preserve the high EBP bits. */
+        NewRsp.u = pCtx->rsp;
+        NewRsp.Words.w0 = pCtx->bp;
+    }
+    else if (pCtx->ssHid.Attr.n.u1DefBig)
+        NewRsp.u = pCtx->ebp;
+    else
+        NewRsp.u = pCtx->rbp;
+
+    /* Pop RBP according to the operand size. */
+    VBOXSTRICTRC    rcStrict;
+    RTUINT64U       NewRbp;
+    switch (enmEffOpSize)
+    {
+        case IEMMODE_16BIT:
+            NewRbp.u = pCtx->rbp;
+            rcStrict = iemMemStackPopU16Ex(pIemCpu, &NewRbp.Words.w0, &NewRsp);
+            break;
+        case IEMMODE_32BIT:
+            NewRbp.u = 0;
+            rcStrict = iemMemStackPopU32Ex(pIemCpu, &NewRbp.DWords.dw0, &NewRsp);
+            break;
+        case IEMMODE_64BIT:
+            rcStrict = iemMemStackPopU64Ex(pIemCpu, &NewRbp.u, &NewRsp);
+            break;
+        IEM_NOT_REACHED_DEFAULT_CASE_RET();
+    }
+    if (rcStrict != VINF_SUCCESS)
+        return rcStrict;
+
+
+    /* Commit it. */
+    pCtx->rbp = NewRbp.u;
+    pCtx->rsp = NewRsp.u;
+    iemRegAddToRip(pIemCpu, cbInstr);
+
+    return VINF_SUCCESS;
+}
+
+
+/**
  * Implements int3 and int XX.
  *
-
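For reference, LEAVE is architecturally MOV (e/r)SP,(e/r)BP followed by POP (e/r)BP; the widths are picked independently by the SS attributes (stack size) and the operand size, which is the juggling the C implementation above does. A toy flat-memory model of the simplest case (hypothetical types, not the IEM API):

#include <stdint.h>

/* Toy flat-memory model: guest byte address a maps to au64Mem[a / 8]. */
typedef struct MINICPU { uint64_t rsp, rbp; uint64_t au64Mem[1024]; } MINICPU;

/* 64-bit LEAVE with a 64-bit operand size: rsp <- rbp, then rbp <- pop().
 * The implementation above additionally handles 16/32-bit stacks (SS
 * attributes) and 16/32-bit operand sizes, which this sketch omits. */
static void miniLeave64(MINICPU *pCpu)
{
    pCpu->rsp  = pCpu->rbp;                     /* unwind the frame       */
    pCpu->rbp  = pCpu->au64Mem[pCpu->rsp / 8];  /* pop saved frame pointer */
    pCpu->rsp += 8;
}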
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r36851 r36857 2661 2661 2662 2662 2663 /** 2664 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and 2665 * iemOp_bts_Ev_Gv. 2666 */ 2667 FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl) 2668 { 2669 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); 2670 IEMOP_HLP_NO_LOCK_PREFIX(); 2671 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 2672 2673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2674 { 2675 /* register destination. */ 2676 IEMOP_HLP_NO_LOCK_PREFIX(); 2677 switch (pIemCpu->enmEffOpSize) 2678 { 2679 case IEMMODE_16BIT: 2680 IEM_MC_BEGIN(3, 0); 2681 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 2682 IEM_MC_ARG(uint16_t, u16Src, 1); 2683 IEM_MC_ARG(uint32_t *, pEFlags, 2); 2684 2685 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2686 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); 2687 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 2688 IEM_MC_REF_EFLAGS(pEFlags); 2689 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 2690 2691 IEM_MC_ADVANCE_RIP(); 2692 IEM_MC_END(); 2693 return VINF_SUCCESS; 2694 2695 case IEMMODE_32BIT: 2696 IEM_MC_BEGIN(3, 0); 2697 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 2698 IEM_MC_ARG(uint32_t, u32Src, 1); 2699 IEM_MC_ARG(uint32_t *, pEFlags, 2); 2700 2701 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2702 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); 2703 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 2704 IEM_MC_REF_EFLAGS(pEFlags); 2705 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 2706 2707 IEM_MC_ADVANCE_RIP(); 2708 IEM_MC_END(); 2709 return VINF_SUCCESS; 2710 2711 case IEMMODE_64BIT: 2712 IEM_MC_BEGIN(3, 0); 2713 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 2714 IEM_MC_ARG(uint64_t, u64Src, 1); 2715 IEM_MC_ARG(uint32_t *, pEFlags, 2); 2716 2717 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2718 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); 2719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 2720 IEM_MC_REF_EFLAGS(pEFlags); 2721 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 2722 2723 IEM_MC_ADVANCE_RIP(); 2724 IEM_MC_END(); 2725 return VINF_SUCCESS; 2726 2727 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 2728 } 2729 } 2730 else 2731 { 2732 /* memory destination. */ 2733 2734 uint32_t fAccess; 2735 if (pImpl->pfnLockedU16) 2736 fAccess = IEM_ACCESS_DATA_RW; 2737 else /* BT */ 2738 { 2739 IEMOP_HLP_NO_LOCK_PREFIX(); 2740 fAccess = IEM_ACCESS_DATA_R; 2741 } 2742 2743 /** @todo test negative bit offsets! 
*/ 2744 switch (pIemCpu->enmEffOpSize) 2745 { 2746 case IEMMODE_16BIT: 2747 IEM_MC_BEGIN(3, 2); 2748 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 2749 IEM_MC_ARG(uint16_t, u16Src, 1); 2750 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 2751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 2752 IEM_MC_LOCAL(int16_t, i16AddrAdj); 2753 2754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 2755 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2756 IEM_MC_ASSIGN(i16AddrAdj, u16Src); 2757 IEM_MC_AND_ARG_U16(u16Src, 0x0f); 2758 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); 2759 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1); 2760 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); 2761 IEM_MC_FETCH_EFLAGS(EFlags); 2762 2763 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 2764 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 2765 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 2766 else 2767 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 2768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 2769 2770 IEM_MC_COMMIT_EFLAGS(EFlags); 2771 IEM_MC_ADVANCE_RIP(); 2772 IEM_MC_END(); 2773 return VINF_SUCCESS; 2774 2775 case IEMMODE_32BIT: 2776 IEM_MC_BEGIN(3, 2); 2777 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 2778 IEM_MC_ARG(uint32_t, u32Src, 1); 2779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 2780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 2781 IEM_MC_LOCAL(int32_t, i32AddrAdj); 2782 2783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 2784 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2785 IEM_MC_ASSIGN(i32AddrAdj, u32Src); 2786 IEM_MC_AND_ARG_U32(u32Src, 0x1f); 2787 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); 2788 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); 2789 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); 2790 IEM_MC_FETCH_EFLAGS(EFlags); 2791 2792 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 2793 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 2794 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 2795 else 2796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 2797 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 2798 2799 IEM_MC_COMMIT_EFLAGS(EFlags); 2800 IEM_MC_ADVANCE_RIP(); 2801 IEM_MC_END(); 2802 return VINF_SUCCESS; 2803 2804 case IEMMODE_64BIT: 2805 IEM_MC_BEGIN(3, 2); 2806 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 2807 IEM_MC_ARG(uint64_t, u64Src, 1); 2808 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 2809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 2810 IEM_MC_LOCAL(int64_t, i64AddrAdj); 2811 2812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 2813 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); 2814 IEM_MC_ASSIGN(i64AddrAdj, u64Src); 2815 IEM_MC_AND_ARG_U64(u64Src, 0x3f); 2816 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); 2817 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); 2818 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); 2819 IEM_MC_FETCH_EFLAGS(EFlags); 2820 2821 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 2822 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 2823 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 2824 else 2825 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 2826 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 2827 2828 IEM_MC_COMMIT_EFLAGS(EFlags); 2829 IEM_MC_ADVANCE_RIP(); 2830 IEM_MC_END(); 2831 return VINF_SUCCESS; 2832 
2833 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 2834 } 2835 } 2836 } 2837 2838 2663 2839 /** Opcode 0x0f 0xa3. */ 2664 FNIEMOP_STUB(iemOp_bt_Ev_Gv); 2840 FNIEMOP_DEF(iemOp_bt_Ev_Gv) 2841 { 2842 IEMOP_MNEMONIC("bt Gv,Mp"); 2843 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt); 2844 } 2665 2845 2666 2846 … … 2672 2852 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); 2673 2853 IEMOP_HLP_NO_LOCK_PREFIX(); 2854 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); 2674 2855 2675 2856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 2816 2997 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); 2817 2998 IEMOP_HLP_NO_LOCK_PREFIX(); 2999 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); 2818 3000 2819 3001 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 2989 3171 /** Opcode 0x0f 0xaa. */ 2990 3172 FNIEMOP_STUB(iemOp_rsm); 3173 3174 2991 3175 /** Opcode 0x0f 0xab. */ 2992 FNIEMOP_STUB(iemOp_bts_Ev_Gv); 3176 FNIEMOP_DEF(iemOp_bts_Ev_Gv) 3177 { 3178 IEMOP_MNEMONIC("bts Gv,Mp"); 3179 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts); 3180 } 2993 3181 2994 3182 … … 3017 3205 { 3018 3206 IEMOP_MNEMONIC("imul Gv,Ev"); 3019 #ifdef IEM_VERIFICATION_MODE 3020 pIemCpu->fMulDivHack = true; 3021 #endif 3207 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 3022 3208 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two); 3023 3209 } … … 3101 3287 3102 3288 /** Opcode 0x0f 0xb3. */ 3103 FNIEMOP_STUB(iemOp_btr_Ev_Gv); 3289 FNIEMOP_DEF(iemOp_btr_Ev_Gv) 3290 { 3291 IEMOP_MNEMONIC("btr Gv,Mp"); 3292 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr); 3293 } 3104 3294 3105 3295 … … 3282 3472 /** Opcode 0x0f 0xb9. */ 3283 3473 FNIEMOP_STUB(iemOp_Grp10); 3474 3475 3284 3476 /** Opcode 0x0f 0xba. */ 3285 FNIEMOP_STUB(iemOp_Grp11); 3477 FNIEMOP_DEF(iemOp_Grp8) 3478 { 3479 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); 3480 PCIEMOPBINSIZES pImpl; 3481 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) 3482 { 3483 case 0: case 1: case 2: case 3: 3484 return IEMOP_RAISE_INVALID_OPCODE(); 3485 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break; 3486 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break; 3487 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break; 3488 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break; 3489 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3490 } 3491 IEMOP_HLP_NO_LOCK_PREFIX(); 3492 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 3493 3494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3495 { 3496 /* register destination. 
*/ 3497 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Bit); 3498 IEMOP_HLP_NO_LOCK_PREFIX(); 3499 3500 switch (pIemCpu->enmEffOpSize) 3501 { 3502 case IEMMODE_16BIT: 3503 IEM_MC_BEGIN(3, 0); 3504 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3505 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); 3506 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3507 3508 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 3509 IEM_MC_REF_EFLAGS(pEFlags); 3510 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3511 3512 IEM_MC_ADVANCE_RIP(); 3513 IEM_MC_END(); 3514 return VINF_SUCCESS; 3515 3516 case IEMMODE_32BIT: 3517 IEM_MC_BEGIN(3, 0); 3518 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3519 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); 3520 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3521 3522 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 3523 IEM_MC_REF_EFLAGS(pEFlags); 3524 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 3525 3526 IEM_MC_ADVANCE_RIP(); 3527 IEM_MC_END(); 3528 return VINF_SUCCESS; 3529 3530 case IEMMODE_64BIT: 3531 IEM_MC_BEGIN(3, 0); 3532 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 3533 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); 3534 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3535 3536 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); 3537 IEM_MC_REF_EFLAGS(pEFlags); 3538 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 3539 3540 IEM_MC_ADVANCE_RIP(); 3541 IEM_MC_END(); 3542 return VINF_SUCCESS; 3543 3544 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3545 } 3546 } 3547 else 3548 { 3549 /* memory destination. */ 3550 3551 uint32_t fAccess; 3552 if (pImpl->pfnLockedU16) 3553 fAccess = IEM_ACCESS_DATA_RW; 3554 else /* BT */ 3555 { 3556 IEMOP_HLP_NO_LOCK_PREFIX(); 3557 fAccess = IEM_ACCESS_DATA_R; 3558 } 3559 3560 /** @todo test negative bit offsets! 
*/ 3561 switch (pIemCpu->enmEffOpSize) 3562 { 3563 case IEMMODE_16BIT: 3564 IEM_MC_BEGIN(3, 1); 3565 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3566 IEM_MC_ARG(uint16_t, u16Src, 1); 3567 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3569 3570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 3571 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Bit); 3572 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f); 3573 IEM_MC_FETCH_EFLAGS(EFlags); 3574 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 3575 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 3576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3577 else 3578 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 3579 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 3580 3581 IEM_MC_COMMIT_EFLAGS(EFlags); 3582 IEM_MC_ADVANCE_RIP(); 3583 IEM_MC_END(); 3584 return VINF_SUCCESS; 3585 3586 case IEMMODE_32BIT: 3587 IEM_MC_BEGIN(3, 1); 3588 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3589 IEM_MC_ARG(uint32_t, u32Src, 1); 3590 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3592 3593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 3594 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Bit); 3595 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f); 3596 IEM_MC_FETCH_EFLAGS(EFlags); 3597 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 3598 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 3599 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 3600 else 3601 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 3602 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 3603 3604 IEM_MC_COMMIT_EFLAGS(EFlags); 3605 IEM_MC_ADVANCE_RIP(); 3606 IEM_MC_END(); 3607 return VINF_SUCCESS; 3608 3609 case IEMMODE_64BIT: 3610 IEM_MC_BEGIN(3, 1); 3611 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 3612 IEM_MC_ARG(uint64_t, u64Src, 1); 3613 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3615 3616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm); 3617 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Bit); 3618 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f); 3619 IEM_MC_FETCH_EFLAGS(EFlags); 3620 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0); 3621 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)) 3622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 3623 else 3624 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 3625 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 3626 3627 IEM_MC_COMMIT_EFLAGS(EFlags); 3628 IEM_MC_ADVANCE_RIP(); 3629 IEM_MC_END(); 3630 return VINF_SUCCESS; 3631 3632 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3633 } 3634 } 3635 3636 } 3637 3638 3286 3639 /** Opcode 0x0f 0xbb. */ 3287 FNIEMOP_STUB(iemOp_btc_Ev_Gv); 3640 FNIEMOP_DEF(iemOp_btc_Ev_Gv) 3641 { 3642 IEMOP_MNEMONIC("btc Gv,Mp"); 3643 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc); 3644 } 3645 3646 3288 3647 /** Opcode 0x0f 0xbc. */ 3289 FNIEMOP_STUB(iemOp_bsf_Gv_Ev); 3648 FNIEMOP_DEF(iemOp_bsf_Gv_Ev) 3649 { 3650 IEMOP_MNEMONIC("bsf Gv,Ev"); 3651 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); 3652 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf); 3653 } 3654 3655 3290 3656 /** Opcode 0x0f 0xbd. 
*/ 3291 FNIEMOP_STUB(iemOp_bsr_Gv_Ev); 3657 FNIEMOP_DEF(iemOp_bsr_Gv_Ev) 3658 { 3659 IEMOP_MNEMONIC("bsr Gv,Ev"); 3660 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); 3661 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr); 3662 } 3663 3292 3664 3293 3665 /** Opcode 0x0f 0xbe. */ … … 3670 4042 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb, iemOp_cmpxchg_Ev_Gv, iemOp_lss_Gv_Mp, iemOp_btr_Ev_Gv, 3671 4043 /* 0xb4 */ iemOp_lfs_Gv_Mp, iemOp_lgs_Gv_Mp, iemOp_movzx_Gv_Eb, iemOp_movzx_Gv_Ew, 3672 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,iemOp_Grp10, iemOp_Grp 11,iemOp_btc_Ev_Gv,4044 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,iemOp_Grp10, iemOp_Grp8, iemOp_btc_Ev_Gv, 3673 4045 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_movsx_Gv_Eb, iemOp_movsx_Gv_Ew, 3674 4046 /* 0xc0 */ iemOp_xadd_Eb_Gb, … … 3810 4182 { 3811 4183 IEMOP_MNEMONIC("or Eb,Gb"); 4184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3812 4185 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or); 3813 4186 } … … 3818 4191 { 3819 4192 IEMOP_MNEMONIC("or Ev,Gv "); 4193 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3820 4194 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or); 3821 4195 } … … 3826 4200 { 3827 4201 IEMOP_MNEMONIC("or Gb,Eb"); 4202 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3828 4203 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or); 3829 4204 } … … 3834 4209 { 3835 4210 IEMOP_MNEMONIC("or Gv,Ev"); 4211 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3836 4212 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or); 3837 4213 } … … 3842 4218 { 3843 4219 IEMOP_MNEMONIC("or al,Ib"); 4220 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3844 4221 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or); 3845 4222 } … … 3850 4227 { 3851 4228 IEMOP_MNEMONIC("or rAX,Iz"); 4229 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 3852 4230 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or); 3853 4231 } … … 4005 4383 { 4006 4384 IEMOP_MNEMONIC("and Eb,Gb"); 4385 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4007 4386 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and); 4008 4387 } … … 4013 4392 { 4014 4393 IEMOP_MNEMONIC("and Ev,Gv"); 4394 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4015 4395 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and); 4016 4396 } … … 4021 4401 { 4022 4402 IEMOP_MNEMONIC("and Gb,Eb"); 4403 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4023 4404 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and); 4024 4405 } … … 4029 4410 { 4030 4411 IEMOP_MNEMONIC("and Gv,Ev"); 4412 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4031 4413 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and); 4032 4414 } … … 4037 4419 { 4038 4420 IEMOP_MNEMONIC("and al,Ib"); 4421 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4039 4422 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and); 4040 4423 } … … 4045 4428 { 4046 4429 IEMOP_MNEMONIC("and rAX,Iz"); 4430 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4047 4431 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and); 4048 4432 } … … 4131 4515 { 4132 4516 IEMOP_MNEMONIC("xor Eb,Gb"); 4517 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4133 4518 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor); 4134 4519 } … … 4139 4524 { 4140 4525 IEMOP_MNEMONIC("xor Ev,Gv"); 4526 
IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4141 4527 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor); 4142 4528 } … … 4147 4533 { 4148 4534 IEMOP_MNEMONIC("xor Gb,Eb"); 4535 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4149 4536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor); 4150 4537 } … … 4155 4542 { 4156 4543 IEMOP_MNEMONIC("xor Gv,Ev"); 4544 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4157 4545 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor); 4158 4546 } … … 4163 4551 { 4164 4552 IEMOP_MNEMONIC("xor al,Ib"); 4553 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4165 4554 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor); 4166 4555 } … … 4171 4560 { 4172 4561 IEMOP_MNEMONIC("xor rAX,Iz"); 4562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 4173 4563 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor); 4174 4564 } … … 4992 5382 IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */ 4993 5383 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); 5384 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 4994 5385 4995 5386 switch (pIemCpu->enmEffOpSize) … … 5168 5559 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm); 5169 5560 IEMOP_HLP_NO_LOCK_PREFIX(); 5561 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 5170 5562 5171 5563 switch (pIemCpu->enmEffOpSize) … … 6190 6582 IEMOP_MNEMONIC("test Eb,Gb"); 6191 6583 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */ 6584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 6192 6585 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test); 6193 6586 } … … 6199 6592 IEMOP_MNEMONIC("test Ev,Gv"); 6200 6593 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */ 6594 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 6201 6595 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test); 6202 6596 } … … 7628 8022 { 7629 8023 IEMOP_MNEMONIC("test al,Ib"); 8024 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 7630 8025 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test); 7631 8026 } … … 7636 8031 { 7637 8032 IEMOP_MNEMONIC("test rAX,Iz"); 8033 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 7638 8034 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test); 7639 8035 } … … 8311 8707 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */ 8312 8708 } 8313 #ifdef IEM_VERIFICATION_MODE 8314 pIemCpu->fShiftOfHack = true; 8315 #endif 8709 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8316 8710 8317 8711 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 8373 8767 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */ 8374 8768 } 8375 #ifdef IEM_VERIFICATION_MODE 8376 pIemCpu->fShiftOfHack = true; 8377 #endif 8769 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8378 8770 8379 8771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 8651 9043 /** Opcode 0xc8. */ 8652 9044 FNIEMOP_STUB(iemOp_enter_Iw_Ib); 9045 9046 8653 9047 /** Opcode 0xc9. 
*/ 8654 FNIEMOP_STUB(iemOp_leave); 9048 FNIEMOP_DEF(iemOp_leave) 9049 { 9050 IEMOP_MNEMONIC("retn"); 9051 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 9052 IEMOP_HLP_NO_LOCK_PREFIX(); 9053 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize); 9054 } 8655 9055 8656 9056 … … 8729 9129 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */ 8730 9130 } 8731 #ifdef IEM_VERIFICATION_MODE 8732 pIemCpu->fShiftOfHack = true; 8733 #endif 9131 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8734 9132 8735 9133 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 8789 9187 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */ 8790 9188 } 8791 #ifdef IEM_VERIFICATION_MODE 8792 pIemCpu->fShiftOfHack = true; 8793 #endif 9189 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8794 9190 8795 9191 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 8921 9317 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */ 8922 9318 } 8923 #ifdef IEM_VERIFICATION_MODE 8924 pIemCpu->fShiftOfHack = true; 8925 #endif 9319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8926 9320 8927 9321 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 8982 9376 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */ 8983 9377 } 8984 #ifdef IEM_VERIFICATION_MODE 8985 pIemCpu->fShiftOfHack = true; 8986 #endif 9378 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); 8987 9379 8988 9380 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 9923 10315 { 9924 10316 IEMOP_MNEMONIC("test Eb,Ib"); 10317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 9925 10318 9926 10319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 9972 10365 IEMOP_MNEMONIC("test Ev,Iv"); 9973 10366 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */ 10367 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); 9974 10368 9975 10369 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 10107 10501 { 10108 10502 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */ 10109 #ifdef IEM_VERIFICATION_MODE10110 pIemCpu->fMulDivHack = true;10111 #endif10112 10503 10113 10504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 10154 10545 { 10155 10546 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? 
*/ 10156 #ifdef IEM_VERIFICATION_MODE 10157 pIemCpu->fMulDivHack = true; 10158 #endif 10547 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 10159 10548 10160 10549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) … … 10350 10739 case 4: 10351 10740 IEMOP_MNEMONIC("mul Eb"); 10741 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 10352 10742 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_mul_u8); 10353 10743 case 5: 10354 10744 IEMOP_MNEMONIC("imul Eb"); 10745 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 10355 10746 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_imul_u8); 10356 10747 case 6: 10357 10748 IEMOP_MNEMONIC("div Eb"); 10749 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF); 10358 10750 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_div_u8); 10359 10751 case 7: 10360 10752 IEMOP_MNEMONIC("idiv Eb"); 10753 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF); 10361 10754 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_idiv_u8); 10362 10755 IEM_NOT_REACHED_DEFAULT_CASE_RET(); … … 10383 10776 case 4: 10384 10777 IEMOP_MNEMONIC("mul Ev"); 10778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 10385 10779 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul); 10386 10780 case 5: 10387 10781 IEMOP_MNEMONIC("imul Ev"); 10782 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 10388 10783 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul); 10389 10784 case 6: 10390 10785 IEMOP_MNEMONIC("div Ev"); 10786 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF); 10391 10787 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div); 10392 10788 case 7: 10393 10789 IEMOP_MNEMONIC("idiv Ev"); 10790 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF); 10394 10791 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv); 10395 10792 IEM_NOT_REACHED_DEFAULT_CASE_RET(); -
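The common bit-op worker introduced in this file implements the subtle part of BT/BTS/BTR/BTC with a memory operand: the register bit offset is signed and may address bits outside the byte the ModR/M encoding points at, so the effective address is first displaced by whole operand-sized units and only then is the offset masked down. For the 32-bit path, the IEM_MC_SAR_LOCAL_S32(…, 5) / IEM_MC_SHL_LOCAL_S32(…, 2) / IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR sequence computes the following (a sketch with a hypothetical helper name):

#include <stdint.h>

/* Split a signed BT/BTS/BTR/BTC bit offset into an effective-address
 * displacement and an in-operand bit index, mirroring the 32-bit path
 * above (SAR by 5, SHL by 2, then AND the source with 0x1f). */
static void btCalcAddr32(uint64_t GCPtrEff, int32_t i32BitOffset,
                         uint64_t *pGCPtrFinal, uint32_t *pu32BitNo)
{
    /* >>5 on a negative value is an arithmetic shift here, like SAR;
     * multiply instead of <<2 to keep the sign without shift UB. */
    int64_t offDisp = (int64_t)(i32BitOffset >> 5) * 4;
    *pGCPtrFinal = GCPtrEff + (uint64_t)offDisp;         /* displaced address */
    *pu32BitNo   = (uint32_t)i32BitOffset & 0x1f;        /* bit in that dword */
}

With i32BitOffset = -1 this yields GCPtrEff - 4 and bit 31, i.e. the top bit of the preceding dword, which is exactly the case the "@todo test negative bit offsets!" notes in the hunks above are about.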
trunk/src/VBox/VMM/VMMR0/HWACCMR0.cpp
(diff r36256 → r36857)

                     ,
                     pCtx->fpu.FCW, pCtx->fpu.FSW, pCtx->fpu.FTW,
-                    pCtx->fpu.FOP, pCtx->fpu.FPUIP, pCtx->fpu.CS, pCtx->fpu.Rsvrd1,
+                    pCtx->fpu.FOP, pCtx->fpu.FPUIP, pCtx->fpu.CS, pCtx->fpu.Rsrvd1,
                     pCtx->fpu.FPUDP, pCtx->fpu.DS, pCtx->fpu.Rsrvd2,
                     pCtx->fpu.MXCSR, pCtx->fpu.MXCSR_MASK));
-
trunk/src/VBox/VMM/VMMR3/CPUM.cpp
(diff r36660 → r36857)

                     pszPrefix, pCtx->fpu.FCW, pszPrefix, pCtx->fpu.FSW, pszPrefix, pCtx->fpu.FTW, pszPrefix, pCtx->fpu.FOP,
                     pszPrefix, pCtx->fpu.MXCSR, pszPrefix, pCtx->fpu.MXCSR_MASK,
-                    pszPrefix, pCtx->fpu.FPUIP, pszPrefix, pCtx->fpu.CS, pszPrefix, pCtx->fpu.Rsvrd1,
+                    pszPrefix, pCtx->fpu.FPUIP, pszPrefix, pCtx->fpu.CS, pszPrefix, pCtx->fpu.Rsrvd1,
                     pszPrefix, pCtx->fpu.FPUDP, pszPrefix, pCtx->fpu.DS, pszPrefix, pCtx->fpu.Rsrvd2
                     );
-
trunk/src/VBox/VMM/include/IEMInternal.h
(diff r36851 → r36857)

     /** The Number of I/O port writes that has been performed. */
     uint32_t                cIOWrites;
-    /** Hack for ignoring differences in undefined EFLAGS after MUL and DIV. */
-    bool                    fMulDivHack;
-    /** Hack for ignoring differences in OF after shifts greater than 1 bit.
-     * At least two intel CPUs this code is running on will set it different
-     * than what AMD and REM does. */
-    bool                    fShiftOfHack;
     /** Set if no comparison to REM is currently performed.
      * This is used to skip past really slow bits. */
     bool                    fNoRem;
-    bool                    afAlignment1[5];
+    bool                    afAlignment1[3];
+    /** Mask of undefined eflags.
+     * The verifier will ignore any difference in these flags. */
+    uint32_t                fUndefinedEFlags;
     /** The physical address corresponding to abOpcodes[0]. */
     RTGCPHYS                GCPhysOpcodes;
…
 #endif
 
+/**
+ * Indicates to the verifier that the given flag set is undefined.
+ *
+ * Can be invoked again to add more flags.
+ *
+ * This is a NOOP if the verifier isn't compiled in.
+ */
+#ifdef IEM_VERIFICATION_MODE
+# define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { pIemCpu->fUndefinedEFlags |= (a_fEfl); } while (0)
+#else
+# define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { } while (0)
+#endif
 
 
 /** @def IEM_DECL_IMPL_TYPE
…
 FNIEMAIMPLBINU32 iemAImpl_test_u32;
 FNIEMAIMPLBINU64 iemAImpl_test_u64;
+/** @} */
+
+/** @name Bit operations (thrown in with the binary ops).
+ * @{ */
+FNIEMAIMPLBINU16 iemAImpl_bt_u16,  iemAImpl_bt_u16_locked;
+FNIEMAIMPLBINU32 iemAImpl_bt_u32,  iemAImpl_bt_u32_locked;
+FNIEMAIMPLBINU64 iemAImpl_bt_u64,  iemAImpl_bt_u64_locked;
+FNIEMAIMPLBINU16 iemAImpl_btc_u16, iemAImpl_btc_u16_locked;
+FNIEMAIMPLBINU32 iemAImpl_btc_u32, iemAImpl_btc_u32_locked;
+FNIEMAIMPLBINU64 iemAImpl_btc_u64, iemAImpl_btc_u64_locked;
+FNIEMAIMPLBINU16 iemAImpl_btr_u16, iemAImpl_btr_u16_locked;
+FNIEMAIMPLBINU32 iemAImpl_btr_u32, iemAImpl_btr_u32_locked;
+FNIEMAIMPLBINU64 iemAImpl_btr_u64, iemAImpl_btr_u64_locked;
+FNIEMAIMPLBINU16 iemAImpl_bts_u16, iemAImpl_bts_u16_locked;
+FNIEMAIMPLBINU32 iemAImpl_bts_u32, iemAImpl_bts_u32_locked;
+FNIEMAIMPLBINU64 iemAImpl_bts_u64, iemAImpl_bts_u64_locked;
 /** @} */
…
 /** @} */
 
+
+/** @name Bit search operations (thrown in with the binary ops).
+ * @{ */
+FNIEMAIMPLBINU16 iemAImpl_bsf_u16;
+FNIEMAIMPLBINU32 iemAImpl_bsf_u32;
+FNIEMAIMPLBINU64 iemAImpl_bsf_u64;
+FNIEMAIMPLBINU16 iemAImpl_bsr_u16;
+FNIEMAIMPLBINU32 iemAImpl_bsr_u32;
+FNIEMAIMPLBINU64 iemAImpl_bsr_u64;
+/** @} */
+
 /** @name Signed multiplication operations (thrown in with the binary ops).
  * @{ */
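IEMOP_VERIFICATION_UNDEFINED_EFLAGS follows the usual pattern for compile-time removable instrumentation: in a normal build it expands to an empty do/while, so the many call sites added across IEMAllInstructions.cpp.h need no #ifdefs of their own. A generic sketch of the pattern with hypothetical names:

#include <stdint.h>

typedef struct MYSTATE { uint32_t fUndefinedEFlags; } MYSTATE;

#ifdef MY_VERIFICATION_MODE
/* Active build: accumulate the mask in some per-CPU state (hypothetical). */
# define MARK_UNDEFINED_EFLAGS(pState, fEfl) \
    do { (pState)->fUndefinedEFlags |= (fEfl); } while (0)
#else
/* Normal build: compiles to nothing, but still parses as exactly one
 * statement, so it stays safe in unbraced if/else bodies. */
# define MARK_UNDEFINED_EFLAGS(pState, fEfl) do { } while (0)
#endif

The do { } while (0) wrapper is the reason the macro can be followed by a semicolon anywhere a statement is legal, which is also why the real macro is written that way in both branches.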