Changeset 98827 in vbox for trunk/src/VBox/VMM/VMMAll
Timestamp: Mar 3, 2023 12:01:42 PM (2 years ago)
svn:sync-xref-src-repo-rev: 156149
Location: trunk/src/VBox/VMM/VMMAll
Files: 4 edited
Legend: Unmodified | Added | Removed
trunk/src/VBox/VMM/VMMAll/CPUMAllCpuId.cpp
r98703 r98827 1469 1469 pFeatures->fRtm = RT_BOOL(pSxfLeaf0->uEbx & X86_CPUID_STEXT_FEATURE_EBX_RTM); 1470 1470 pFeatures->fSha = RT_BOOL(pSxfLeaf0->uEbx & X86_CPUID_STEXT_FEATURE_EBX_SHA); 1471 pFeatures->fAdx = RT_BOOL(pSxfLeaf0->uEbx & X86_CPUID_STEXT_FEATURE_EBX_ADX); 1471 1472 1472 1473 pFeatures->fIbpb = RT_BOOL(pSxfLeaf0->uEdx & X86_CPUID_STEXT_FEATURE_EDX_IBRS_IBPB); -
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
;; (tail of the preceding flag-handling macro -- its opening lines are outside this view)
        popf                            ; load the mixed flags.
;%endif
%endmacro

;;
; Load the relevant flags from [%1].
;
; @remarks Clobbers T0, stack. Changes EFLAGS.
; @param        1       The parameter (A0..A3) pointing to the eflags.
; @param        2       The set of flags to load.
; @param        3       The set of undefined flags.
;
%macro IEM_LOAD_FLAGS 3
        pushf                           ; store current flags
        mov     T0_32, [%1]             ; load the guest flags
        and     dword [xSP], ~(%2 | %3) ; mask out the modified and undefined flags
        and     T0_32, (%2 | %3)        ; select the modified and undefined flags.
        or      [xSP], T0               ; merge guest flags with host flags.
        popf                            ; load the mixed flags.
%endmacro

; ... (unchanged section elided by the diff view) ...

        EPILOGUE_3_ARGS
ENDPROC iemAImpl_sha256rnds2_u128


;
; 32-bit forms of ADCX and ADOX
;
; Loads the one relevant carry flag (CF for adcx, OF for adox), performs
; dst = dst + src + flag, and saves only that flag back -- all other
; EFLAGS bits are preserved, which is the defining property of ADCX/ADOX.
;
; @param        A0      Pointer to the destination operand (input/output).
; @param        A1      Pointer to the EFLAGS value (input/output).
; @param        A2      32-bit source operand 1 (input).
;
%macro IEMIMPL_ADX_32 2
BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u32, 8
        PROLOGUE_4_ARGS

        IEM_LOAD_FLAGS          A1, %2, 0
        %1      A2_32, [A0]             ; A2_32 = uSrc + *puDst + flag
        mov     [A0], A2_32
        IEM_SAVE_FLAGS          A1, %2, 0

        EPILOGUE_4_ARGS
ENDPROC iemAImpl_ %+ %1 %+ _u32
%endmacro

;
; 64-bit forms of ADCX and ADOX
;
; @param        A0      Pointer to the destination operand (input/output).
; @param        A1      Pointer to the EFLAGS value (input/output).
; @param        A2      64-bit source operand 1 (input).
;
%macro IEMIMPL_ADX_64 2
BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 8
        PROLOGUE_4_ARGS

        IEM_LOAD_FLAGS          A1, %2, 0
        %1      A2, [A0]                ; A2 = uSrc + *puDst + flag
        mov     [A0], A2
        IEM_SAVE_FLAGS          A1, %2, 0

        EPILOGUE_4_ARGS
ENDPROC iemAImpl_ %+ %1 %+ _u64
%endmacro

IEMIMPL_ADX_32 adcx, X86_EFL_CF
IEMIMPL_ADX_64 adcx, X86_EFL_CF

IEMIMPL_ADX_32 adox, X86_EFL_OF
IEMIMPL_ADX_64 adox, X86_EFL_OF
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r98821 r98827 18005 18005 puDst->au32[0] = au32F[2]; 18006 18006 } 18007 18008 18009 /** 18010 * ADCX 18011 */ 18012 #define ADX_EMIT(a_Flag, a_Type, a_Max) \ 18013 do \ 18014 { \ 18015 bool f = RT_BOOL(*pfEFlags & (a_Flag)); \ 18016 a_Type uTmp = *puDst + uSrc; \ 18017 if (uTmp < uSrc) \ 18018 *pfEFlags |= (a_Flag); \ 18019 else \ 18020 *pfEFlags &= ~(a_Flag); \ 18021 if ( uTmp == a_Max \ 18022 && f) \ 18023 *pfEFlags |= (a_Flag); \ 18024 if (f) \ 18025 uTmp++; \ 18026 *puDst = uTmp; \ 18027 } \ 18028 while (0) 18029 18030 IEM_DECL_IMPL_DEF(void, iemAImpl_adcx_u32_fallback,(uint32_t *puDst, uint32_t *pfEFlags, uint32_t uSrc)) 18031 { 18032 ADX_EMIT(X86_EFL_CF, uint32_t, UINT32_MAX); 18033 } 18034 18035 IEM_DECL_IMPL_DEF(void, iemAImpl_adcx_u64_fallback,(uint64_t *puDst, uint32_t *pfEFlags, uint64_t uSrc)) 18036 { 18037 ADX_EMIT(X86_EFL_CF, uint64_t, UINT64_MAX); 18038 } 18039 18040 18041 /** 18042 * ADOX 18043 */ 18044 IEM_DECL_IMPL_DEF(void, iemAImpl_adox_u32_fallback,(uint32_t *puDst, uint32_t *pfEFlags, uint32_t uSrc)) 18045 { 18046 ADX_EMIT(X86_EFL_OF, uint32_t, UINT32_MAX); 18047 } 18048 18049 IEM_DECL_IMPL_DEF(void, iemAImpl_adox_u64_fallback,(uint64_t *puDst, uint32_t *pfEFlags, uint64_t uSrc)) 18050 { 18051 ADX_EMIT(X86_EFL_OF, uint64_t, UINT64_MAX); 18052 } -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h
/* Opcode 0x0f 0x38 0xf6 - invalid. */

/**
 * Common decoder body for ADCX (0x66 0x0f 0x38 0xf6) and ADOX (0xf3 0x0f 0x38 0xf6).
 *
 * Raises invalid-opcode when the guest CPU profile lacks ADX, then decodes the
 * ModR/M byte and dispatches to the 32-bit or 64-bit (REX.W), register or
 * memory form of iemAImpl_<a_Variant>_uXX -- native or _fallback, chosen at
 * runtime by IEM_SELECT_HOST_OR_FALLBACK on the host's fAdx feature.
 *
 * @param a_Variant  adcx or adox (token-pasted into the worker name).
 */
#define ADX_EMIT(a_Variant) \
    do \
    { \
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAdx) \
            return iemOp_InvalidNeedRM(pVCpu); \
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            /* 64-bit operand size (REX.W set). */ \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                /* register, register */ \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                /* register, memory */ \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
        else \
        { \
            /* 32-bit operand size. */ \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                /* register, register */ \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                /* register, memory */ \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
    } while(0)

/** Opcode 0x66 0x0f 0x38 0xf6. */
FNIEMOP_DEF(iemOp_adcx_Gy_Ey)
{
    /* ADCX Gy, Ey: dst = dst + src + CF, updating only CF. */
    IEMOP_MNEMONIC2(RM, ADCX, adcx, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adcx);
}


/** Opcode 0xf3 0x0f 0x38 0xf6. */
FNIEMOP_DEF(iemOp_adox_Gy_Ey)
{
    /* ADOX Gy, Ey: dst = dst + src + OF, updating only OF. */
    IEMOP_MNEMONIC2(RM, ADOX, adox, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adox);
}


/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
Note: See TracChangeset for help on using the changeset viewer.