Changeset 105309 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Jul 12, 2024, 3:12:43 PM
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r105307 -> r105309)

Two new assembly helpers for VCVTSI2SS are added. The 32-bit integer variant is inserted after ENDPROC iemAImpl_cvtsi2ss_r32_i32:

    ;;
    ; vcvtsi2ss instruction - 32-bit variant.
    ;
    ; @return   R0_32   The new MXCSR value of the guest.
    ; @param    A0_32   The guest's MXCSR register value to use.
    ; @param    A1      Pointer to the result operand (output).
    ; @param    A2      Pointer to the second operand (input).
    ; @param    A3      Pointer to the third operand (input).
    ;
    BEGINPROC_FASTCALL iemAImpl_vcvtsi2ss_u128_i32, 16
            PROLOGUE_3_ARGS
            IEMIMPL_AVX_PROLOGUE
            SSE_AVX_LD_MXCSR A0_32

            movdqu    xmm0, [A2]
            vcvtsi2ss xmm0, xmm0, dword [A3]
            movdqu    [A1], xmm0

            SSE_AVX_ST_MXCSR R0_32, A0_32
            IEMIMPL_AVX_EPILOGUE
            EPILOGUE_3_ARGS
    ENDPROC iemAImpl_vcvtsi2ss_u128_i32

and the 64-bit integer variant after ENDPROC iemAImpl_cvtsi2ss_r32_i64:

    ;;
    ; vcvtsi2ss instruction - 64-bit variant.
    ;
    ; @return   R0_32   The new MXCSR value of the guest.
    ; @param    A0_32   The guest's MXCSR register value to use.
    ; @param    A1      Pointer to the result operand (output).
    ; @param    A2      Pointer to the second operand (input).
    ; @param    A3      Pointer to the third operand (input).
    ;
    BEGINPROC_FASTCALL iemAImpl_vcvtsi2ss_u128_i64, 16
            PROLOGUE_3_ARGS
            IEMIMPL_AVX_PROLOGUE
            SSE_AVX_LD_MXCSR A0_32

            movdqu    xmm0, [A2]
            vcvtsi2ss xmm0, xmm0, qword [A3]
            movdqu    [A1], xmm0

            SSE_AVX_ST_MXCSR R0_32, A0_32
            IEMIMPL_AVX_EPILOGUE
            EPILOGUE_3_ARGS
    ENDPROC iemAImpl_vcvtsi2ss_u128_i64
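For orientation (this sketch is not part of the changeset): both helpers implement the scalar semantics of VCVTSI2SS, i.e. the low 32 bits of the destination receive the converted integer while bits 32-127 are copied from the first XMM source operand. A minimal C model of that data flow, using hypothetical type and function names and ignoring the MXCSR/rounding handling the real helpers perform, could look like this:

    #include <stdint.h>

    /* Hypothetical 128-bit register model, for illustration only. */
    typedef union XMMMODEL
    {
        uint32_t au32[4];
        uint64_t au64[2];
        float    ar32[4];
    } XMMMODEL;

    /* dst[31:0] = (float)iSrc2, dst[127:32] = src1[127:32]; the rounding mode
       and MXCSR status flag updates done by the real helpers are omitted. */
    static XMMMODEL ModelVCvtSi2SsI32(XMMMODEL const *pSrc1, int32_t iSrc2)
    {
        XMMMODEL Dst = *pSrc1;        /* keep the untouched upper lanes */
        Dst.ar32[0] = (float)iSrc2;   /* convert into the low scalar lane */
        return Dst;
    }

The 64-bit variant differs only in taking an int64_t source, which is why separate _i32/_i64 helpers exist.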
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r105307 -> r105309)

The section comment is widened from "CVTSI2SS" to "[V]CVTSI2SS", and two C fallback implementations for the new helpers are added after the existing #ifdef IEM_WITHOUT_ASSEMBLY cvtsi2ss code:

    IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtsi2ss_u128_i32_fallback, (uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, const int32_t *pi32Src2))
    {
        puDst->au32[1] = puSrc1->au32[1];
        puDst->au64[1] = puSrc1->au64[1];

        softfloat_state_t SoftState = IEM_SOFTFLOAT_STATE_INITIALIZER_FROM_MXCSR(uMxCsrIn);
        float32_t r32Res = i32_to_f32(*pi32Src2, &SoftState);
        return iemSseSoftStateAndR32ToMxcsrAndIprtResult(&SoftState, r32Res, &puDst->ar32[0], uMxCsrIn);
    }


    IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtsi2ss_u128_i64_fallback, (uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, const int64_t *pi64Src2))
    {
        puDst->au32[1] = puSrc1->au32[1];
        puDst->au64[1] = puSrc1->au64[1];

        softfloat_state_t SoftState = IEM_SOFTFLOAT_STATE_INITIALIZER_FROM_MXCSR(uMxCsrIn);
        float32_t r32Res = i64_to_f32(*pi64Src2, &SoftState);
        return iemSseSoftStateAndR32ToMxcsrAndIprtResult(&SoftState, r32Res, &puDst->ar32[0], uMxCsrIn);
    }
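The fallbacks route the conversion through SoftFloat with a state seeded from the guest MXCSR because integer-to-float conversion is rounding-mode dependent: an int32 such as 2147483583 falls between the representable floats 2147483520 and 2147483648, so the selected rounding direction decides the result. A host-side illustration of that effect (standard C with <fenv.h>; not the IEM code path, and it assumes the compiler performs the conversion at run time honouring the dynamic rounding mode, e.g. GCC/Clang on x86-64 with -frounding-math):

    #include <fenv.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        volatile int32_t i32 = 2147483583;  /* 0x7FFFFFBF, not exactly representable as float */

        fesetround(FE_TONEAREST);
        printf("to nearest: %.1f\n", (double)(float)i32);  /* 2147483520.0 */

        fesetround(FE_UPWARD);
        printf("upward:     %.1f\n", (double)(float)i32);  /* 2147483648.0 */

        fesetround(FE_TONEAREST);                          /* restore the default mode */
        return 0;
    }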
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h (r105283 -> r105309)

The stub FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); for opcode VEX.F3.0F 0x2a is replaced by a full decoder implementation; the neighbouring VEX.F2.0F 0x2a (vcvtsi2sd) remains a stub:

    /** Opcode VEX.0F 0x2a - invalid */
    /** Opcode VEX.66.0F 0x2a - invalid */


    /** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
    FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
    {
        IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
        IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* XMM, greg64 */
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_LOCAL(X86XMMREG, uDst);
                IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
                IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
                IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
                IEM_MC_ARG(const int64_t *, pi64Src2, 2);
                IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
                                        puDst, puSrc1, pi64Src2);
                IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
                IEM_MC_STORE_XREG_XMM(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
                IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem64] */
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_LOCAL(X86XMMREG, uDst);
                IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
                IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
                IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
                IEM_MC_LOCAL(int64_t, i64Src2);
                IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
                IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
                                        puDst, puSrc1, pi64Src2);
                IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
                IEM_MC_STORE_XREG_XMM(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
                IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
        }
        else
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* XMM, greg32 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_LOCAL(X86XMMREG, uDst);
                IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
                IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
                IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
                IEM_MC_ARG(const int32_t *, pi32Src2, 2);
                IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
                                        puDst, puSrc1, pi32Src2);
                IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
                IEM_MC_STORE_XREG_XMM(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
                IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem32] */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_LOCAL(X86XMMREG, uDst);
                IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
                IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
                IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
                IEM_MC_LOCAL(int32_t, i32Src2);
                IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
                IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
                                        puDst, puSrc1, pi32Src2);
                IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
                IEM_MC_STORE_XREG_XMM(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
                IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
        }
    }


    /** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
    FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
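The decoder distinguishes four encodings (register vs. memory source, VEX.W0 vs. VEX.W1) and clears the upper half of the destination YMM register, as VEX-encoded scalar instructions require. As an illustration of the guest-visible behaviour being emulated (not part of the changeset), the same operation is reachable from host code through the standard SSE intrinsics, which compile to vcvtsi2ss when AVX code generation is enabled (e.g. -mavx on an x86-64 host):

    #include <stdio.h>
    #include <stdint.h>
    #include <immintrin.h>

    int main(void)
    {
        __m128 xSrc1 = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);  /* lanes 3..0 */

        /* Low lane becomes (float)-7, the upper three lanes come from xSrc1 -
         * the register form handled by the 32-bit (VEX.W0) decoder path. */
        __m128 xRes32 = _mm_cvtsi32_ss(xSrc1, -7);

        int64_t i64 = INT64_C(1) << 40;
        /* 64-bit (VEX.W1) form, only encodable in 64-bit mode. */
        __m128 xRes64 = _mm_cvtsi64_ss(xSrc1, i64);

        float af[4];
        _mm_storeu_ps(af, xRes32);
        printf("w0: %g %g %g %g\n", af[0], af[1], af[2], af[3]);  /* -7 2 3 4 */
        _mm_storeu_ps(af, xRes64);
        printf("w1: %g %g %g %g\n", af[0], af[1], af[2], af[3]);  /* 1.09951e+12 2 3 4 */
        return 0;
    }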