Changeset 103807 in vbox for trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
Timestamp: Mar 12, 2024 7:43:31 PM
svn:sync-xref-src-repo-rev: 162176
File: 1 edited
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
--- trunk/src/VBox/VMM/include/IEMN8veRecompiler.h  (r103804)
+++ trunk/src/VBox/VMM/include/IEMN8veRecompiler.h  (r103807)

 /** @} */
 
-
 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+
 /**
  * Guest registers that can be shadowed in host SIMD registers.
…
     kIemNativeGstSimdRegLdStSz_End
 } IEMNATIVEGSTSIMDREGLDSTSZ;
-#endif
 
+#endif /* IEMNATIVE_WITH_SIMD_REG_ALLOCATOR */
 
 /**
…
 
 
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+DECL_HIDDEN_THROW(void) iemNativeDbgInfoAddNativeOffset(PIEMRECOMPILERSTATE pReNative, uint32_t off);
+DECL_HIDDEN_THROW(void) iemNativeDbgInfoAddGuestRegShadowing(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg,
+                                                             uint8_t idxHstReg = UINT8_MAX, uint8_t idxHstRegPrev = UINT8_MAX);
+# ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+DECL_HIDDEN_THROW(void) iemNativeDbgInfoAddGuestSimdRegShadowing(PIEMRECOMPILERSTATE pReNative,
+                                                                 IEMNATIVEGSTSIMDREG enmGstSimdReg,
+                                                                 uint8_t idxHstSimdReg = UINT8_MAX,
+                                                                 uint8_t idxHstSimdRegPrev = UINT8_MAX);
+# endif
+DECL_HIDDEN_THROW(void) iemNativeDbgInfoAddDelayedPcUpdate(PIEMRECOMPILERSTATE pReNative,
+                                                           uint32_t offPc, uint32_t cInstrSkipped);
+#endif /* IEMNATIVE_WITH_TB_DEBUG_INFO */
+
 DECL_HIDDEN_THROW(uint32_t) iemNativeLabelCreate(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
                                                  uint32_t offWhere = UINT32_MAX, uint16_t uData = 0);
…
 DECL_HIDDEN_THROW(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs);
 DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAssignRc(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg);
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegMoveOrSpillStackVar(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVar,
+                                                            uint32_t fForbiddenRegs = IEMNATIVE_CALL_VOLATILE_GREG_MASK);
 DECLHIDDEN(void)            iemNativeRegFree(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
 DECLHIDDEN(void)            iemNativeRegFreeTmp(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg) RT_NOEXCEPT;
…
 DECLHIDDEN(void)            iemNativeRegFreeVar(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg, bool fFlushShadows) RT_NOEXCEPT;
 DECLHIDDEN(void)            iemNativeRegFreeAndFlushMask(PIEMRECOMPILERSTATE pReNative, uint32_t fHstRegMask) RT_NOEXCEPT;
-DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint64_t fGstShwExept = 0, bool fFlushShadows = true);
 DECL_HIDDEN_THROW(uint32_t) iemNativeRegMoveAndFreeAndFlushAtCall(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs,
                                                                   uint32_t fKeepVars = 0);
…
 DECL_HIDDEN_THROW(uint32_t) iemNativeRegRestoreGuestShadowsInVolatileRegs(PIEMRECOMPILERSTATE pReNative, uint32_t off,
                                                                           uint32_t fHstRegsActiveShadows);
-
+#ifdef VBOX_STRICT
+DECLHIDDEN(void)            iemNativeRegAssertSanity(PIEMRECOMPILERSTATE pReNative);
+#endif
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWritesSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                               uint64_t fGstShwExcept, bool fFlushShadows);
+#ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitPcWritebackSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off);
+#endif
+
+
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+DECL_HIDDEN_THROW(uint8_t)  iemNativeSimdRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile = true);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeSimdRegAllocTmpEx(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint32_t fRegMask,
+                                                       bool fPreferVolatile = true);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeSimdRegAllocTmpForGuestSimdReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
+                                                                    IEMNATIVEGSTSIMDREG enmGstSimdReg,
+                                                                    IEMNATIVEGSTSIMDREGLDSTSZ enmLoadSz,
+                                                                    IEMNATIVEGSTREGUSE enmIntendedUse = kIemNativeGstRegUse_ReadOnly,
+                                                                    bool fNoVolatileRegs = false);
+DECLHIDDEN(void)            iemNativeSimdRegFreeTmp(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstSimdReg) RT_NOEXCEPT;
+DECLHIDDEN(void)            iemNativeSimdRegFlushGuestShadows(PIEMRECOMPILERSTATE pReNative, uint64_t fGstSimdRegs) RT_NOEXCEPT;
+DECL_HIDDEN_THROW(uint32_t) iemNativeSimdRegFlushPendingWrite(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                              IEMNATIVEGSTSIMDREG enmGstSimdReg);
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadSimdRegWithGstShadowSimdReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                                         uint8_t idxHstSimdReg, IEMNATIVEGSTSIMDREG enmGstSimdReg,
+                                                                         IEMNATIVEGSTSIMDREGLDSTSZ enmLoadSz);
+#endif
+
+DECL_HIDDEN_THROW(uint8_t)  iemNativeArgAlloc(PIEMRECOMPILERSTATE pReNative, uint8_t iArgNo, uint8_t cbType);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeArgAllocConst(PIEMRECOMPILERSTATE pReNative, uint8_t iArgNo, uint8_t cbType, uint64_t uValue);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeArgAllocLocalRef(PIEMRECOMPILERSTATE pReNative, uint8_t iArgNo, uint8_t idxOtherVar);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeVarAlloc(PIEMRECOMPILERSTATE pReNative, uint8_t cbType);
+DECL_HIDDEN_THROW(uint8_t)  iemNativeVarAllocConst(PIEMRECOMPILERSTATE pReNative, uint8_t cbType, uint64_t uValue);
+DECL_HIDDEN_THROW(void)     iemNativeVarSetKindToStack(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar);
+DECL_HIDDEN_THROW(void)     iemNativeVarSetKindToConst(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, uint64_t uValue);
+DECL_HIDDEN_THROW(void)     iemNativeVarSetKindToGstRegRef(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar,
+                                                           IEMNATIVEGSTREGREF enmRegClass, uint8_t idxReg);
 DECL_HIDDEN_THROW(uint8_t)  iemNativeVarGetStackSlot(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar);
 DECL_HIDDEN_THROW(uint8_t)  iemNativeVarRegisterAcquire(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, uint32_t *poff,
…
 DECL_HIDDEN_THROW(uint32_t) iemNativeVarRestoreVolatileRegsPostHlpCall(PIEMRECOMPILERSTATE pReNative, uint32_t off,
                                                                        uint32_t fHstRegsNotToSave);
+DECLHIDDEN(void)            iemNativeVarFreeOneWorker(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar);
+DECLHIDDEN(void)            iemNativeVarFreeAllSlow(PIEMRECOMPILERSTATE pReNative, uint32_t bmVars);
 
 DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
                                                                  uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg);
+#ifdef VBOX_STRICT
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitTop32BitsClearCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg);
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg,
+                                                            IEMNATIVEGSTREG enmGstReg);
+# ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestSimdRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxSimdReg,
+                                                                IEMNATIVEGSTSIMDREG enmGstSimdReg,
+                                                                IEMNATIVEGSTSIMDREGLDSTSZ enmLoadSz);
+# endif
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitExecFlagsCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fExec);
+#endif
 DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCheckCallRetAndPassUp(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr);
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCallCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs, uint8_t cHiddenArgs);
 DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCImplCall(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
                                                    uint64_t fGstShwFlush, uintptr_t pfnCImpl, uint8_t cbInstr, uint8_t cAddParams,
…
 DECL_HIDDEN_THROW(uint32_t) iemNativeEmitThreadedCall(PIEMRECOMPILERSTATE pReNative, uint32_t off,
                                                       PCIEMTHRDEDCALLENTRY pCallEntry);
-
-#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
-DECL_HIDDEN_THROW(uint8_t) iemNativeSimdRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile = true);
-DECL_HIDDEN_THROW(uint8_t) iemNativeSimdRegAllocTmpEx(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint32_t fRegMask,
-                                                      bool fPreferVolatile = true);
-DECL_HIDDEN_THROW(uint8_t) iemNativeSimdRegAllocTmpForGuestSimdReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTSIMDREG enmGstSimdReg,
-                                                                   IEMNATIVEGSTSIMDREGLDSTSZ enmLoadSz, IEMNATIVEGSTREGUSE enmIntendedUse = kIemNativeGstRegUse_ReadOnly,
-                                                                   bool fNoVolatileRegs = false);
-DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadSimdRegWithGstShadowSimdReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                                                         uint8_t idxHstSimdReg, IEMNATIVEGSTSIMDREG enmGstSimdReg,
-                                                                         IEMNATIVEGSTSIMDREGLDSTSZ enmLoadSz);
-#endif
-
-extern DECL_HIDDEN_DATA(const char * const) g_apszIemNativeHstRegNames[];
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCheckGprCanonicalMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                                        uint8_t idxAddrReg, uint8_t idxInstr);
+DECL_HIDDEN_THROW(uint32_t) iemNativeEmitCheckGpr32AgainstCsSegLimitMaybeRaiseGp0(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                                                  uint8_t idxAddrReg, uint8_t idxInstr);
+
+
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecStatusCodeFiddling,(PVMCPUCC pVCpu, int rc, uint8_t idxInstr));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecRaiseGp0,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecRaiseNm,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecRaiseUd,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecRaiseMf,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpExecRaiseXf,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpObsoleteTb,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpNeedCsLimChecking,(PVMCPUCC pVCpu));
+IEM_DECL_NATIVE_HLP_PROTO(int, iemNativeHlpCheckBranchMiss,(PVMCPUCC pVCpu));
+
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackStoreU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackStoreU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackStoreU32SReg,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackStoreU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t, iemNativeHlpStackFetchU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t, iemNativeHlpStackFetchU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpStackFetchU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackFlatStoreU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackFlatStoreU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackFlatStoreU32SReg,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpStackFlatStoreU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t, iemNativeHlpStackFlatFetchU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t, iemNativeHlpStackFlatFetchU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t, iemNativeHlpStackFlatFetchU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem));
+
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemMapDataU8Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemMapDataU8Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemMapDataU8Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t const *, iemNativeHlpMemMapDataU8Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemMapDataU16Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemMapDataU16Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemMapDataU16Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t const *, iemNativeHlpMemMapDataU16Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemMapDataU32Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemMapDataU32Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemMapDataU32Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t const *, iemNativeHlpMemMapDataU32Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemMapDataU64Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemMapDataU64Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemMapDataU64Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t const *, iemNativeHlpMemMapDataU64Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTFLOAT80U *, iemNativeHlpMemMapDataR80Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTPBCD80U *, iemNativeHlpMemMapDataD80Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemMapDataU128Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemMapDataU128Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemMapDataU128Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U const *, iemNativeHlpMemMapDataU128Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem, uint8_t iSegReg));
+
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemFlatMapDataU8Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemFlatMapDataU8Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t *, iemNativeHlpMemFlatMapDataU8Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint8_t const *, iemNativeHlpMemFlatMapDataU8Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemFlatMapDataU16Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemFlatMapDataU16Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t *, iemNativeHlpMemFlatMapDataU16Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint16_t const *, iemNativeHlpMemFlatMapDataU16Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemFlatMapDataU32Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemFlatMapDataU32Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t *, iemNativeHlpMemFlatMapDataU32Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint32_t const *, iemNativeHlpMemFlatMapDataU32Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemFlatMapDataU64Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemFlatMapDataU64Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t *, iemNativeHlpMemFlatMapDataU64Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(uint64_t const *, iemNativeHlpMemFlatMapDataU64Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTFLOAT80U *, iemNativeHlpMemFlatMapDataR80Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTPBCD80U *, iemNativeHlpMemFlatMapDataD80Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemFlatMapDataU128Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemFlatMapDataU128Rw,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U *, iemNativeHlpMemFlatMapDataU128Wo,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+IEM_DECL_NATIVE_HLP_PROTO(RTUINT128U const *, iemNativeHlpMemFlatMapDataU128Ro,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem));
+
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemCommitAndUnmapAtomic,(PVMCPUCC pVCpu, uint8_t bUnmapInfo));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemCommitAndUnmapRw,(PVMCPUCC pVCpu, uint8_t bUnmapInfo));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemCommitAndUnmapWo,(PVMCPUCC pVCpu, uint8_t bUnmapInfo));
+IEM_DECL_NATIVE_HLP_PROTO(void, iemNativeHlpMemCommitAndUnmapRo,(PVMCPUCC pVCpu, uint8_t bUnmapInfo));
+
+
+/**
+ * Info about shadowed guest register values.
+ * @see IEMNATIVEGSTREG
+ */
+typedef struct IEMANTIVEGSTREGINFO
+{
+    /** Offset in VMCPU. */
+    uint32_t    off;
+    /** The field size. */
+    uint8_t     cb;
+    /** Name (for logging). */
+    const char *pszName;
+} IEMANTIVEGSTREGINFO;
+extern DECL_HIDDEN_DATA(IEMANTIVEGSTREGINFO const) g_aGstShadowInfo[];
+extern DECL_HIDDEN_DATA(const char * const) g_apszIemNativeHstRegNames[];
+extern DECL_HIDDEN_DATA(int32_t const) g_aoffIemNativeCallStackArgBpDisp[];
+extern DECL_HIDDEN_DATA(uint32_t const) g_afIemNativeCallRegs[];
+extern DECL_HIDDEN_DATA(uint8_t const) g_aidxIemNativeCallRegs[];
+
 
 
…
 }
 
+
+/**
+ * Converts IEM_CIMPL_F_XXX flags into a guest register shadow copy flush mask.
+ *
+ * @returns The flush mask.
+ * @param   fCImpl          The IEM_CIMPL_F_XXX flags.
+ * @param   fGstShwFlush    The starting flush mask.
+ */
+DECL_FORCE_INLINE(uint64_t) iemNativeCImplFlagsToGuestShadowFlushMask(uint32_t fCImpl, uint64_t fGstShwFlush)
+{
+    if (fCImpl & IEM_CIMPL_F_BRANCH_FAR)
+        fGstShwFlush |= RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_CS)
+                     |  RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_CS)
+                     |  RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_CS);
+    if (fCImpl & IEM_CIMPL_F_BRANCH_STACK_FAR)
+        fGstShwFlush |= RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
+                     |  RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_SS)
+                     |  RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_SS)
+                     |  RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS);
+    else if (fCImpl & IEM_CIMPL_F_BRANCH_STACK)
+        fGstShwFlush |= RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP);
+    if (fCImpl & (IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_STATUS_FLAGS | IEM_CIMPL_F_INHIBIT_SHADOW))
+        fGstShwFlush |= RT_BIT_64(kIemNativeGstReg_EFlags);
+    return fGstShwFlush;
+}
+
+
+/** Number of hidden arguments for CIMPL calls.
+ * @note We're suffering from the usual VBOXSTRICTRC fun on Windows. */
+#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)
+# define IEM_CIMPL_HIDDEN_ARGS 3
+#else
+# define IEM_CIMPL_HIDDEN_ARGS 2
+#endif
+
+
+#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
+
+# ifndef IEMLIVENESS_EXTENDED_LAYOUT
+/**
+ * Helper for iemNativeLivenessGetStateByGstReg.
+ *
+ * @returns IEMLIVENESS_STATE_XXX
+ * @param   fMergedStateExp2    This is the RT_BIT_32() of each sub-state
+ *                              ORed together.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeLivenessMergeExpandedEFlagsState(uint32_t fMergedStateExp2)
+{
+    /* INPUT trumps anything else. */
+    if (fMergedStateExp2 & RT_BIT_32(IEMLIVENESS_STATE_INPUT))
+        return IEMLIVENESS_STATE_INPUT;
+
+    /* CLOBBERED trumps XCPT_OR_CALL and UNUSED. */
+    if (fMergedStateExp2 & RT_BIT_32(IEMLIVENESS_STATE_CLOBBERED))
+    {
+        /* If not all sub-fields are clobbered they must be considered INPUT. */
+        if (fMergedStateExp2 & (RT_BIT_32(IEMLIVENESS_STATE_UNUSED) | RT_BIT_32(IEMLIVENESS_STATE_XCPT_OR_CALL)))
+            return IEMLIVENESS_STATE_INPUT;
+        return IEMLIVENESS_STATE_CLOBBERED;
+    }
+
+    /* XCPT_OR_CALL trumps UNUSED. */
+    if (fMergedStateExp2 & RT_BIT_32(IEMLIVENESS_STATE_XCPT_OR_CALL))
+        return IEMLIVENESS_STATE_XCPT_OR_CALL;
+
+    return IEMLIVENESS_STATE_UNUSED;
+}
+# endif /* !IEMLIVENESS_EXTENDED_LAYOUT */
+
+
+DECL_FORCE_INLINE(uint32_t)
+iemNativeLivenessGetStateByGstRegEx(PCIEMLIVENESSENTRY pLivenessEntry, unsigned enmGstRegEx)
+{
+# ifndef IEMLIVENESS_EXTENDED_LAYOUT
+    return ((pLivenessEntry->Bit0.bm64 >> enmGstRegEx) & 1)
+         | (((pLivenessEntry->Bit1.bm64 >> enmGstRegEx) << 1) & 2);
+# else
+    return ( (pLivenessEntry->Bit0.bm64 >> enmGstRegEx) & 1)
+         | (((pLivenessEntry->Bit1.bm64 >> enmGstRegEx) << 1) & 2)
+         | (((pLivenessEntry->Bit2.bm64 >> enmGstRegEx) << 2) & 4)
+         | (((pLivenessEntry->Bit3.bm64 >> enmGstRegEx) << 3) & 8);
+# endif
+}
+
+
+DECL_FORCE_INLINE(uint32_t)
+iemNativeLivenessGetStateByGstReg(PCIEMLIVENESSENTRY pLivenessEntry, IEMNATIVEGSTREG enmGstReg)
+{
+    uint32_t uRet = iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, (unsigned)enmGstReg);
+    if (enmGstReg == kIemNativeGstReg_EFlags)
+    {
+        /* Merge the eflags states to one. */
+# ifndef IEMLIVENESS_EXTENDED_LAYOUT
+        uRet  = RT_BIT_32(uRet);
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflCf | (pLivenessEntry->Bit1.fEflCf << 1));
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflPf | (pLivenessEntry->Bit1.fEflPf << 1));
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflAf | (pLivenessEntry->Bit1.fEflAf << 1));
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflZf | (pLivenessEntry->Bit1.fEflZf << 1));
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflSf | (pLivenessEntry->Bit1.fEflSf << 1));
+        uRet |= RT_BIT_32(pLivenessEntry->Bit0.fEflOf | (pLivenessEntry->Bit1.fEflOf << 1));
+        uRet  = iemNativeLivenessMergeExpandedEFlagsState(uRet);
+# else
+        AssertCompile(IEMLIVENESSBIT_IDX_EFL_OTHER == (unsigned)kIemNativeGstReg_EFlags);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_CF);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_PF);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_AF);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_ZF);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_SF);
+        uRet |= iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, IEMLIVENESSBIT_IDX_EFL_OF);
+# endif
+    }
+    return uRet;
+}
+
+
+# ifdef VBOX_STRICT
+/** For assertions only, user checks that idxCurCall isn't zero. */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeLivenessGetPrevStateByGstReg(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg)
+{
+    return iemNativeLivenessGetStateByGstReg(&pReNative->paLivenessEntries[pReNative->idxCurCall - 1], enmGstReg);
+}
+# endif /* VBOX_STRICT */
+
+#endif /* IEMNATIVE_WITH_LIVENESS_ANALYSIS */
+
+
+/**
+ * Gets the number of hidden arguments for an expected IEM_MC_CALL statement.
+ */
+DECL_FORCE_INLINE(uint8_t) iemNativeArgGetHiddenArgCount(PIEMRECOMPILERSTATE pReNative)
+{
+    if (pReNative->fCImpl & IEM_CIMPL_F_CALLS_CIMPL)
+        return IEM_CIMPL_HIDDEN_ARGS;
+    if (pReNative->fCImpl & IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE)
+        return 1;
+    return 0;
+}
+
+
+DECL_FORCE_INLINE(uint8_t) iemNativeRegMarkAllocated(PIEMRECOMPILERSTATE pReNative, unsigned idxReg,
+                                                     IEMNATIVEWHAT enmWhat, uint8_t idxVar = UINT8_MAX) RT_NOEXCEPT
+{
+    pReNative->Core.bmHstRegs |= RT_BIT_32(idxReg);
+
+    pReNative->Core.aHstRegs[idxReg].enmWhat        = enmWhat;
+    pReNative->Core.aHstRegs[idxReg].fGstRegShadows = 0;
+    pReNative->Core.aHstRegs[idxReg].idxVar         = idxVar;
+    return (uint8_t)idxReg;
+}
+
+
+
+/*********************************************************************************************************************************
+*   Register Allocator (GPR)                                                                                                     *
+*********************************************************************************************************************************/
+
+/**
+ * Marks host register @a idxHstReg as containing a shadow copy of guest
+ * register @a enmGstReg.
+ *
+ * ASSUMES that caller has made sure @a enmGstReg is not associated with any
+ * host register before calling.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegMarkAsGstRegShadow(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg, uint32_t off)
+{
+    Assert(!(pReNative->Core.bmGstRegShadows & RT_BIT_64(enmGstReg)));
+    Assert(!pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows);
+    Assert((unsigned)enmGstReg < (unsigned)kIemNativeGstReg_End);
+
+    pReNative->Core.aidxGstRegShadows[enmGstReg]       = idxHstReg;
+    pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows = RT_BIT_64(enmGstReg); /** @todo why? not OR? */
+    pReNative->Core.bmGstRegShadows                   |= RT_BIT_64(enmGstReg);
+    pReNative->Core.bmHstRegsWithGstShadow             |= RT_BIT_32(idxHstReg);
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    iemNativeDbgInfoAddNativeOffset(pReNative, off);
+    iemNativeDbgInfoAddGuestRegShadowing(pReNative, enmGstReg, idxHstReg);
+#else
+    RT_NOREF(off);
+#endif
+}
+
+
+/**
+ * Clear any guest register shadow claims from @a idxHstReg.
+ *
+ * The register does not need to be shadowing any guest registers.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegClearGstRegShadowing(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg, uint32_t off)
+{
+    Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows)
+           == pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows
+           && pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    Assert(   RT_BOOL(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxHstReg))
+           == RT_BOOL(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows));
+
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    uint64_t fGstRegs = pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows;
+    if (fGstRegs)
+    {
+        Assert(fGstRegs < RT_BIT_64(kIemNativeGstReg_End));
+        iemNativeDbgInfoAddNativeOffset(pReNative, off);
+        while (fGstRegs)
+        {
+            unsigned const iGstReg = ASMBitFirstSetU64(fGstRegs) - 1;
+            fGstRegs &= ~RT_BIT_64(iGstReg);
+            iemNativeDbgInfoAddGuestRegShadowing(pReNative, (IEMNATIVEGSTREG)iGstReg, UINT8_MAX, idxHstReg);
+        }
+    }
+#else
+    RT_NOREF(off);
+#endif
+
+    pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxHstReg);
+    pReNative->Core.bmGstRegShadows        &= ~pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows;
+    pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows = 0;
+}
+
+
+/**
+ * Clear guest register shadow claim regarding @a enmGstReg from @a idxHstReg
+ * and global overview flags.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegClearGstRegShadowingOne(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg, uint32_t off)
+{
+    Assert(pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows)
+           == pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows
+           && pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    Assert(pReNative->Core.bmGstRegShadows & RT_BIT_64(enmGstReg));
+    Assert(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows & RT_BIT_64(enmGstReg));
+    Assert(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxHstReg));
+
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    iemNativeDbgInfoAddNativeOffset(pReNative, off);
+    iemNativeDbgInfoAddGuestRegShadowing(pReNative, enmGstReg, UINT8_MAX, idxHstReg);
+#else
+    RT_NOREF(off);
+#endif
+
+    uint64_t const fGstRegShadowsNew = pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows & ~RT_BIT_64(enmGstReg);
+    pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows = fGstRegShadowsNew;
+    if (!fGstRegShadowsNew)
+        pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxHstReg);
+    pReNative->Core.bmGstRegShadows &= ~RT_BIT_64(enmGstReg);
+}
+
+
+#if 0 /* unused */
+/**
+ * Clear any guest register shadow claim for @a enmGstReg.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegClearGstRegShadowingByGstReg(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg, uint32_t off)
+{
+    Assert(pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    if (pReNative->Core.bmGstRegShadows & RT_BIT_64(enmGstReg))
+    {
+        Assert(pReNative->Core.aidxGstRegShadows[enmGstReg] < RT_ELEMENTS(pReNative->Core.aHstRegs));
+        iemNativeRegClearGstRegShadowingOne(pReNative, pReNative->Core.aidxGstRegShadows[enmGstReg], enmGstReg, off);
+    }
+}
+#endif
+
+
+/**
+ * Clear any guest register shadow claim for @a enmGstReg and mark @a idxHstRegNew
+ * as the new shadow of it.
+ *
+ * Unlike the other guest reg shadow helpers, this does the logging for you.
+ * However, the liveness state is not asserted here; the caller must do that.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegClearAndMarkAsGstRegShadow(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstRegNew,
+                                       IEMNATIVEGSTREG enmGstReg, uint32_t off)
+{
+    Assert(pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    if (pReNative->Core.bmGstRegShadows & RT_BIT_64(enmGstReg))
+    {
+        uint8_t const idxHstRegOld = pReNative->Core.aidxGstRegShadows[enmGstReg];
+        Assert(idxHstRegOld < RT_ELEMENTS(pReNative->Core.aHstRegs));
+        if (idxHstRegOld == idxHstRegNew)
+            return;
+        Log12(("iemNativeRegClearAndMarkAsGstRegShadow: %s for guest %s (from %s)\n", g_apszIemNativeHstRegNames[idxHstRegNew],
+               g_aGstShadowInfo[enmGstReg].pszName, g_apszIemNativeHstRegNames[idxHstRegOld]));
+        iemNativeRegClearGstRegShadowingOne(pReNative, pReNative->Core.aidxGstRegShadows[enmGstReg], enmGstReg, off);
+    }
+    else
+        Log12(("iemNativeRegClearAndMarkAsGstRegShadow: %s for guest %s\n", g_apszIemNativeHstRegNames[idxHstRegNew],
+               g_aGstShadowInfo[enmGstReg].pszName));
+    iemNativeRegMarkAsGstRegShadow(pReNative, idxHstRegNew, enmGstReg, off);
+}
+
+
+/**
+ * Transfers the guest register shadow claims of @a enmGstReg from @a idxRegFrom
+ * to @a idxRegTo.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeRegTransferGstRegShadowing(PIEMRECOMPILERSTATE pReNative, uint8_t idxRegFrom, uint8_t idxRegTo,
+                                    IEMNATIVEGSTREG enmGstReg, uint32_t off)
+{
+    Assert(pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows & RT_BIT_64(enmGstReg));
+    Assert(pReNative->Core.aidxGstRegShadows[enmGstReg] == idxRegFrom);
+    Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows)
+           == pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows
+           && pReNative->Core.bmGstRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxRegTo].fGstRegShadows)
+           == pReNative->Core.aHstRegs[idxRegTo].fGstRegShadows);
+    Assert(   RT_BOOL(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxRegFrom))
+           == RT_BOOL(pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows));
+
+    uint64_t const fGstRegShadowsFrom = pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows & ~RT_BIT_64(enmGstReg);
+    pReNative->Core.aHstRegs[idxRegFrom].fGstRegShadows = fGstRegShadowsFrom;
+    if (!fGstRegShadowsFrom)
+        pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxRegFrom);
+    pReNative->Core.bmHstRegsWithGstShadow     |= RT_BIT_32(idxRegTo);
+    pReNative->Core.aHstRegs[idxRegTo].fGstRegShadows |= RT_BIT_64(enmGstReg);
+    pReNative->Core.aidxGstRegShadows[enmGstReg] = idxRegTo;
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    iemNativeDbgInfoAddNativeOffset(pReNative, off);
+    iemNativeDbgInfoAddGuestRegShadowing(pReNative, enmGstReg, idxRegTo, idxRegFrom);
+#else
+    RT_NOREF(off);
+#endif
+}
+
+
+/**
+ * Flushes any delayed guest register writes.
+ *
+ * This must be called prior to calling CImpl functions and any helpers that use
+ * the guest state (like raising exceptions) and such.
+ *
+ * This optimization has not yet been implemented.  The first target would be
+ * RIP updates, since these are the most common ones.
+ */
+DECL_INLINE_THROW(uint32_t)
+iemNativeRegFlushPendingWrites(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint64_t fGstShwExcept = 0, bool fFlushShadows = true)
+{
+#ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
+    if (!(fGstShwExcept & kIemNativeGstReg_Pc))
+        return iemNativeRegFlushPendingWritesSlow(pReNative, off, fGstShwExcept, fFlushShadows);
+#else
+    RT_NOREF(pReNative, fGstShwExcept);
+#endif
+
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    /** @todo r=bird: There must be a quicker way to check if anything needs doing here! */
+    /** @todo This doesn't mix well with fGstShwExcept but we ignore this for now and just flush everything. */
+    return iemNativeRegFlushPendingWritesSlow(pReNative, off, fGstShwExcept, fFlushShadows);
+#else
+    RT_NOREF(pReNative, fGstShwExcept, fFlushShadows);
+    return off;
+#endif
+}
+
+
+
+/*********************************************************************************************************************************
+*   SIMD register allocator (largely code duplication of the GPR allocator for now but might diverge)                            *
+*********************************************************************************************************************************/
+
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+
+DECL_FORCE_INLINE(uint8_t)
+iemNativeSimdRegMarkAllocated(PIEMRECOMPILERSTATE pReNative, uint8_t idxSimdReg,
+                              IEMNATIVEWHAT enmWhat, uint8_t idxVar = UINT8_MAX) RT_NOEXCEPT
+{
+    pReNative->Core.bmHstSimdRegs |= RT_BIT_32(idxSimdReg);
+
+    pReNative->Core.aHstSimdRegs[idxSimdReg].enmWhat        = enmWhat;
+    pReNative->Core.aHstSimdRegs[idxSimdReg].fGstRegShadows = 0;
+    RT_NOREF(idxVar);
+    return idxSimdReg;
+}
+
+
+/**
+ * Marks host SIMD register @a idxHstSimdReg as containing a shadow copy of guest
+ * SIMD register @a enmGstSimdReg.
+ *
+ * ASSUMES that caller has made sure @a enmGstSimdReg is not associated with any
+ * host register before calling.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeSimdRegMarkAsGstSimdRegShadow(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstSimdReg,
+                                       IEMNATIVEGSTSIMDREG enmGstSimdReg, uint32_t off)
+{
+    Assert(!(pReNative->Core.bmGstSimdRegShadows & RT_BIT_64(enmGstSimdReg)));
+    Assert(!pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows);
+    Assert((unsigned)enmGstSimdReg < (unsigned)kIemNativeGstSimdReg_End);
+
+    pReNative->Core.aidxGstSimdRegShadows[enmGstSimdReg]       = idxHstSimdReg;
+    pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows |= RT_BIT_64(enmGstSimdReg);
+    pReNative->Core.bmGstSimdRegShadows                        |= RT_BIT_64(enmGstSimdReg);
+    pReNative->Core.bmHstSimdRegsWithGstShadow                 |= RT_BIT_32(idxHstSimdReg);
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    iemNativeDbgInfoAddNativeOffset(pReNative, off);
+    iemNativeDbgInfoAddGuestSimdRegShadowing(pReNative, enmGstSimdReg, idxHstSimdReg);
+#else
+    RT_NOREF(off);
+#endif
+}
+
+
+/**
+ * Transfers the guest SIMD register shadow claims of @a enmGstSimdReg from @a idxSimdRegFrom
+ * to @a idxSimdRegTo.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeSimdRegTransferGstSimdRegShadowing(PIEMRECOMPILERSTATE pReNative, uint8_t idxSimdRegFrom, uint8_t idxSimdRegTo,
+                                            IEMNATIVEGSTSIMDREG enmGstSimdReg, uint32_t off)
+{
+    Assert(pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows & RT_BIT_64(enmGstSimdReg));
+    Assert(pReNative->Core.aidxGstSimdRegShadows[enmGstSimdReg] == idxSimdRegFrom);
+    Assert(   (pReNative->Core.bmGstSimdRegShadows & pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows)
+           == pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows
+           && pReNative->Core.bmGstSimdRegShadows < RT_BIT_64(kIemNativeGstReg_End));
+    Assert(   (pReNative->Core.bmGstSimdRegShadows & pReNative->Core.aHstSimdRegs[idxSimdRegTo].fGstRegShadows)
+           == pReNative->Core.aHstSimdRegs[idxSimdRegTo].fGstRegShadows);
+    Assert(   RT_BOOL(pReNative->Core.bmHstSimdRegsWithGstShadow & RT_BIT_32(idxSimdRegFrom))
+           == RT_BOOL(pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows));
+    Assert(   pReNative->Core.aHstSimdRegs[idxSimdRegFrom].enmLoaded
+           == pReNative->Core.aHstSimdRegs[idxSimdRegTo].enmLoaded);
+
+    uint64_t const fGstRegShadowsFrom = pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows & ~RT_BIT_64(enmGstSimdReg);
+    pReNative->Core.aHstSimdRegs[idxSimdRegFrom].fGstRegShadows = fGstRegShadowsFrom;
+    if (!fGstRegShadowsFrom)
+    {
+        pReNative->Core.bmHstSimdRegsWithGstShadow &= ~RT_BIT_32(idxSimdRegFrom);
+        pReNative->Core.aHstSimdRegs[idxSimdRegFrom].enmLoaded = kIemNativeGstSimdRegLdStSz_Invalid;
+    }
+    pReNative->Core.bmHstSimdRegsWithGstShadow |= RT_BIT_32(idxSimdRegTo);
+    pReNative->Core.aHstSimdRegs[idxSimdRegTo].fGstRegShadows |= RT_BIT_64(enmGstSimdReg);
+    pReNative->Core.aidxGstSimdRegShadows[enmGstSimdReg] = idxSimdRegTo;
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    iemNativeDbgInfoAddNativeOffset(pReNative, off);
+    iemNativeDbgInfoAddGuestSimdRegShadowing(pReNative, enmGstSimdReg, idxSimdRegTo, idxSimdRegFrom);
+#else
+    RT_NOREF(off);
+#endif
+}
+
+
+/**
+ * Clear any guest register shadow claims from @a idxHstSimdReg.
+ *
+ * The register does not need to be shadowing any guest registers.
+ */
+DECL_FORCE_INLINE(void)
+iemNativeSimdRegClearGstSimdRegShadowing(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstSimdReg, uint32_t off)
+{
+    Assert(   (pReNative->Core.bmGstSimdRegShadows & pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows)
+           == pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows
+           && pReNative->Core.bmGstSimdRegShadows < RT_BIT_64(kIemNativeGstSimdReg_End));
+    Assert(   RT_BOOL(pReNative->Core.bmHstSimdRegsWithGstShadow & RT_BIT_32(idxHstSimdReg))
+           == RT_BOOL(pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows));
+    Assert(   !(pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows & pReNative->Core.bmGstSimdRegShadowDirtyLo128)
+           && !(pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows & pReNative->Core.bmGstSimdRegShadowDirtyHi128));
+
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+    uint64_t fGstRegs = pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows;
+    if (fGstRegs)
+    {
+        Assert(fGstRegs < RT_BIT_64(kIemNativeGstSimdReg_End));
+        iemNativeDbgInfoAddNativeOffset(pReNative, off);
+        while (fGstRegs)
+        {
+            unsigned const iGstReg = ASMBitFirstSetU64(fGstRegs) - 1;
+            fGstRegs &= ~RT_BIT_64(iGstReg);
+            iemNativeDbgInfoAddGuestSimdRegShadowing(pReNative, (IEMNATIVEGSTSIMDREG)iGstReg, UINT8_MAX, idxHstSimdReg);
+        }
+    }
+#else
+    RT_NOREF(off);
+#endif
+
+    pReNative->Core.bmHstSimdRegsWithGstShadow &= ~RT_BIT_32(idxHstSimdReg);
+    pReNative->Core.bmGstSimdRegShadows        &= ~pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows;
+    pReNative->Core.aHstSimdRegs[idxHstSimdReg].fGstRegShadows = 0;
+    pReNative->Core.aHstSimdRegs[idxHstSimdReg].enmLoaded      = kIemNativeGstSimdRegLdStSz_Invalid;
+}
+
+#endif /* IEMNATIVE_WITH_SIMD_REG_ALLOCATOR */
+
+
+#ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
+/**
+ * Emits code to update the guest RIP value by adding the current offset since
+ * the start of the last RIP update.
+ */
+DECL_INLINE_THROW(uint32_t) iemNativeEmitPcWriteback(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+{
+    if (pReNative->Core.offPc)
+        return iemNativeEmitPcWritebackSlow(pReNative, off);
+    return off;
+}
+#endif /* IEMNATIVE_WITH_DELAYED_PC_UPDATING */
+
+
 /** @} */
 
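The added iemNativeCImplFlagsToGuestShadowFlushMask maps IEM_CIMPL_F_XXX behaviour flags to the set of guest-register shadow copies that must be flushed: each flag ORs in RT_BIT_64() of the affected register indices (a far branch invalidates the CS selector/base/limit shadows, a stack branch invalidates xSP, a far stack branch also the SS shadows, and anything touching RFLAGS the EFlags shadow). A minimal standalone sketch of that bit-composition pattern; the enum values and flag names below are invented for illustration and do not match the real IEMNATIVEGSTREG layout:

    #include <cstdint>
    #include <cstdio>

    /* Hypothetical stand-ins for the IEMNATIVEGSTREG indices and the
       IEM_CIMPL_F_XXX flags; the real values live in IEMN8veRecompiler.h. */
    enum { kGstReg_EFlags = 16, kGstReg_SegSelCs = 17, kGstReg_SegBaseCs = 23, kGstReg_SegLimitCs = 29 };
    constexpr uint32_t f_BranchFar   = UINT32_C(1) << 0;
    constexpr uint32_t f_StatusFlags = UINT32_C(1) << 1;

    /* Same shape as iemNativeCImplFlagsToGuestShadowFlushMask: each CIMPL
       behaviour flag contributes the shadow bits it may invalidate. */
    uint64_t flagsToFlushMask(uint32_t fCImpl, uint64_t fFlush)
    {
        if (fCImpl & f_BranchFar)
            fFlush |= (uint64_t)1 << kGstReg_SegSelCs
                   |  (uint64_t)1 << kGstReg_SegBaseCs
                   |  (uint64_t)1 << kGstReg_SegLimitCs;
        if (fCImpl & f_StatusFlags)
            fFlush |= (uint64_t)1 << kGstReg_EFlags;
        return fFlush;
    }

    int main()
    {
        /* Far branch that also touches status flags -> bits 16, 17, 23, 29. */
        printf("%#llx\n", (unsigned long long)flagsToFlushMask(f_BranchFar | f_StatusFlags, 0));
    }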
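The liveness accessors added under IEMNATIVE_WITH_LIVENESS_ANALYSIS store per-register state as parallel 64-bit bit-planes: bit i of Bit0.bm64 and bit i of Bit1.bm64 together form register i's two-bit state, which iemNativeLivenessGetStateByGstRegEx gathers with shifts and masks (four planes in the extended layout). A standalone sketch of that bit-plane encoding, using plain uint64_t fields in place of the IEMLIVENESSENTRY union:

    #include <cstdint>
    #include <cassert>

    /* Two parallel 64-bit planes; register i's 2-bit state is
       {bit i of bm1, bit i of bm0}, mirroring IEMLIVENESSENTRY's Bit0/Bit1. */
    struct LivenessEntry
    {
        uint64_t bm0;
        uint64_t bm1;
    };

    uint32_t getState(const LivenessEntry &e, unsigned iReg)
    {
        /* Gather one bit from each plane into a 2-bit value. */
        return (uint32_t)((e.bm0 >> iReg) & 1)
             | (uint32_t)(((e.bm1 >> iReg) << 1) & 2);
    }

    void setState(LivenessEntry &e, unsigned iReg, uint32_t uState)
    {
        /* Scatter the 2-bit state back into the planes. */
        e.bm0 = (e.bm0 & ~((uint64_t)1 << iReg)) | ((uint64_t)(uState & 1)        << iReg);
        e.bm1 = (e.bm1 & ~((uint64_t)1 << iReg)) | ((uint64_t)((uState >> 1) & 1) << iReg);
    }

    int main()
    {
        LivenessEntry e = {0, 0};
        setState(e, 7, 3);                               /* e.g. INPUT */
        assert(getState(e, 7) == 3 && getState(e, 8) == 0);
    }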
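For EFLAGS the entry tracks each flag (CF, PF, AF, ZF, SF, OF) separately, so iemNativeLivenessGetStateByGstReg expands every sub-state to RT_BIT_32(state) and merges them by priority: INPUT beats everything, CLOBBERED only survives if all sub-flags are clobbered (a partial clobber still counts as INPUT), then XCPT_OR_CALL, then UNUSED. A sketch of that merge rule; the numeric state values are hypothetical, only their relative priority matters:

    #include <cstdint>
    #include <cassert>

    enum State : uint32_t { UNUSED = 0, XCPT_OR_CALL = 1, CLOBBERED = 2, INPUT = 3 };

    /* fExp2 is the OR of (1u << state) for every EFLAGS sub-flag, like the
       fMergedStateExp2 parameter of iemNativeLivenessMergeExpandedEFlagsState. */
    uint32_t mergeExpanded(uint32_t fExp2)
    {
        if (fExp2 & (1u << INPUT))
            return INPUT;
        if (fExp2 & (1u << CLOBBERED))
            /* Partially clobbered: any UNUSED/XCPT_OR_CALL sub-flag means the
               register as a whole must still be treated as an input. */
            return fExp2 & ((1u << UNUSED) | (1u << XCPT_OR_CALL)) ? INPUT : CLOBBERED;
        if (fExp2 & (1u << XCPT_OR_CALL))
            return XCPT_OR_CALL;
        return UNUSED;
    }

    int main()
    {
        assert(mergeExpanded(1u << CLOBBERED) == CLOBBERED);
        assert(mergeExpanded((1u << CLOBBERED) | (1u << UNUSED)) == INPUT);      /* partial clobber */
        assert(mergeExpanded((1u << XCPT_OR_CALL) | (1u << UNUSED)) == XCPT_OR_CALL);
    }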
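The shadow-copy helpers (iemNativeRegMarkAsGstRegShadow, iemNativeRegClearGstRegShadowing, iemNativeRegTransferGstRegShadowing and their SIMD twins) all keep four structures in lock-step: a guest-side bitmap of shadowed registers, a host-side bitmap of registers holding shadows, a guest-to-host index map, and a per-host-register mask of shadowed guest registers; the long Assert chains above check exactly that consistency. A toy model of the transfer operation under those invariants (field names shortened, array sizes arbitrary):

    #include <cstdint>
    #include <cassert>

    /* Toy model of the recompiler's shadow bookkeeping. */
    struct ShadowState
    {
        uint64_t bmGstShadows;       /* guest regs that have a host shadow  */
        uint32_t bmHstWithShadow;    /* host regs holding at least one copy */
        uint8_t  aidxGstToHst[64];   /* guest reg -> host reg               */
        uint64_t afHstGstMask[16];   /* host reg  -> mask of guest regs     */
    };

    void transfer(ShadowState &s, uint8_t idxFrom, uint8_t idxTo, unsigned iGstReg)
    {
        assert(s.afHstGstMask[idxFrom] & ((uint64_t)1 << iGstReg));
        s.afHstGstMask[idxFrom] &= ~((uint64_t)1 << iGstReg);
        if (!s.afHstGstMask[idxFrom])            /* last shadow gone?       */
            s.bmHstWithShadow &= ~(1u << idxFrom);
        s.afHstGstMask[idxTo] |= (uint64_t)1 << iGstReg;
        s.bmHstWithShadow     |= 1u << idxTo;
        s.aidxGstToHst[iGstReg] = idxTo;         /* bmGstShadows unchanged  */
    }

    int main()
    {
        ShadowState s = {};
        s.bmGstShadows = 1u << 3; s.bmHstWithShadow = 1u << 2;
        s.aidxGstToHst[3] = 2;    s.afHstGstMask[2] = 1u << 3;
        transfer(s, 2, 5, 3);
        assert(s.aidxGstToHst[3] == 5 && !(s.bmHstWithShadow & (1u << 2)));
    }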
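Finally, iemNativeEmitPcWriteback is the fast path of the IEMNATIVE_WITH_DELAYED_PC_UPDATING optimization: RIP advances accumulate in Core.offPc and are only materialized by the out-of-line iemNativeEmitPcWritebackSlow when something is actually pending, so straight-line code pays a single test per flush point. The same accumulate-then-flush pattern in a self-contained sketch (MiniState and its fields are invented for illustration):

    #include <cstdint>
    #include <cstdio>

    /* Hypothetical miniature of the delayed-PC pattern: instructions add to a
       pending offset; the flush materializes it into the architectural PC. */
    struct MiniState
    {
        uint64_t uPc;    /* architectural PC (the guest RIP in VBox's case) */
        uint32_t offPc;  /* bytes executed since the last PC write-back     */
    };

    void flushPcSlow(MiniState &s)         /* out-of-line slow path         */
    {
        s.uPc  += s.offPc;
        s.offPc = 0;
    }

    inline void flushPc(MiniState &s)      /* cheap inline fast path        */
    {
        if (s.offPc)                       /* only pay when something pends */
            flushPcSlow(s);
    }

    int main()
    {
        MiniState s = {0x1000, 0};
        s.offPc += 3;                      /* two decoded instructions...   */
        s.offPc += 2;
        flushPc(s);                        /* ...one write-back             */
        printf("%#llx\n", (unsigned long long)s.uPc);   /* 0x1005 */
    }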