Changeset 105732 in vbox for trunk/include
- Timestamp: Aug 19, 2024 5:01:37 PM
- svn:sync-xref-src-repo-rev: 164437
- Location: trunk/include
- Files: 2 edited
trunk/include/VBox/vmm/cpum-armv8.h (r101121 → r105732)

    @@ -858,4 +858,6 @@
     VMMDECL(bool) CPUMGetGuestIrqMasked(PVMCPUCC pVCpu);
     VMMDECL(bool) CPUMGetGuestFiqMasked(PVMCPUCC pVCpu);
    +VMM_INT_DECL(uint8_t) CPUMGetGuestEL(PVMCPUCC pVCpu);
    +VMM_INT_DECL(bool) CPUMGetGuestMmuEnabled(PVMCPUCC pVCpu);
     VMMDECL(VBOXSTRICTRC) CPUMQueryGuestSysReg(PVMCPUCC pVCpu, uint32_t idSysReg, uint64_t *puValue);
     /** @} */
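The changeset only adds the two declarations above; it does not show how they are meant to be called. Below is a minimal caller-side sketch, assuming the declarations are reachable through the usual VMM CPUM includes and that CPUMGetGuestMmuEnabled() reports the SCTLR_ELx.M state for the guest's current translation regime. The helper name is hypothetical, not taken from this changeset.

    /* Hypothetical usage sketch, not part of r105732. */
    #include <VBox/vmm/cpum.h> /* assumed to expose the cpum-armv8.h declarations for ARMv8 targets */

    static bool exampleGuestUsesStage1Translation(PVMCPUCC pVCpu)
    {
        uint8_t const bEl  = CPUMGetGuestEL(pVCpu);          /* current exception level, 0..3 */
        bool const    fMmu = CPUMGetGuestMmuEnabled(pVCpu);  /* presumably SCTLR_ELx.M for that EL */
        NOREF(bEl); /* this sketch only cares whether the MMU is on */
        return fMmu;
    }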
trunk/include/iprt/armv8.h (r105685 → r105732)

    @@ -2666,4 +2666,60 @@
     
     
    +
    +/** @name SCTLR_EL1 - AArch64 System Control Register (EL1).
    + * @{ */
    +/** Bit 0 - MMU enable for EL1 and EL0 stage 1 address translation. */
    +#define ARMV8_SCTLR_EL1_M RT_BIT_64(0)
    +/** Bit 1 - Alignment check enable for EL1 and EL0. */
    +#define ARMV8_SCTLR_EL1_A RT_BIT_64(1)
    +/** Bit 2 - Stage 1 cacheability control, for data accesses. */
    +#define ARMV8_SCTLR_EL1_C RT_BIT_64(2)
    +/** Bit 3 - SP alignment check enable. */
    +#define ARMV8_SCTLR_EL1_SA RT_BIT_64(3)
    +/** Bit 4 - SP alignment check enable for EL0. */
    +#define ARMV8_SCTLR_EL1_SA0 RT_BIT_64(4)
    +/** Bit 5 - System instruction memory barrier enable from AArch32 EL0. */
    +#define ARMV8_SCTLR_EL1_CP15BEN RT_BIT_64(5)
    +/** Bit 6 - Non-aligned access enable. */
    +#define ARMV8_SCTLR_EL1_nAA RT_BIT_64(6)
    +/** Bit 7 - IT disable, disables some uses of IT instructions at EL0 using AArch32. */
    +#define ARMV8_SCTLR_EL1_ITD RT_BIT_64(7)
    +/** Bit 8 - SETEND instruction disable, disables SETEND instructions at EL0 using AArch32. */
    +#define ARMV8_SCTLR_EL1_SED RT_BIT_64(8)
    +/** Bit 9 - User Mask Access. Traps EL0 execution of MSR and MRS instructions that access the PSTATE.{D,A,I,F} masks to EL1. */
    +#define ARMV8_SCTLR_EL1_UMA RT_BIT_64(9)
    +/** Bit 10 - Enable EL0 acccess to the CFP*, DVP* and CPP* instructions if FEAT_SPECRES is supported. */
    +#define ARMV8_SCTLR_EL1_EnRCTX RT_BIT_64(10)
    +/** Bit 11 - Exception Exit is Context Synchronizing (FEAT_ExS required). */
    +#define ARMV8_SCTLR_EL1_EOS RT_BIT_64(11)
    +/** Bit 12 - Stage 1 instruction access cacheability control, for access at EL0 and EL1. */
    +#define ARMV8_SCTLR_EL1_I RT_BIT_64(12)
    +/** @todo Finish (lazy developer). */
    +/** @} */
    +
    +
    +/** @name SCTLR_EL2 - AArch64 System Control Register (EL2) - 32-bit.
    + * @{ */
    +/** Bit 0 - MMU enable for EL2. */
    +#define ARMV8_SCTLR_EL2_M RT_BIT_64(0)
    +/** Bit 1 - Alignment check enable. */
    +#define ARMV8_SCTLR_EL2_A RT_BIT_64(1)
    +/** Bit 2 - Global enable for data and unified caches. */
    +#define ARMV8_SCTLR_EL2_C RT_BIT_64(2)
    +/** Bit 3 - SP alignment check enable. */
    +#define ARMV8_SCTLR_EL2_SA RT_BIT_64(3)
    +/* Bit 4 - 11 - Reserved. */
    +/** Bit 12 - Instruction cache enable. */
    +#define ARMV8_SCTLR_EL2_I RT_BIT_64(12)
    +/* Bit 13 - 18 - Reserved. */
    +/** Bit 19 - Force treatment of all memory regions with write permissions as XN. */
    +#define ARMV8_SCTLR_EL2_WXN RT_BIT_64(19)
    +/* Bit 20 - 24 - Reserved. */
    +/** Bit 25 - Exception endianess - set means big endian, clear little endian. */
    +#define ARMV8_SCTLR_EL2_EE RT_BIT_64(25)
    +/* Bit 26 - 31 - Reserved. */
    +/** @} */
    +
    +
     #if (!defined(VBOX_FOR_DTRACE_LIB) && defined(__cplusplus) && !defined(ARMV8_WITHOUT_MK_INSTR)) || defined(DOXYGEN_RUNNING)
     /** @defgroup grp_rt_armv8_mkinstr Instruction Encoding Helpers
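The new ARMV8_SCTLR_EL1_* and ARMV8_SCTLR_EL2_* macros are plain single-bit masks, so decoding a raw register value is straightforward. A small sketch follows; the function names are made up for illustration, and how the SCTLR value is obtained (for example, from a saved guest register context) is outside this changeset.

    /* Illustrative decoding of a raw SCTLR_EL1 value using the masks added above. */
    #include <iprt/cdefs.h>
    #include <iprt/types.h>
    #include <iprt/armv8.h>

    static bool exampleSctlrEl1MmuEnabled(uint64_t uSctlrEl1)
    {
        /* SCTLR_EL1.M (bit 0): EL1&0 stage 1 address translation enable. */
        return RT_BOOL(uSctlrEl1 & ARMV8_SCTLR_EL1_M);
    }

    static bool exampleSctlrEl1AlignmentChecksOn(uint64_t uSctlrEl1)
    {
        /* SCTLR_EL1.A (bit 1): alignment check enable for EL1 and EL0. */
        return RT_BOOL(uSctlrEl1 & ARMV8_SCTLR_EL1_A);
    }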