VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp @ 74157

Last change on this file since 74157 was 74147, checked in by vboxsync, 7 years ago

VMM/CPUM: Nested VMX: bugref:9180 Secondary processor-based VM-execution controls reporting fix.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 252.5 KB
1/* $Id: CPUMAllMsrs.cpp 74147 2018-09-07 18:43:08Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#include <VBox/vmm/tm.h>
28#include <VBox/vmm/gim.h>
29#include "CPUMInternal.h"
30#include <VBox/vmm/vm.h>
31#include <VBox/err.h>
32
33
34/*********************************************************************************************************************************
35* Defined Constants And Macros *
36*********************************************************************************************************************************/
37/**
38 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
39 * pointing to it.
40 *
41 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
42 * correctly.
43 */
44#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
45 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
46 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
47 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
48 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
49 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
50 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
51
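/* A minimal usage sketch (mirroring cpumMsrRd_Ia32MtrrFixed / cpumMsrWr_Ia32MtrrFixed
   further down, with the names used there):
       CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
       *puValue = *puFixedMtrr;
   The macro validates pRange->offCpumCpu and leaves puFixedMtrr pointing at the
   backing uint64_t inside CPUMCPU. */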
52
53/*********************************************************************************************************************************
54* Structures and Typedefs *
55*********************************************************************************************************************************/
56
57/**
58 * Implements reading one or more MSRs.
59 *
60 * @returns VBox status code.
61 * @retval VINF_SUCCESS on success.
62 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
63 * current context (raw-mode or ring-0).
64 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
65 *
66 * @param pVCpu The cross context virtual CPU structure.
67 * @param idMsr The MSR we're reading.
68 * @param pRange The MSR range descriptor.
69 * @param puValue Where to return the value.
70 */
71typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
72/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
73typedef FNCPUMRDMSR *PFNCPUMRDMSR;
74
75
76/**
77 * Implements writing one or more MSRs.
78 *
79 * @retval VINF_SUCCESS on success.
80 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
81 * current context (raw-mode or ring-0).
82 * @retval VERR_CPUM_RAISE_GP_0 on failure.
83 *
84 * @param pVCpu The cross context virtual CPU structure.
85 * @param idMsr The MSR we're writing.
86 * @param pRange The MSR range descriptor.
87 * @param uValue The value to set, ignored bits masked.
88 * @param uRawValue The raw value with the ignored bits not masked.
89 */
90typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
91/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
92typedef FNCPUMWRMSR *PFNCPUMWRMSR;
93
94
95
96/*
97 * Generic functions.
98 * Generic functions.
99 * Generic functions.
100 */
101
102
103/** @callback_method_impl{FNCPUMRDMSR} */
104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
105{
106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
107 *puValue = pRange->uValue;
108 return VINF_SUCCESS;
109}
110
111
112/** @callback_method_impl{FNCPUMWRMSR} */
113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
114{
115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
116 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
117 return VINF_SUCCESS;
118}
119
120
121/** @callback_method_impl{FNCPUMRDMSR} */
122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
123{
124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
125 return VERR_CPUM_RAISE_GP_0;
126}
127
128
129/** @callback_method_impl{FNCPUMWRMSR} */
130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
131{
132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
133 Assert(pRange->fWrGpMask == UINT64_MAX);
134 return VERR_CPUM_RAISE_GP_0;
135}
136
137
138
139
140/*
141 * IA32
142 * IA32
143 * IA32
144 */
145
146/** @callback_method_impl{FNCPUMRDMSR} */
147static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
148{
149 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
150 *puValue = 0; /** @todo implement machine check injection. */
151 return VINF_SUCCESS;
152}
153
154
155/** @callback_method_impl{FNCPUMWRMSR} */
156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
157{
158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
159 /** @todo implement machine check injection. */
160 return VINF_SUCCESS;
161}
162
163
164/** @callback_method_impl{FNCPUMRDMSR} */
165static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
166{
167 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
168 *puValue = 0; /** @todo implement machine check injection. */
169 return VINF_SUCCESS;
170}
171
172
173/** @callback_method_impl{FNCPUMWRMSR} */
174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
175{
176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
177 /** @todo implement machine check injection. */
178 return VINF_SUCCESS;
179}
180
181
182/** @callback_method_impl{FNCPUMRDMSR} */
183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
184{
185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
186 *puValue = TMCpuTickGet(pVCpu);
187#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
188 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
189#endif
190 return VINF_SUCCESS;
191}
192
193
194/** @callback_method_impl{FNCPUMWRMSR} */
195static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
196{
197 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
198 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
199 return VINF_SUCCESS;
200}
201
202
203/** @callback_method_impl{FNCPUMRDMSR} */
204static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
205{
206 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
207 uint64_t uValue = pRange->uValue;
208 if (uValue & 0x1f00)
209 {
210 /* Max allowed bus ratio present. */
211 /** @todo Implement scaled BUS frequency. */
212 }
213
214 *puValue = uValue;
215 return VINF_SUCCESS;
216}
217
218
219/** @callback_method_impl{FNCPUMRDMSR} */
220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
221{
222 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
223 return APICGetBaseMsr(pVCpu, puValue);
224}
225
226
227/** @callback_method_impl{FNCPUMWRMSR} */
228static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
229{
230 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
231 return APICSetBaseMsr(pVCpu, uValue);
232}
233
234
235/**
236 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
237 *
238 * @returns Fixed IA32_FEATURE_CONTROL value.
239 * @param pVCpu The cross context per CPU structure.
240 */
241VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
242{
243 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
244 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
245
246 /* Report VMX features. */
247 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
248 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
249
250 return fFeatCtl;
251}
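/* A minimal consumer-side sketch, using only the MSR_IA32_FEATURE_CONTROL_* names
   referenced above: a guest may execute VMXON (outside SMX) only when both the
   VMXON bit and the lock bit are set.
       uint64_t const fFeatCtl  = CPUMGetGuestIa32FeatureControl(pVCpu);
       bool const     fCanVmxon = (fFeatCtl & MSR_IA32_FEATURE_CONTROL_LOCK)
                               && (fFeatCtl & MSR_IA32_FEATURE_CONTROL_VMXON);
*/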
252
253/** @callback_method_impl{FNCPUMRDMSR} */
254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
255{
256 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
257 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
258 return VINF_SUCCESS;
259}
260
261
262/** @callback_method_impl{FNCPUMWRMSR} */
263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
264{
265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
266 return VERR_CPUM_RAISE_GP_0;
267}
268
269
270/** @callback_method_impl{FNCPUMRDMSR} */
271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
272{
273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
274 /** @todo fake microcode update. */
275 *puValue = pRange->uValue;
276 return VINF_SUCCESS;
277}
278
279
280/** @callback_method_impl{FNCPUMWRMSR} */
281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
282{
283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
284 /* Normally, zero is written to Ia32BiosSignId before reading it in order
285 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
286 of the database entry should take care of most illegal writes for now, so
287 just ignore all writes atm. */
288 return VINF_SUCCESS;
289}
290
291
292/** @callback_method_impl{FNCPUMWRMSR} */
293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
294{
295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
296 /** @todo Fake bios update trigger better. The value is the address to an
297 * update package, I think. We should probably GP if it's invalid. */
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
304 *
305 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
306 * @param pVCpu The cross context per CPU structure.
307 */
308VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
309{
310 /* We do not support dual-monitor treatment for SMI and SMM. */
311 /** @todo SMM. */
312 RT_NOREF(pVCpu);
313 return 0;
314}
315
316
317/** @callback_method_impl{FNCPUMRDMSR} */
318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
319{
320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
321 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
322 return VINF_SUCCESS;
323}
324
325
326/** @callback_method_impl{FNCPUMWRMSR} */
327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
328{
329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
330 /** @todo SMM. */
331 return VINF_SUCCESS;
332}
333
334
335/** @callback_method_impl{FNCPUMRDMSR} */
336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
337{
338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
339 /** @todo check CPUID leaf 0ah. */
340 *puValue = 0;
341 return VINF_SUCCESS;
342}
343
344
345/** @callback_method_impl{FNCPUMWRMSR} */
346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
347{
348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
349 /** @todo check CPUID leaf 0ah. */
350 return VINF_SUCCESS;
351}
352
353
354/** @callback_method_impl{FNCPUMRDMSR} */
355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
356{
357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
358 /** @todo return 0x1000 if we try to emulate mwait 100% correctly. */
359 *puValue = 0x40; /** @todo Change to CPU cache line size. */
360 return VINF_SUCCESS;
361}
362
363
364/** @callback_method_impl{FNCPUMWRMSR} */
365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
366{
367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
368 /** @todo should remember writes, though it's supposedly something only a BIOS
369 * would write, so it's not extremely important. */
370 return VINF_SUCCESS;
371}
372
373/** @callback_method_impl{FNCPUMRDMSR} */
374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
375{
376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
377 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
378 * what we want? */
379 *puValue = TMCpuTickGet(pVCpu);
380#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
381 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
382#endif
383 return VINF_SUCCESS;
384}
385
386
387/** @callback_method_impl{FNCPUMWRMSR} */
388static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
389{
390 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
391 /** @todo Write MPERF: Calc adjustment. */
392 return VINF_SUCCESS;
393}
394
395
396/** @callback_method_impl{FNCPUMRDMSR} */
397static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
398{
399 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
400 /** @todo Read APERF: Adjust against previously written MPERF value. Is TSC
401 * what we want? */
402 *puValue = TMCpuTickGet(pVCpu);
403#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
404 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
405#endif
406 return VINF_SUCCESS;
407}
408
409
410/** @callback_method_impl{FNCPUMWRMSR} */
411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
412{
413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
414 /** @todo Write APERF: Calc adjustment. */
415 return VINF_SUCCESS;
416}
417
418
419/**
420 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
421 *
422 * @returns Fixed IA32_MTRR_CAP value.
423 * @param pVCpu The cross context per CPU structure.
424 */
425VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
426{
427 RT_NOREF_PV(pVCpu);
428
429 /* This is currently a bit weird. :-) */
430 uint8_t const cVariableRangeRegs = 0;
431 bool const fSystemManagementRangeRegisters = false;
432 bool const fFixedRangeRegisters = false;
433 bool const fWriteCombiningType = false;
434 return cVariableRangeRegs
435 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
436 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
437 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
438}
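/* Bit layout note for the value composed above: bits 7:0 hold VCNT (the variable
   range register count), bit 8 reports fixed-range support, bit 10 write-combining
   support and bit 11 SMRR support.  With everything disabled as configured here,
   the guest simply reads 0. */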
439
440/** @callback_method_impl{FNCPUMRDMSR} */
441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
442{
443 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
444 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
445 return VINF_SUCCESS;
446}
447
448
449/** @callback_method_impl{FNCPUMRDMSR} */
450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
451{
452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
453 /** @todo Implement variable MTRR storage. */
454 Assert(pRange->uValue == (idMsr - 0x200) / 2);
455 *puValue = 0;
456 return VINF_SUCCESS;
457}
458
459
460/** @callback_method_impl{FNCPUMWRMSR} */
461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
462{
463 /*
464 * Validate the value.
465 */
466 Assert(pRange->uValue == (idMsr - 0x200) / 2);
467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
468
469 uint8_t uType = uValue & 0xff;
470 if ((uType >= 7) || (uType == 2) || (uType == 3))
471 {
472 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
473 return VERR_CPUM_RAISE_GP_0;
474 }
475
476 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
477 if (fInvPhysMask & uValue)
478 {
479 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
480 idMsr, uValue, uValue & fInvPhysMask));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 /*
485 * Store it.
486 */
487 /** @todo Implement variable MTRR storage. */
488 return VINF_SUCCESS;
489}
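/* Note on the type check above: the architecturally valid MTRR memory types are
   UC (0), WC (1), WT (4), WP (5) and WB (6); values 2, 3 and 7 and up are reserved,
   which is exactly what the (uType >= 7) || (uType == 2) || (uType == 3) test
   rejects with a #GP. */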
490
491
492/** @callback_method_impl{FNCPUMRDMSR} */
493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
494{
495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
496 /** @todo Implement variable MTRR storage. */
497 Assert(pRange->uValue == (idMsr - 0x200) / 2);
498 *puValue = 0;
499 return VINF_SUCCESS;
500}
501
502
503/** @callback_method_impl{FNCPUMWRMSR} */
504static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
505{
506 /*
507 * Validate the value.
508 */
509 Assert(pRange->uValue == (idMsr - 0x200) / 2);
510 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
511
512 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
513 if (fInvPhysMask & uValue)
514 {
515 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
516 idMsr, uValue, uValue & fInvPhysMask));
517 return VERR_CPUM_RAISE_GP_0;
518 }
519
520 /*
521 * Store it.
522 */
523 /** @todo Implement variable MTRR storage. */
524 return VINF_SUCCESS;
525}
526
527
528/** @callback_method_impl{FNCPUMRDMSR} */
529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
530{
531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
532 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
533 *puValue = *puFixedMtrr;
534 return VINF_SUCCESS;
535}
536
537
538/** @callback_method_impl{FNCPUMWRMSR} */
539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
540{
541 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
542 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
543
544 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
545 {
546 uint8_t uType = (uint8_t)(uValue >> cShift);
547 if ((uType >= 7) || (uType == 2) || (uType == 3))
548 {
549 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
550 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
551 return VERR_CPUM_RAISE_GP_0;
552 }
553 }
554 *puFixedMtrr = uValue;
555 return VINF_SUCCESS;
556}
557
558
559/** @callback_method_impl{FNCPUMRDMSR} */
560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
561{
562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
563 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
564 return VINF_SUCCESS;
565}
566
567
568/** @callback_method_impl{FNCPUMWRMSR} */
569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
570{
571 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
572
573 uint8_t uType = uValue & 0xff;
574 if ((uType >= 7) || (uType == 2) || (uType == 3))
575 {
576 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
577 return VERR_CPUM_RAISE_GP_0;
578 }
579
580 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
581 return VINF_SUCCESS;
582}
583
584
585/** @callback_method_impl{FNCPUMRDMSR} */
586static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
587{
588 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
589 *puValue = pVCpu->cpum.s.Guest.msrPAT;
590 return VINF_SUCCESS;
591}
592
593
594/** @callback_method_impl{FNCPUMWRMSR} */
595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
596{
597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
598 if (CPUMIsPatMsrValid(uValue))
599 {
600 pVCpu->cpum.s.Guest.msrPAT = uValue;
601 return VINF_SUCCESS;
602 }
603 return VERR_CPUM_RAISE_GP_0;
604}
605
606
607/** @callback_method_impl{FNCPUMRDMSR} */
608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
609{
610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
611 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
612 return VINF_SUCCESS;
613}
614
615
616/** @callback_method_impl{FNCPUMWRMSR} */
617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
618{
619 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
620
621 /* Note! We used to mask this by 0xffff, but turns out real HW doesn't and
622 there are generally 32-bit working bits backing this register. */
623 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
624 return VINF_SUCCESS;
625}
626
627
628/** @callback_method_impl{FNCPUMRDMSR} */
629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
630{
631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
632 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
633 return VINF_SUCCESS;
634}
635
636
637/** @callback_method_impl{FNCPUMWRMSR} */
638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
639{
640 if (X86_IS_CANONICAL(uValue))
641 {
642 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
643 return VINF_SUCCESS;
644 }
645 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
646 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
647 return VERR_CPUM_RAISE_GP_0;
648}
649
650
651/** @callback_method_impl{FNCPUMRDMSR} */
652static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
653{
654 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
655 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
656 return VINF_SUCCESS;
657}
658
659
660/** @callback_method_impl{FNCPUMWRMSR} */
661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
662{
663 if (X86_IS_CANONICAL(uValue))
664 {
665 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
666 return VINF_SUCCESS;
667 }
668 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
669 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
670 return VERR_CPUM_RAISE_GP_0;
671}
672
673
674/** @callback_method_impl{FNCPUMRDMSR} */
675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
676{
677#if 0 /** @todo implement machine checks. */
678 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
679#else
680 *puValue = 0;
681#endif
682 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
683 return VINF_SUCCESS;
684}
685
686
687/** @callback_method_impl{FNCPUMRDMSR} */
688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
689{
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 /** @todo implement machine checks. */
692 *puValue = 0;
693 return VINF_SUCCESS;
694}
695
696
697/** @callback_method_impl{FNCPUMWRMSR} */
698static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
699{
700 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
701 /** @todo implement machine checks. */
702 return VINF_SUCCESS;
703}
704
705
706/** @callback_method_impl{FNCPUMRDMSR} */
707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
708{
709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
710 /** @todo implement machine checks. */
711 *puValue = 0;
712 return VINF_SUCCESS;
713}
714
715
716/** @callback_method_impl{FNCPUMWRMSR} */
717static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
718{
719 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
720 /** @todo implement machine checks. */
721 return VINF_SUCCESS;
722}
723
724
725/** @callback_method_impl{FNCPUMRDMSR} */
726static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
727{
728 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
729 /** @todo implement IA32_DEBUGCTL. */
730 *puValue = 0;
731 return VINF_SUCCESS;
732}
733
734
735/** @callback_method_impl{FNCPUMWRMSR} */
736static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
737{
738 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
739 /** @todo implement IA32_DEBUGCTL. */
740 return VINF_SUCCESS;
741}
742
743
744/** @callback_method_impl{FNCPUMRDMSR} */
745static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
746{
747 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
748 /** @todo implement intel SMM. */
749 *puValue = 0;
750 return VINF_SUCCESS;
751}
752
753
754/** @callback_method_impl{FNCPUMWRMSR} */
755static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
756{
757 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
758 /** @todo implement intel SMM. */
759 return VERR_CPUM_RAISE_GP_0;
760}
761
762
763/** @callback_method_impl{FNCPUMRDMSR} */
764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
765{
766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
767 /** @todo implement intel SMM. */
768 *puValue = 0;
769 return VINF_SUCCESS;
770}
771
772
773/** @callback_method_impl{FNCPUMWRMSR} */
774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
775{
776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
777 /** @todo implement intel SMM. */
778 return VERR_CPUM_RAISE_GP_0;
779}
780
781
782/** @callback_method_impl{FNCPUMRDMSR} */
783static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
784{
785 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
786 /** @todo implement intel direct cache access (DCA)?? */
787 *puValue = 0;
788 return VINF_SUCCESS;
789}
790
791
792/** @callback_method_impl{FNCPUMWRMSR} */
793static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
794{
795 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
796 /** @todo implement intel direct cache access (DCA)?? */
797 return VINF_SUCCESS;
798}
799
800
801/** @callback_method_impl{FNCPUMRDMSR} */
802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
803{
804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
805 /** @todo implement intel direct cache access (DCA)?? */
806 *puValue = 0;
807 return VINF_SUCCESS;
808}
809
810
811/** @callback_method_impl{FNCPUMRDMSR} */
812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
813{
814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
815 /** @todo implement intel direct cache access (DCA)?? */
816 *puValue = 0;
817 return VINF_SUCCESS;
818}
819
820
821/** @callback_method_impl{FNCPUMWRMSR} */
822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
823{
824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
825 /** @todo implement intel direct cache access (DCA)?? */
826 return VINF_SUCCESS;
827}
828
829
830/** @callback_method_impl{FNCPUMRDMSR} */
831static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
832{
833 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
834 /** @todo implement IA32_PERFEVTSEL0+. */
835 *puValue = 0;
836 return VINF_SUCCESS;
837}
838
839
840/** @callback_method_impl{FNCPUMWRMSR} */
841static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
842{
843 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
844 /** @todo implement IA32_PERFEVTSEL0+. */
845 return VINF_SUCCESS;
846}
847
848
849/** @callback_method_impl{FNCPUMRDMSR} */
850static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
851{
852 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
853 uint64_t uValue = pRange->uValue;
854
855 /* Always provide the max bus ratio for now. XNU expects it. */
856 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
857
858 PVM pVM = pVCpu->CTX_SUFF(pVM);
859 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
860 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
861 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
862 if (uTscRatio > 0x1f)
863 uTscRatio = 0x1f;
864 uValue |= (uint64_t)uTscRatio << 40;
865
866 *puValue = uValue;
867 return VINF_SUCCESS;
868}
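/* Worked example of the ratio calculation above (illustrative numbers only): with
   uScalableBusHz = 100 MHz and uTscHz = 2600 MHz, (2600 + 50) / 100 = 26, which is
   below the 0x1f clamp and lands in bits 44:40 of the returned value. */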
869
870
871/** @callback_method_impl{FNCPUMWRMSR} */
872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
873{
874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
875 /* Pentium4 allows writing, but all bits are ignored. */
876 return VINF_SUCCESS;
877}
878
879
880/** @callback_method_impl{FNCPUMRDMSR} */
881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
882{
883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
884 /** @todo implement IA32_PERFCTL. */
885 *puValue = 0;
886 return VINF_SUCCESS;
887}
888
889
890/** @callback_method_impl{FNCPUMWRMSR} */
891static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
892{
893 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
894 /** @todo implement IA32_PERFCTL. */
895 return VINF_SUCCESS;
896}
897
898
899/** @callback_method_impl{FNCPUMRDMSR} */
900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
901{
902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
903 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
904 *puValue = 0;
905 return VINF_SUCCESS;
906}
907
908
909/** @callback_method_impl{FNCPUMWRMSR} */
910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
911{
912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
913 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
914 return VINF_SUCCESS;
915}
916
917
918/** @callback_method_impl{FNCPUMRDMSR} */
919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
920{
921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
922 /** @todo implement performance counters. */
923 *puValue = 0;
924 return VINF_SUCCESS;
925}
926
927
928/** @callback_method_impl{FNCPUMWRMSR} */
929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
930{
931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
932 /** @todo implement performance counters. */
933 return VINF_SUCCESS;
934}
935
936
937/** @callback_method_impl{FNCPUMRDMSR} */
938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
939{
940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
941 /** @todo implement performance counters. */
942 *puValue = 0;
943 return VINF_SUCCESS;
944}
945
946
947/** @callback_method_impl{FNCPUMWRMSR} */
948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
949{
950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
951 /** @todo implement performance counters. */
952 return VINF_SUCCESS;
953}
954
955
956/** @callback_method_impl{FNCPUMRDMSR} */
957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
958{
959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
960 /** @todo implement performance counters. */
961 *puValue = 0;
962 return VINF_SUCCESS;
963}
964
965
966/** @callback_method_impl{FNCPUMWRMSR} */
967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
968{
969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
970 /** @todo implement performance counters. */
971 return VINF_SUCCESS;
972}
973
974
975/** @callback_method_impl{FNCPUMRDMSR} */
976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
977{
978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
979 /** @todo implement performance counters. */
980 *puValue = 0;
981 return VINF_SUCCESS;
982}
983
984
985/** @callback_method_impl{FNCPUMWRMSR} */
986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
987{
988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
989 /** @todo implement performance counters. */
990 return VINF_SUCCESS;
991}
992
993
994/** @callback_method_impl{FNCPUMRDMSR} */
995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
996{
997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
998 /** @todo implement performance counters. */
999 *puValue = 0;
1000 return VINF_SUCCESS;
1001}
1002
1003
1004/** @callback_method_impl{FNCPUMWRMSR} */
1005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1006{
1007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1008 /** @todo implement performance counters. */
1009 return VINF_SUCCESS;
1010}
1011
1012
1013/** @callback_method_impl{FNCPUMRDMSR} */
1014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1015{
1016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1017 /** @todo implement performance counters. */
1018 *puValue = 0;
1019 return VINF_SUCCESS;
1020}
1021
1022
1023/** @callback_method_impl{FNCPUMWRMSR} */
1024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1025{
1026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1027 /** @todo implement performance counters. */
1028 return VINF_SUCCESS;
1029}
1030
1031
1032/** @callback_method_impl{FNCPUMRDMSR} */
1033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1034{
1035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1036 /** @todo implement IA32_CLOCK_MODULATION. */
1037 *puValue = 0;
1038 return VINF_SUCCESS;
1039}
1040
1041
1042/** @callback_method_impl{FNCPUMWRMSR} */
1043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1044{
1045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1046 /** @todo implement IA32_CLOCK_MODULATION. */
1047 return VINF_SUCCESS;
1048}
1049
1050
1051/** @callback_method_impl{FNCPUMRDMSR} */
1052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1053{
1054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1055 /** @todo implement IA32_THERM_INTERRUPT. */
1056 *puValue = 0;
1057 return VINF_SUCCESS;
1058}
1059
1060
1061/** @callback_method_impl{FNCPUMWRMSR} */
1062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1063{
1064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1065 /** @todo implement IA32_THERM_INTERRUPT. */
1066 return VINF_SUCCESS;
1067}
1068
1069
1070/** @callback_method_impl{FNCPUMRDMSR} */
1071static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1072{
1073 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1074 /** @todo implement IA32_THERM_STATUS. */
1075 *puValue = 0;
1076 return VINF_SUCCESS;
1077}
1078
1079
1080/** @callback_method_impl{FNCPUMWRMSR} */
1081static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1082{
1083 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1084 /** @todo implement IA32_THERM_STATUS. */
1085 return VINF_SUCCESS;
1086}
1087
1088
1089/** @callback_method_impl{FNCPUMRDMSR} */
1090static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1091{
1092 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1093 /** @todo implement IA32_THERM2_CTL. */
1094 *puValue = 0;
1095 return VINF_SUCCESS;
1096}
1097
1098
1099/** @callback_method_impl{FNCPUMWRMSR} */
1100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1101{
1102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1103 /** @todo implement IA32_THERM2_CTL. */
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** @callback_method_impl{FNCPUMRDMSR} */
1109static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1110{
1111 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1112 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1113 return VINF_SUCCESS;
1114}
1115
1116
1117/** @callback_method_impl{FNCPUMWRMSR} */
1118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1119{
1120 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1121#ifdef LOG_ENABLED
1122 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1123#endif
1124
1125 /* Unsupported bits are generally ignored and stripped by the MSR range
1126 entry that got us here. So, we just need to preserve fixed bits. */
1127 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1128 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1129 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1130
1131 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1132 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1133
1134 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1135 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1136 return VINF_SUCCESS;
1137}
1138
1139
1140/** @callback_method_impl{FNCPUMRDMSR} */
1141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1142{
1143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1144
1145 /** @todo Implement machine check exception injection. */
1146 switch (idMsr & 3)
1147 {
1148 case 0:
1149 case 1:
1150 *puValue = 0;
1151 break;
1152
1153 /* The ADDR and MISC registers aren't accessible since the
1154 corresponding STATUS bits are zero. */
1155 case 2:
1156 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1157 return VERR_CPUM_RAISE_GP_0;
1158 case 3:
1159 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1160 return VERR_CPUM_RAISE_GP_0;
1161 }
1162 return VINF_SUCCESS;
1163}
1164
1165
1166/** @callback_method_impl{FNCPUMWRMSR} */
1167static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1168{
1169 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1170 switch (idMsr & 3)
1171 {
1172 case 0:
1173 /* Ignore writes to the CTL register. */
1174 break;
1175
1176 case 1:
1177 /* According to specs, the STATUS register can only be written to
1178 with the value 0. VBoxCpuReport thinks differently for a
1179 Pentium M Dothan, but implementing according to specs now. */
1180 if (uValue != 0)
1181 {
1182 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1183 return VERR_CPUM_RAISE_GP_0;
1184 }
1185 break;
1186
1187 /* The specs state that ADDR and MISC can be cleared by writing zeros.
1188 Writing 1s will GP. Need to figure out how this relates to the
1189 ADDRV and MISCV status flags. If writing is independent of those
1190 bits, we need to know whether the CPU really implements them since
1191 that is exposed by writing 0 to them.
1192 Implementing the solution with the fewest GPs for now. */
1193 case 2:
1194 if (uValue != 0)
1195 {
1196 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1197 return VERR_CPUM_RAISE_GP_0;
1198 }
1199 break;
1200 case 3:
1201 if (uValue != 0)
1202 {
1203 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1204 return VERR_CPUM_RAISE_GP_0;
1205 }
1206 break;
1207 }
1208 return VINF_SUCCESS;
1209}
1210
1211
1212/** @callback_method_impl{FNCPUMRDMSR} */
1213static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1214{
1215 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1216 /** @todo Implement machine check exception injection. */
1217 *puValue = 0;
1218 return VINF_SUCCESS;
1219}
1220
1221
1222/** @callback_method_impl{FNCPUMWRMSR} */
1223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1224{
1225 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1226 /** @todo Implement machine check exception injection. */
1227 return VINF_SUCCESS;
1228}
1229
1230
1231/** @callback_method_impl{FNCPUMRDMSR} */
1232static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1233{
1234 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1235 /** @todo implement IA32_DS_AREA. */
1236 *puValue = 0;
1237 return VINF_SUCCESS;
1238}
1239
1240
1241/** @callback_method_impl{FNCPUMWRMSR} */
1242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1243{
1244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1245 /** @todo implement IA32_DS_AREA. */
1246 return VINF_SUCCESS;
1247}
1248
1249
1250/** @callback_method_impl{FNCPUMRDMSR} */
1251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1252{
1253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1254 /** @todo implement TSC deadline timer. */
1255 *puValue = 0;
1256 return VINF_SUCCESS;
1257}
1258
1259
1260/** @callback_method_impl{FNCPUMWRMSR} */
1261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1262{
1263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1264 /** @todo implement TSC deadline timer. */
1265 return VINF_SUCCESS;
1266}
1267
1268
1269/** @callback_method_impl{FNCPUMRDMSR} */
1270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1271{
1272 RT_NOREF_PV(pRange);
1273 return APICReadMsr(pVCpu, idMsr, puValue);
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMWRMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1279{
1280 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1281 return APICWriteMsr(pVCpu, idMsr, uValue);
1282}
1283
1284
1285/** @callback_method_impl{FNCPUMRDMSR} */
1286static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1287{
1288 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1289 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1290 *puValue = 0;
1291 return VINF_SUCCESS;
1292}
1293
1294
1295/** @callback_method_impl{FNCPUMWRMSR} */
1296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1297{
1298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1299 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1300 return VINF_SUCCESS;
1301}
1302
1303
1304/**
1305 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1306 *
1307 * @returns IA32_VMX_BASIC value.
1308 * @param pVCpu The cross context per CPU structure.
1309 */
1310VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1311{
1312 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1313 uint64_t uVmxMsr;
1314 if (pGuestFeatures->fVmx)
1315 {
1316 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1317 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1318 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, !pGuestFeatures->fLongMode )
1319 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1320 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1321 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1322 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1323 }
1324 else
1325 uVmxMsr = 0;
1326 return uVmxMsr;
1327}
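/* A minimal sketch of pulling individual fields back out of the value composed
   above, using RT_BF_GET (the IPRT counterpart of RT_BF_MAKE, also used further
   down in this file):
       uint64_t const uBasic  = CPUMGetGuestIa32VmxBasic(pVCpu);
       uint32_t const uVmcsId = RT_BF_GET(uBasic, VMX_BF_BASIC_VMCS_ID);
       uint32_t const cbVmcs  = RT_BF_GET(uBasic, VMX_BF_BASIC_VMCS_SIZE);
*/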
1328
1329
1330/** @callback_method_impl{FNCPUMRDMSR} */
1331static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1332{
1333 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1334 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1335 return VINF_SUCCESS;
1336}
1337
1338
1339/**
1340 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1341 *
1342 * @returns IA32_VMX_PINBASED_CTLS value.
1343 * @param pVCpu The cross context per CPU structure.
1344 */
1345VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1346{
1347 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1348 uint64_t uVmxMsr;
1349 if (pGuestFeatures->fVmx)
1350 {
1351 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1352 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1353 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1354 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1355 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1356 uint32_t const fVal = VMX_PIN_CTLS_DEFAULT1;
1357 uint32_t const fZap = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1358 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1359 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1360 }
1361 else
1362 uVmxMsr = 0;
1363 return uVmxMsr;
1364}
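/* Encoding note for the VMX control MSRs built this way: the low dword (fVal) is
   the allowed-0 mask (bits that must be 1) and the high dword (fZap) is the
   allowed-1 mask (bits the guest may set).  A minimal sketch of testing whether a
   control may be enabled, assuming the standard IPRT helpers RT_HI_U32, RT_BIT_32
   and RT_BOOL are available:
       uint64_t const uMsr    = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
       bool const     fMayUse = RT_BOOL(RT_HI_U32(uMsr) & RT_BIT_32(VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT));
*/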
1365
1366
1367/** @callback_method_impl{FNCPUMRDMSR} */
1368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1369{
1370 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1371 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1372 return VINF_SUCCESS;
1373}
1374
1375
1376/**
1377 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1378 *
1379 * @returns IA32_VMX_PROCBASED_CTLS value.
1380 * @param pVCpu The cross context per CPU structure.
1381 */
1382VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1383{
1384 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1385 uint64_t uVmxMsr;
1386 if (pGuestFeatures->fVmx)
1387 {
1388 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1389 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1390 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1391 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1392 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1393 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1394 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1395 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1396 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1397 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1398 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1399 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1400 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1401 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1402 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1403 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1404 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1405 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1406 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1407 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1408 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1409 uint32_t const fVal = VMX_PROC_CTLS_DEFAULT1;
1410 uint32_t const fZap = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1411 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1412 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1413 }
1414 else
1415 uVmxMsr = 0;
1416 return uVmxMsr;
1417}
1418
1419
1420/** @callback_method_impl{FNCPUMRDMSR} */
1421static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1422{
1423 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1424 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1425 return VINF_SUCCESS;
1426}
1427
1428
1429/**
1430 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxExitCtls.
1431 *
1432 * @returns IA32_VMX_EXIT_CTLS value.
1433 * @param pVCpu The cross context per CPU structure.
1434 */
1435VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1436{
1437 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1438 uint64_t uVmxMsr;
1439 if (pGuestFeatures->fVmx)
1440 {
1441 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1442 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1443 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1444 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1445 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1446 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1447 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1448 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1449 uint32_t const fVal = VMX_EXIT_CTLS_DEFAULT1;
1450 uint32_t const fZap = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1451 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1452 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1453 }
1454 else
1455 uVmxMsr = 0;
1456 return uVmxMsr;
1457}
1458
1459
1460/** @callback_method_impl{FNCPUMRDMSR} */
1461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1462{
1463 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1464 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1465 return VINF_SUCCESS;
1466}
1467
1468
1469/**
1470 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1471 *
1472 * @returns IA32_VMX_ENTRY_CTLS value.
1473 * @param pVCpu The cross context per CPU structure.
1474 */
1475VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1476{
1477 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1478 uint64_t uVmxMsr;
1479 if (pGuestFeatures->fVmx)
1480 {
1481 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1482 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1483 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1484 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1485 uint32_t const fDefault1 = VMX_ENTRY_CTLS_DEFAULT1;
1486 uint32_t const fVal = fDefault1;
1487 uint32_t const fZap = fFeatures | fDefault1;
1488 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1489 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1490 }
1491 else
1492 uVmxMsr = 0;
1493 return uVmxMsr;
1494}
1495
1496
1497/** @callback_method_impl{FNCPUMRDMSR} */
1498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1499{
1500 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1501 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1502 return VINF_SUCCESS;
1503}
1504
1505
1506/**
1507 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1508 *
1509 * @returns IA32_VMX_MISC MSR.
1510 * @param pVCpu The cross context per CPU structure.
1511 */
1512VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1513{
1514 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1515 uint64_t uVmxMsr;
1516 if (pGuestFeatures->fVmx)
1517 {
1518 uint64_t uHostMsr;
1519 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1520 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1521 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_MAX_MSRS);
1522 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1523 | RT_BF_MAKE(VMX_BF_MISC_EXIT_STORE_EFER_LMA, pGuestFeatures->fVmxExitStoreEferLma )
1524 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, VMX_V_GUEST_ACTIVITY_STATE_MASK )
1525 | RT_BF_MAKE(VMX_BF_MISC_PT, 0 )
1526 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1527 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1528 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1529 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1530 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1531 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1532 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1533 }
1534 else
1535 uVmxMsr = 0;
1536 return uVmxMsr;
1537}
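
/*
 * Note: the value above is composed with RT_BF_MAKE() from the VMX_BF_MISC_*
 * field definitions; RT_BF_GET() performs the inverse.  A short sketch of
 * pulling two of the fields back out (the local variable names are
 * illustrative; the 512 * (N + 1) formula for the MSR-list capacity is the one
 * the Intel SDM documents for IA32_VMX_MISC):
 *
 * @code
 *  uint64_t const uMisc         = CPUMGetGuestIa32VmxMisc(pVCpu);
 *  // TSC right-shift that drives the VMX-preemption timer:
 *  uint8_t  const cPreemptShift = RT_BF_GET(uMisc, VMX_BF_MISC_PREEMPT_TIMER_TSC);
 *  // Recommended maximum number of MSRs in a VM-entry/VM-exit MSR list:
 *  uint32_t const cMaxListMsrs  = 512 * (RT_BF_GET(uMisc, VMX_BF_MISC_MAX_MSRS) + 1);
 * @endcode
 */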
1538
1539
1540/** @callback_method_impl{FNCPUMRDMSR} */
1541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1542{
1543 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1544 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1545 return VINF_SUCCESS;
1546}
1547
1548
1549/**
1550 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1551 *
1552 * @returns IA32_VMX_CR0_FIXED0 value.
1553 * @param pVCpu The cross context per CPU structure.
1554 */
1555VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1556{
1557 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1558 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR0_FIXED0 : 0;
1559 return uVmxMsr;
1560}
1561
1562
1563/** @callback_method_impl{FNCPUMRDMSR} */
1564static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1565{
1566 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1567 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1568 return VINF_SUCCESS;
1569}
1570
1571
1572/**
1573 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1574 *
1575 * @returns IA32_VMX_CR0_FIXED1 MSR.
1576 * @param pVCpu The cross context per CPU structure.
1577 */
1578VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1579{
1580 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1581 uint64_t uVmxMsr;
1582 if (pGuestFeatures->fVmx)
1583 {
1584 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1585 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1586 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1587 }
1588 else
1589 uVmxMsr = 0;
1590 return uVmxMsr;
1591}
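
/*
 * Note: FIXED0 reports the CR0 bits that must be 1 while in VMX operation and
 * FIXED1 the bits that may be 1.  Ignoring the unrestricted-guest relaxation
 * of PE/PG, a CR0 value is acceptable when it satisfies both masks, roughly as
 * sketched below (the helper name is illustrative, not a CPUM API):
 *
 * @code
 *  static bool vmxIsCr0ValueOkay(PVMCPU pVCpu, uint64_t uCr0)
 *  {
 *      uint64_t const uFixed0 = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu); // must-be-1 bits
 *      uint64_t const uFixed1 = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu); // may-be-1 bits
 *      return (uCr0 & uFixed0) == uFixed0
 *          && (uCr0 & ~uFixed1) == 0;
 *  }
 * @endcode
 */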
1592
1593
1594/** @callback_method_impl{FNCPUMRDMSR} */
1595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1596{
1597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1598 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1599 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1600 return VINF_SUCCESS;
1601}
1602
1603
1604/**
1605 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1606 *
1607 * @returns IA32_VMX_CR4_FIXED0 value.
1608 * @param pVCpu The cross context per CPU structure.
1609 */
1610VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1611{
1612 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1613 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1614 return uVmxMsr;
1615}
1616
1617
1618/** @callback_method_impl{FNCPUMRDMSR} */
1619static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1620{
1621 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1622 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1623 return VINF_SUCCESS;
1624}
1625
1626
1627/**
1628 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1629 *
1630 * @returns IA32_VMX_CR4_FIXED1 MSR.
1631 * @param pVCpu The cross context per CPU structure.
1632 */
1633VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1634{
1635 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1636 uint64_t uVmxMsr;
1637 if (pGuestFeatures->fVmx)
1638 {
1639 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1640 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1641 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1642 }
1643 else
1644 uVmxMsr = 0;
1645 return uVmxMsr;
1646}
1647
1648
1649/** @callback_method_impl{FNCPUMRDMSR} */
1650static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1651{
1652 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1653 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1654 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1655 return VINF_SUCCESS;
1656}
1657
1658
1659/**
1660 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1661 *
1662 * @returns IA32_VMX_VMCS_ENUM value.
1663 * @param pVCpu The cross context per CPU structure.
1664 */
1665VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1666{
1667 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1668 uint64_t uVmxMsr;
1669 if (pGuestFeatures->fVmx)
1670 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1671 else
1672 uVmxMsr = 0;
1673 return uVmxMsr;
1674}
1675
1676
1677/** @callback_method_impl{FNCPUMRDMSR} */
1678static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1679{
1680 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1681 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1682 return VINF_SUCCESS;
1683}
1684
1685
1686/**
1687 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1688 *
1689 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1690 * @param pVCpu The cross context per CPU structure.
1691 */
1692VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1693{
1694 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1695 uint64_t uVmxMsr;
1696 if ( pGuestFeatures->fVmx
1697 && pGuestFeatures->fVmxSecondaryExecCtls)
1698 {
1699 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1700 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1701 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1702 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1703 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1704 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1705 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1706 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1707 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1708 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1709 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1710 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1711 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1712 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1713 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1714 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1715 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1716 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1717 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1718 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
1719 uint32_t const fVal = 0;
1720 uint32_t const fZap = fFeatures;
1721 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1722 }
1723 else
1724 uVmxMsr = 0;
1725 return uVmxMsr;
1726}
1727
1728
1729/** @callback_method_impl{FNCPUMRDMSR} */
1730static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1731{
1732 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1733 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1734 return VINF_SUCCESS;
1735}
1736
1737
1738/** @callback_method_impl{FNCPUMRDMSR} */
1739static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1740{
1741 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1742 *puValue = 0;
1743 return VINF_SUCCESS;
1744}
1745
1746
1747/** @callback_method_impl{FNCPUMRDMSR} */
1748static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1749{
1750 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1751 *puValue = 0;
1752 return VINF_SUCCESS;
1753}
1754
1755
1756/** @callback_method_impl{FNCPUMRDMSR} */
1757static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1758{
1759 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1760 *puValue = 0;
1761 return VINF_SUCCESS;
1762}
1763
1764
1765/** @callback_method_impl{FNCPUMRDMSR} */
1766static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1767{
1768 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1769 *puValue = 0;
1770 return VINF_SUCCESS;
1771}
1772
1773
1774/** @callback_method_impl{FNCPUMRDMSR} */
1775static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1776{
1777 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1778 *puValue = 0;
1779 return VINF_SUCCESS;
1780}
1781
1782
1783/**
1784 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1785 *
1786 * @returns IA32_VMX_VMFUNC value.
1787 * @param pVCpu The cross context per CPU structure.
1788 */
1789VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1790{
1791 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1792 uint64_t uVmxMsr;
1793 if ( pGuestFeatures->fVmx
1794 && pGuestFeatures->fVmxVmFunc)
1795 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1796 else
1797 uVmxMsr = 0;
1798 return uVmxMsr;
1799}
1800
1801
1802/** @callback_method_impl{FNCPUMRDMSR} */
1803static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1804{
1805 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1806 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1807 return VINF_SUCCESS;
1808}
1809
1810
1811/** @callback_method_impl{FNCPUMRDMSR} */
1812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1813{
1814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1815 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1816 return VINF_SUCCESS;
1817}
1818
1819
1820/** @callback_method_impl{FNCPUMWRMSR} */
1821static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1822{
1823 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1824
1825    /* NB: The STIBP bit can be set even when only IBRS is present, regardless of whether STIBP is actually implemented. */
1826 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1827 {
1828 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1829 return VERR_CPUM_RAISE_GP_0;
1830 }
1831
1832 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1833 return VINF_SUCCESS;
1834}
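
/*
 * Note: the write handler above is an instance of the common "mask of
 * implemented bits" pattern: any attempt to set a bit outside the mask raises
 * #GP(0), otherwise the raw value is stored.  A generic sketch (the helper
 * name and parameters are illustrative, not part of CPUM):
 *
 * @code
 *  static VBOXSTRICTRC msrWriteWithValidMask(uint64_t *puGuestMsr, uint64_t uValue, uint64_t fValidMask)
 *  {
 *      if (uValue & ~fValidMask)          // any reserved bit set?
 *          return VERR_CPUM_RAISE_GP_0;   // -> #GP(0) raised by the caller
 *      *puGuestMsr = uValue;
 *      return VINF_SUCCESS;
 *  }
 * @endcode
 */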
1835
1836
1837/** @callback_method_impl{FNCPUMWRMSR} */
1838static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1839{
1840 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1841 return VINF_SUCCESS;
1842}
1843
1844
1845/** @callback_method_impl{FNCPUMRDMSR} */
1846static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1847{
1848 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1849 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1850 return VINF_SUCCESS;
1851}
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864/*
1865 * AMD64
1866 * AMD64
1867 * AMD64
1868 */
1869
1870
1871/** @callback_method_impl{FNCPUMRDMSR} */
1872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1873{
1874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1875 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1876 return VINF_SUCCESS;
1877}
1878
1879
1880/** @callback_method_impl{FNCPUMWRMSR} */
1881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1882{
1883 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1884 uint64_t uValidatedEfer;
1885 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1886 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1887 if (RT_FAILURE(rc))
1888 return VERR_CPUM_RAISE_GP_0;
1889
1890 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1891 return VINF_SUCCESS;
1892}
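
/*
 * Note: a heavily simplified sketch of the kind of checks an EFER write
 * validation typically performs -- reserved bits #GP, LME cannot be toggled
 * while CR0.PG is set, and LMA is read-only.  This is NOT the actual
 * CPUMIsGuestEferMsrWriteValid() implementation (which also consults the
 * guest CPUID features); the helper name and the accepted bit set are
 * illustrative only.
 *
 * @code
 *  static int eferWriteSketch(uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidated)
 *  {
 *      uint64_t const fKnown = MSR_K6_EFER_SCE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA | MSR_K6_EFER_NXE;
 *      if (uNewEfer & ~fKnown)                                   // reserved bits -> #GP
 *          return VERR_CPUM_RAISE_GP_0;
 *      if (   (uCr0 & X86_CR0_PG)
 *          && ((uNewEfer ^ uOldEfer) & MSR_K6_EFER_LME))         // LME flip while paging -> #GP
 *          return VERR_CPUM_RAISE_GP_0;
 *      *puValidated = (uNewEfer & ~(uint64_t)MSR_K6_EFER_LMA)    // LMA is not writable,
 *                   | (uOldEfer &  (uint64_t)MSR_K6_EFER_LMA);   // keep the old value.
 *      return VINF_SUCCESS;
 *  }
 * @endcode
 */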
1893
1894
1895/** @callback_method_impl{FNCPUMRDMSR} */
1896static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1897{
1898 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1899 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1900 return VINF_SUCCESS;
1901}
1902
1903
1904/** @callback_method_impl{FNCPUMWRMSR} */
1905static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1906{
1907 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1908 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1909 return VINF_SUCCESS;
1910}
1911
1912
1913/** @callback_method_impl{FNCPUMRDMSR} */
1914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1915{
1916 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1917 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1918 return VINF_SUCCESS;
1919}
1920
1921
1922/** @callback_method_impl{FNCPUMWRMSR} */
1923static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1924{
1925 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1926 if (!X86_IS_CANONICAL(uValue))
1927 {
1928 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1929 return VERR_CPUM_RAISE_GP_0;
1930 }
1931 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1932 return VINF_SUCCESS;
1933}
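
/*
 * Note: X86_IS_CANONICAL() enforces the canonical-address rule used above:
 * for a 48-bit implementation, bits 63:47 must all equal bit 47, i.e. the
 * value sign-extends from 48 bits.  A minimal sketch assuming 48 implemented
 * virtual-address bits (the helper name is illustrative):
 *
 * @code
 *  static bool isCanonical48(uint64_t uAddr)
 *  {
 *      uint64_t const uTop = uAddr >> 47;      // bits 63:47, 17 bits worth
 *      return uTop == 0 || uTop == UINT64_C(0x1ffff);
 *  }
 * @endcode
 */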
1934
1935
1936/** @callback_method_impl{FNCPUMRDMSR} */
1937static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1938{
1939 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1940 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1941 return VINF_SUCCESS;
1942}
1943
1944
1945/** @callback_method_impl{FNCPUMWRMSR} */
1946static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1947{
1948 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1949 if (!X86_IS_CANONICAL(uValue))
1950 {
1951 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1952 return VERR_CPUM_RAISE_GP_0;
1953 }
1954 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1955 return VINF_SUCCESS;
1956}
1957
1958
1959/** @callback_method_impl{FNCPUMRDMSR} */
1960static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1961{
1962 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1963 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
1964 return VINF_SUCCESS;
1965}
1966
1967
1968/** @callback_method_impl{FNCPUMWRMSR} */
1969static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1970{
1971 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1972 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
1973 return VINF_SUCCESS;
1974}
1975
1976
1977/** @callback_method_impl{FNCPUMRDMSR} */
1978static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1979{
1980 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1981 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
1982 return VINF_SUCCESS;
1983}
1984
1985
1986/** @callback_method_impl{FNCPUMWRMSR} */
1987static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1988{
1989 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1990 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
1991 return VINF_SUCCESS;
1992}
1993
1994
1995/** @callback_method_impl{FNCPUMRDMSR} */
1996static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1997{
1998 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1999 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
2000 return VINF_SUCCESS;
2001}
2002
2003/** @callback_method_impl{FNCPUMWRMSR} */
2004static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2005{
2006 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2007 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2008 return VINF_SUCCESS;
2009}
2010
2011
2012
2013/** @callback_method_impl{FNCPUMRDMSR} */
2014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2015{
2016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2017 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2018 return VINF_SUCCESS;
2019}
2020
2021/** @callback_method_impl{FNCPUMWRMSR} */
2022static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2023{
2024 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2025 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2026 return VINF_SUCCESS;
2027}
2028
2029
2030/** @callback_method_impl{FNCPUMRDMSR} */
2031static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2032{
2033 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2034 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2035 return VINF_SUCCESS;
2036}
2037
2038/** @callback_method_impl{FNCPUMWRMSR} */
2039static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2040{
2041 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2042 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2043 return VINF_SUCCESS;
2044}
2045
2046
2047/*
2048 * Intel specific
2049 * Intel specific
2050 * Intel specific
2051 */
2052
2053/** @callback_method_impl{FNCPUMRDMSR} */
2054static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2055{
2056 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2057 /** @todo recalc clock frequency ratio? */
2058 *puValue = pRange->uValue;
2059 return VINF_SUCCESS;
2060}
2061
2062
2063/** @callback_method_impl{FNCPUMWRMSR} */
2064static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2065{
2066 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2067 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2068 return VINF_SUCCESS;
2069}
2070
2071
2072/** @callback_method_impl{FNCPUMRDMSR} */
2073static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2074{
2075 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2076
2077    /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2078             have a 4-bit core count. */
2079 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2080 uint16_t cThreads = cCores; /** @todo hyper-threading. */
2081 *puValue = RT_MAKE_U32(cThreads, cCores);
2082 return VINF_SUCCESS;
2083}
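
/*
 * Note: RT_MAKE_U32(cThreads, cCores) above puts the thread count in bits 15:0
 * and the core count in bits 31:16 of the MSR value.  A guest-side sketch of
 * unpacking it (variable names are illustrative):
 *
 * @code
 *  uint32_t const uCoreThreadMsr = (uint32_t)uValue;                    // value read via RDMSR
 *  uint16_t const cThreads       = (uint16_t)(uCoreThreadMsr & 0xffff); // bits 15:0
 *  uint16_t const cCores         = (uint16_t)(uCoreThreadMsr >> 16);    // bits 31:16
 * @endcode
 */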
2084
2085
2086/** @callback_method_impl{FNCPUMRDMSR} */
2087static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2088{
2089 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2090 /** @todo P4 hard power on config */
2091 *puValue = pRange->uValue;
2092 return VINF_SUCCESS;
2093}
2094
2095
2096/** @callback_method_impl{FNCPUMWRMSR} */
2097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2098{
2099 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2100 /** @todo P4 hard power on config */
2101 return VINF_SUCCESS;
2102}
2103
2104
2105/** @callback_method_impl{FNCPUMRDMSR} */
2106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2107{
2108 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2109 /** @todo P4 soft power on config */
2110 *puValue = pRange->uValue;
2111 return VINF_SUCCESS;
2112}
2113
2114
2115/** @callback_method_impl{FNCPUMWRMSR} */
2116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2117{
2118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2119 /** @todo P4 soft power on config */
2120 return VINF_SUCCESS;
2121}
2122
2123
2124/** @callback_method_impl{FNCPUMRDMSR} */
2125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2126{
2127 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2128
2129 uint64_t uValue;
2130 PVM pVM = pVCpu->CTX_SUFF(pVM);
2131 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2132 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2133 {
2134 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2135 {
2136 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2137 uValue = 0;
2138 }
2139 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2140 {
2141 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2142 uValue = 1;
2143 }
2144 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2145 {
2146 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2147 uValue = 3;
2148 }
2149 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2150 {
2151 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2152 uValue = 2;
2153 }
2154 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2155 {
2156 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2157 uValue = 0;
2158 }
2159 else
2160 {
2161 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2162 uValue = 6;
2163 }
2164 uValue <<= 16;
2165
2166 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2167 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2168 uValue |= (uint32_t)uTscRatio << 24;
2169
2170 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2171 }
2172 else
2173 {
2174        /* Probably more stuff here, but Intel doesn't want to tell us. */
2175        uValue = pRange->uValue;
2176        uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is the only documented value */
2177 }
2178
2179 *puValue = uValue;
2180 return VINF_SUCCESS;
2181}
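
/*
 * Note: the (uTscHz + uScalableBusHz / 2) / uScalableBusHz expression above is
 * plain round-to-nearest integer division of the TSC frequency by the scalable
 * bus frequency.  A worked example with illustrative numbers:
 *
 * @code
 *  uint64_t const uTscHz = UINT64_C(3000000000);   // 3.0 GHz TSC
 *  uint64_t const uBusHz = UINT64_C(400000000);    // 400 MHz scalable bus
 *  uint8_t  const uRatio = (uint8_t)((uTscHz + uBusHz / 2) / uBusHz);
 *  // 3.0e9 / 4.0e8 = 7.5, which rounds to uRatio == 8 instead of truncating to 7.
 * @endcode
 */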
2182
2183
2184/** @callback_method_impl{FNCPUMWRMSR} */
2185static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2186{
2187 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2188 /** @todo P4 bus frequency config */
2189 return VINF_SUCCESS;
2190}
2191
2192
2193/** @callback_method_impl{FNCPUMRDMSR} */
2194static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2195{
2196 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2197
2198    /* Convert the scalable bus frequency to the encoding in the Intel manual (for Core and later). */
2199 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2200 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2201 *puValue = 5;
2202 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2203 *puValue = 1;
2204 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2205 *puValue = 3;
2206 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2207 *puValue = 2;
2208 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2209 *puValue = 0;
2210 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2211 *puValue = 4;
2212 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2213 *puValue = 6;
2214
2215 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2216
2217 return VINF_SUCCESS;
2218}
2219
2220
2221/** @callback_method_impl{FNCPUMRDMSR} */
2222static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2223{
2224 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2225
2226 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2227 PVM pVM = pVCpu->CTX_SUFF(pVM);
2228 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2229 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2230 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2231 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2232 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2233
2234    /* Ivy Bridge has a minimum operating ratio as well. */
2235    if (true) /** @todo detect Sandy Bridge. */
2236 uValue |= (uint64_t)uTscRatio << 48;
2237
2238 *puValue = uValue;
2239 return VINF_SUCCESS;
2240}
2241
2242
2243/** @callback_method_impl{FNCPUMRDMSR} */
2244static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2245{
2246 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2247
2248 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2249
2250 PVM pVM = pVCpu->CTX_SUFF(pVM);
2251 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2252 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2253 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2254 uValue |= (uint32_t)uTscRatio << 8;
2255
2256 *puValue = uValue;
2257 return VINF_SUCCESS;
2258}
2259
2260
2261/** @callback_method_impl{FNCPUMWRMSR} */
2262static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2263{
2264 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2265 /** @todo implement writing MSR_FLEX_RATIO. */
2266 return VINF_SUCCESS;
2267}
2268
2269
2270/** @callback_method_impl{FNCPUMRDMSR} */
2271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2272{
2273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2274 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2275 return VINF_SUCCESS;
2276}
2277
2278
2279/** @callback_method_impl{FNCPUMWRMSR} */
2280static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2281{
2282 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2283
2284 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2285 {
2286 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2287 return VERR_CPUM_RAISE_GP_0;
2288 }
2289#if 0 /** @todo check what real (old) hardware does. */
2290 if ((uValue & 7) >= 5)
2291 {
2292 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2293 return VERR_CPUM_RAISE_GP_0;
2294 }
2295#endif
2296 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2297 return VINF_SUCCESS;
2298}
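
/*
 * Note: bit 15 acts as the CFG lock for this MSR -- once the stored value has
 * it set, the handler above fails every further write with #GP(0).  An
 * illustrative firmware-style usage; the low-3-bit C-state limit is inferred
 * from the disabled check above, and the wrmsr call is only indicated, not
 * real code:
 *
 * @code
 *  uint64_t uCfg = 0;
 *  uCfg |= 2;                 // low 3 bits: package C-state limit
 *  uCfg |= RT_BIT_64(15);     // CFG lock: later writes will take the #GP path
 *  // wrmsr(<this MSR>, uCfg);  -- subsequent writes now #GP until reset.
 * @endcode
 */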
2299
2300
2301/** @callback_method_impl{FNCPUMRDMSR} */
2302static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2303{
2304 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2305 /** @todo implement I/O mwait wakeup. */
2306 *puValue = 0;
2307 return VINF_SUCCESS;
2308}
2309
2310
2311/** @callback_method_impl{FNCPUMWRMSR} */
2312static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2313{
2314 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2315 /** @todo implement I/O mwait wakeup. */
2316 return VINF_SUCCESS;
2317}
2318
2319
2320/** @callback_method_impl{FNCPUMRDMSR} */
2321static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2322{
2323 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2324 /** @todo implement last branch records. */
2325 *puValue = 0;
2326 return VINF_SUCCESS;
2327}
2328
2329
2330/** @callback_method_impl{FNCPUMWRMSR} */
2331static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2332{
2333 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2334 /** @todo implement last branch records. */
2335 return VINF_SUCCESS;
2336}
2337
2338
2339/** @callback_method_impl{FNCPUMRDMSR} */
2340static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2341{
2342 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2343 /** @todo implement last branch records. */
2344 *puValue = 0;
2345 return VINF_SUCCESS;
2346}
2347
2348
2349/** @callback_method_impl{FNCPUMWRMSR} */
2350static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2351{
2352 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2353 /** @todo implement last branch records. */
2354 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2355 * if the rest of the bits are zero. Automatic sign extending?
2356 * Investigate! */
2357 if (!X86_IS_CANONICAL(uValue))
2358 {
2359 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2360 return VERR_CPUM_RAISE_GP_0;
2361 }
2362 return VINF_SUCCESS;
2363}
2364
2365
2366/** @callback_method_impl{FNCPUMRDMSR} */
2367static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2368{
2369 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2370 /** @todo implement last branch records. */
2371 *puValue = 0;
2372 return VINF_SUCCESS;
2373}
2374
2375
2376/** @callback_method_impl{FNCPUMWRMSR} */
2377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2378{
2379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2380 /** @todo implement last branch records. */
2381 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2382 * if the rest of the bits are zero. Automatic sign extending?
2383 * Investigate! */
2384 if (!X86_IS_CANONICAL(uValue))
2385 {
2386 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2387 return VERR_CPUM_RAISE_GP_0;
2388 }
2389 return VINF_SUCCESS;
2390}
2391
2392
2393/** @callback_method_impl{FNCPUMRDMSR} */
2394static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2395{
2396 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2397 /** @todo implement last branch records. */
2398 *puValue = 0;
2399 return VINF_SUCCESS;
2400}
2401
2402
2403/** @callback_method_impl{FNCPUMWRMSR} */
2404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2405{
2406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2407 /** @todo implement last branch records. */
2408 return VINF_SUCCESS;
2409}
2410
2411
2412/** @callback_method_impl{FNCPUMRDMSR} */
2413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2414{
2415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2416 *puValue = pRange->uValue;
2417 return VINF_SUCCESS;
2418}
2419
2420
2421/** @callback_method_impl{FNCPUMWRMSR} */
2422static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2423{
2424 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2425 return VINF_SUCCESS;
2426}
2427
2428
2429/** @callback_method_impl{FNCPUMRDMSR} */
2430static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2431{
2432 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2433 *puValue = pRange->uValue;
2434 return VINF_SUCCESS;
2435}
2436
2437
2438/** @callback_method_impl{FNCPUMWRMSR} */
2439static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2440{
2441 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2442 return VINF_SUCCESS;
2443}
2444
2445
2446/** @callback_method_impl{FNCPUMRDMSR} */
2447static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2448{
2449 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2450 *puValue = pRange->uValue;
2451 return VINF_SUCCESS;
2452}
2453
2454
2455/** @callback_method_impl{FNCPUMWRMSR} */
2456static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2457{
2458 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2459 return VINF_SUCCESS;
2460}
2461
2462
2463/** @callback_method_impl{FNCPUMRDMSR} */
2464static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2465{
2466 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2467 /** @todo machine check. */
2468 *puValue = pRange->uValue;
2469 return VINF_SUCCESS;
2470}
2471
2472
2473/** @callback_method_impl{FNCPUMWRMSR} */
2474static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2475{
2476 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2477 /** @todo machine check. */
2478 return VINF_SUCCESS;
2479}
2480
2481
2482/** @callback_method_impl{FNCPUMRDMSR} */
2483static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2484{
2485 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2486 *puValue = 0;
2487 return VINF_SUCCESS;
2488}
2489
2490
2491/** @callback_method_impl{FNCPUMWRMSR} */
2492static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2493{
2494 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2495 return VINF_SUCCESS;
2496}
2497
2498
2499/** @callback_method_impl{FNCPUMRDMSR} */
2500static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2501{
2502 RT_NOREF_PV(idMsr);
2503 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2504 AssertRC(rc);
2505 return VINF_SUCCESS;
2506}
2507
2508
2509/** @callback_method_impl{FNCPUMWRMSR} */
2510static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2511{
2512 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2513    /* This CRx interface differs from the MOV CRx, GReg interface in that
2514       #GP(0) isn't raised if unsupported bits are written. Instead they
2515       are simply ignored and masked off. (Pentium M Dothan) */
2516 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2517 * any, gain. */
2518 return VINF_SUCCESS;
2519}
2520
2521
2522/** @callback_method_impl{FNCPUMRDMSR} */
2523static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2524{
2525 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2526 /** @todo implement CPUID masking. */
2527 *puValue = UINT64_MAX;
2528 return VINF_SUCCESS;
2529}
2530
2531
2532/** @callback_method_impl{FNCPUMWRMSR} */
2533static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2534{
2535 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2536 /** @todo implement CPUID masking. */
2537 return VINF_SUCCESS;
2538}
2539
2540
2541/** @callback_method_impl{FNCPUMRDMSR} */
2542static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2543{
2544 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2545 /** @todo implement CPUID masking. */
2546 *puValue = 0;
2547 return VINF_SUCCESS;
2548}
2549
2550
2551/** @callback_method_impl{FNCPUMWRMSR} */
2552static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2553{
2554 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2555 /** @todo implement CPUID masking. */
2556 return VINF_SUCCESS;
2557}
2558
2559
2560
2561/** @callback_method_impl{FNCPUMRDMSR} */
2562static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2563{
2564 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2565 /** @todo implement CPUID masking. */
2566 *puValue = UINT64_MAX;
2567 return VINF_SUCCESS;
2568}
2569
2570
2571/** @callback_method_impl{FNCPUMWRMSR} */
2572static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2573{
2574 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2575 /** @todo implement CPUID masking. */
2576 return VINF_SUCCESS;
2577}
2578
2579
2580
2581/** @callback_method_impl{FNCPUMRDMSR} */
2582static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2583{
2584 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2585 /** @todo implement AES-NI. */
2586 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2587 return VINF_SUCCESS;
2588}
2589
2590
2591/** @callback_method_impl{FNCPUMWRMSR} */
2592static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2593{
2594 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2595 /** @todo implement AES-NI. */
2596 return VERR_CPUM_RAISE_GP_0;
2597}
2598
2599
2600/** @callback_method_impl{FNCPUMRDMSR} */
2601static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2602{
2603 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2604 /** @todo implement intel C states. */
2605 *puValue = pRange->uValue;
2606 return VINF_SUCCESS;
2607}
2608
2609
2610/** @callback_method_impl{FNCPUMWRMSR} */
2611static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2612{
2613 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2614 /** @todo implement intel C states. */
2615 return VINF_SUCCESS;
2616}
2617
2618
2619/** @callback_method_impl{FNCPUMRDMSR} */
2620static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2621{
2622 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2623 /** @todo implement last-branch-records. */
2624 *puValue = 0;
2625 return VINF_SUCCESS;
2626}
2627
2628
2629/** @callback_method_impl{FNCPUMWRMSR} */
2630static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2631{
2632 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2633 /** @todo implement last-branch-records. */
2634 return VINF_SUCCESS;
2635}
2636
2637
2638/** @callback_method_impl{FNCPUMRDMSR} */
2639static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2640{
2641 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2642 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2643 *puValue = 0;
2644 return VINF_SUCCESS;
2645}
2646
2647
2648/** @callback_method_impl{FNCPUMWRMSR} */
2649static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2650{
2651 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2652 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2653 return VINF_SUCCESS;
2654}
2655
2656
2657/** @callback_method_impl{FNCPUMRDMSR} */
2658static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2659{
2660 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2661 /** @todo implement memory VLW? */
2662 *puValue = pRange->uValue;
2663 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2664 AAJ49/AAK51/????, which documents the inversion of this bit. The
2665       Sandy Bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2666       that corrects things. Some guesses at the other bits:
2667 bit 2 = INTR
2668 bit 4 = SMI
2669 bit 5 = INIT
2670 bit 6 = NMI */
2671 return VINF_SUCCESS;
2672}
2673
2674
2675/** @callback_method_impl{FNCPUMRDMSR} */
2676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2677{
2678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2679 /** @todo intel power management */
2680 *puValue = 0;
2681 return VINF_SUCCESS;
2682}
2683
2684
2685/** @callback_method_impl{FNCPUMWRMSR} */
2686static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2687{
2688 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2689 /** @todo intel power management */
2690 return VINF_SUCCESS;
2691}
2692
2693
2694/** @callback_method_impl{FNCPUMRDMSR} */
2695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2696{
2697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2698 /** @todo intel performance counters. */
2699 *puValue = 0;
2700 return VINF_SUCCESS;
2701}
2702
2703
2704/** @callback_method_impl{FNCPUMWRMSR} */
2705static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2706{
2707 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2708 /** @todo intel performance counters. */
2709 return VINF_SUCCESS;
2710}
2711
2712
2713/** @callback_method_impl{FNCPUMRDMSR} */
2714static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2715{
2716 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2717 /** @todo intel performance counters. */
2718 *puValue = 0;
2719 return VINF_SUCCESS;
2720}
2721
2722
2723/** @callback_method_impl{FNCPUMWRMSR} */
2724static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2725{
2726 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2727 /** @todo intel performance counters. */
2728 return VINF_SUCCESS;
2729}
2730
2731
2732/** @callback_method_impl{FNCPUMRDMSR} */
2733static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2734{
2735 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2736 /** @todo intel power management. */
2737 *puValue = 0;
2738 return VINF_SUCCESS;
2739}
2740
2741
2742/** @callback_method_impl{FNCPUMRDMSR} */
2743static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2744{
2745 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2746 /** @todo intel power management. */
2747 *puValue = 0;
2748 return VINF_SUCCESS;
2749}
2750
2751
2752/** @callback_method_impl{FNCPUMRDMSR} */
2753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2754{
2755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2756 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2757 *puValue = 0;
2758 return VINF_SUCCESS;
2759}
2760
2761
2762/** @callback_method_impl{FNCPUMWRMSR} */
2763static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2764{
2765 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2766 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2767 return VINF_SUCCESS;
2768}
2769
2770
2771/** @callback_method_impl{FNCPUMRDMSR} */
2772static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2773{
2774 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2775 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2776 *puValue = 0;
2777 return VINF_SUCCESS;
2778}
2779
2780
2781/** @callback_method_impl{FNCPUMWRMSR} */
2782static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2783{
2784 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2785 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2786 return VINF_SUCCESS;
2787}
2788
2789
2790/** @callback_method_impl{FNCPUMRDMSR} */
2791static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2792{
2793 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2794 /** @todo intel RAPL. */
2795 *puValue = pRange->uValue;
2796 return VINF_SUCCESS;
2797}
2798
2799
2800/** @callback_method_impl{FNCPUMWRMSR} */
2801static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2802{
2803 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2804 /* Note! This is documented as read only and except for a Silvermont sample has
2805 always been classified as read only. This is just here to make it compile. */
2806 return VINF_SUCCESS;
2807}
2808
2809
2810/** @callback_method_impl{FNCPUMRDMSR} */
2811static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2812{
2813 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2814 /** @todo intel power management. */
2815 *puValue = 0;
2816 return VINF_SUCCESS;
2817}
2818
2819
2820/** @callback_method_impl{FNCPUMWRMSR} */
2821static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2822{
2823 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2824 /** @todo intel power management. */
2825 return VINF_SUCCESS;
2826}
2827
2828
2829/** @callback_method_impl{FNCPUMRDMSR} */
2830static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2831{
2832 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2833 /** @todo intel power management. */
2834 *puValue = 0;
2835 return VINF_SUCCESS;
2836}
2837
2838
2839/** @callback_method_impl{FNCPUMWRMSR} */
2840static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2841{
2842 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2843 /* Note! This is documented as read only and except for a Silvermont sample has
2844 always been classified as read only. This is just here to make it compile. */
2845 return VINF_SUCCESS;
2846}
2847
2848
2849/** @callback_method_impl{FNCPUMRDMSR} */
2850static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2851{
2852 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2853 /** @todo intel RAPL. */
2854 *puValue = 0;
2855 return VINF_SUCCESS;
2856}
2857
2858
2859/** @callback_method_impl{FNCPUMWRMSR} */
2860static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2861{
2862 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2863 /** @todo intel RAPL. */
2864 return VINF_SUCCESS;
2865}
2866
2867
2868/** @callback_method_impl{FNCPUMRDMSR} */
2869static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2870{
2871 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2872 /** @todo intel power management. */
2873 *puValue = 0;
2874 return VINF_SUCCESS;
2875}
2876
2877
2878/** @callback_method_impl{FNCPUMRDMSR} */
2879static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2880{
2881 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2882 /** @todo intel power management. */
2883 *puValue = 0;
2884 return VINF_SUCCESS;
2885}
2886
2887
2888/** @callback_method_impl{FNCPUMRDMSR} */
2889static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2890{
2891 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2892 /** @todo intel power management. */
2893 *puValue = 0;
2894 return VINF_SUCCESS;
2895}
2896
2897
2898/** @callback_method_impl{FNCPUMRDMSR} */
2899static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2900{
2901 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2902 /** @todo intel RAPL. */
2903 *puValue = 0;
2904 return VINF_SUCCESS;
2905}
2906
2907
2908/** @callback_method_impl{FNCPUMWRMSR} */
2909static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2910{
2911 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2912 /** @todo intel RAPL. */
2913 return VINF_SUCCESS;
2914}
2915
2916
2917/** @callback_method_impl{FNCPUMRDMSR} */
2918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2919{
2920 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2921 /** @todo intel power management. */
2922 *puValue = 0;
2923 return VINF_SUCCESS;
2924}
2925
2926
2927/** @callback_method_impl{FNCPUMRDMSR} */
2928static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2929{
2930 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2931 /** @todo intel power management. */
2932 *puValue = 0;
2933 return VINF_SUCCESS;
2934}
2935
2936
2937/** @callback_method_impl{FNCPUMRDMSR} */
2938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2939{
2940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2941 /** @todo intel power management. */
2942 *puValue = 0;
2943 return VINF_SUCCESS;
2944}
2945
2946
2947/** @callback_method_impl{FNCPUMRDMSR} */
2948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2949{
2950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2951 /** @todo intel RAPL. */
2952 *puValue = 0;
2953 return VINF_SUCCESS;
2954}
2955
2956
2957/** @callback_method_impl{FNCPUMWRMSR} */
2958static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2959{
2960 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2961 /** @todo intel RAPL. */
2962 return VINF_SUCCESS;
2963}
2964
2965
2966/** @callback_method_impl{FNCPUMRDMSR} */
2967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2968{
2969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2970 /** @todo intel power management. */
2971 *puValue = 0;
2972 return VINF_SUCCESS;
2973}
2974
2975
2976/** @callback_method_impl{FNCPUMRDMSR} */
2977static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2978{
2979 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2980 /** @todo intel RAPL. */
2981 *puValue = 0;
2982 return VINF_SUCCESS;
2983}
2984
2985
2986/** @callback_method_impl{FNCPUMWRMSR} */
2987static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2988{
2989 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2990 /** @todo intel RAPL. */
2991 return VINF_SUCCESS;
2992}
2993
2994
2995/** @callback_method_impl{FNCPUMRDMSR} */
2996static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2997{
2998 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2999 /** @todo intel power management. */
3000 *puValue = 0;
3001 return VINF_SUCCESS;
3002}
3003
3004
3005/** @callback_method_impl{FNCPUMRDMSR} */
3006static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3007{
3008 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3009 /** @todo intel RAPL. */
3010 *puValue = 0;
3011 return VINF_SUCCESS;
3012}
3013
3014
3015/** @callback_method_impl{FNCPUMWRMSR} */
3016static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3017{
3018 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3019 /** @todo intel RAPL. */
3020 return VINF_SUCCESS;
3021}
3022
3023
3024/** @callback_method_impl{FNCPUMRDMSR} */
3025static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3026{
3027 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3028 /** @todo intel power management. */
3029 *puValue = 0;
3030 return VINF_SUCCESS;
3031}
3032
3033
3034/** @callback_method_impl{FNCPUMRDMSR} */
3035static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3036{
3037 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3038 /** @todo intel RAPL. */
3039 *puValue = 0;
3040 return VINF_SUCCESS;
3041}
3042
3043
3044/** @callback_method_impl{FNCPUMWRMSR} */
3045static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3046{
3047 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3048 /** @todo intel RAPL. */
3049 return VINF_SUCCESS;
3050}
3051
3052
3053/** @callback_method_impl{FNCPUMRDMSR} */
3054static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3055{
3056 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3057 /** @todo intel power management. */
3058 *puValue = pRange->uValue;
3059 return VINF_SUCCESS;
3060}
3061
3062
3063/** @callback_method_impl{FNCPUMRDMSR} */
3064static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3065{
3066 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3067 /** @todo intel power management. */
3068 *puValue = pRange->uValue;
3069 return VINF_SUCCESS;
3070}
3071
3072
3073/** @callback_method_impl{FNCPUMRDMSR} */
3074static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3075{
3076 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3077 /** @todo intel power management. */
3078 *puValue = pRange->uValue;
3079 return VINF_SUCCESS;
3080}
3081
3082
3083/** @callback_method_impl{FNCPUMRDMSR} */
3084static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3085{
3086 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3087 /** @todo intel power management. */
3088 *puValue = 0;
3089 return VINF_SUCCESS;
3090}
3091
3092
3093/** @callback_method_impl{FNCPUMWRMSR} */
3094static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3095{
3096 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3097 /** @todo intel power management. */
3098 return VINF_SUCCESS;
3099}
3100
3101
3102/** @callback_method_impl{FNCPUMRDMSR} */
3103static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3104{
3105 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3106 /** @todo intel power management. */
3107 *puValue = 0;
3108 return VINF_SUCCESS;
3109}
3110
3111
3112/** @callback_method_impl{FNCPUMWRMSR} */
3113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3114{
3115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3116 /** @todo intel power management. */
3117 return VINF_SUCCESS;
3118}
3119
3120
3121/** @callback_method_impl{FNCPUMRDMSR} */
3122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3123{
3124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3125 /** @todo uncore msrs. */
3126 *puValue = 0;
3127 return VINF_SUCCESS;
3128}
3129
3130
3131/** @callback_method_impl{FNCPUMWRMSR} */
3132static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3133{
3134 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3135 /** @todo uncore msrs. */
3136 return VINF_SUCCESS;
3137}
3138
3139
3140/** @callback_method_impl{FNCPUMRDMSR} */
3141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3142{
3143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3144 /** @todo uncore msrs. */
3145 *puValue = 0;
3146 return VINF_SUCCESS;
3147}
3148
3149
3150/** @callback_method_impl{FNCPUMWRMSR} */
3151static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3152{
3153 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3154 /** @todo uncore msrs. */
3155 return VINF_SUCCESS;
3156}
3157
3158
3159/** @callback_method_impl{FNCPUMRDMSR} */
3160static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3161{
3162 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3163 /** @todo uncore msrs. */
3164 *puValue = 0;
3165 return VINF_SUCCESS;
3166}
3167
3168
3169/** @callback_method_impl{FNCPUMWRMSR} */
3170static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3171{
3172 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3173 /** @todo uncore msrs. */
3174 return VINF_SUCCESS;
3175}
3176
3177
3178/** @callback_method_impl{FNCPUMRDMSR} */
3179static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3180{
3181 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3182 /** @todo uncore msrs. */
3183 *puValue = 0;
3184 return VINF_SUCCESS;
3185}
3186
3187
3188/** @callback_method_impl{FNCPUMWRMSR} */
3189static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3190{
3191 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3192 /** @todo uncore msrs. */
3193 return VINF_SUCCESS;
3194}
3195
3196
3197/** @callback_method_impl{FNCPUMRDMSR} */
3198static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3199{
3200 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3201 /** @todo uncore msrs. */
3202 *puValue = 0;
3203 return VINF_SUCCESS;
3204}
3205
3206
3207/** @callback_method_impl{FNCPUMWRMSR} */
3208static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3209{
3210 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3211 /** @todo uncore msrs. */
3212 return VINF_SUCCESS;
3213}
3214
3215
3216/** @callback_method_impl{FNCPUMRDMSR} */
3217static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3218{
3219 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3220 /** @todo uncore msrs. */
3221 *puValue = 0;
3222 return VINF_SUCCESS;
3223}
3224
3225
3226/** @callback_method_impl{FNCPUMRDMSR} */
3227static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3228{
3229 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3230 /** @todo uncore msrs. */
3231 *puValue = 0;
3232 return VINF_SUCCESS;
3233}
3234
3235
3236/** @callback_method_impl{FNCPUMWRMSR} */
3237static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3238{
3239 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3240 /** @todo uncore msrs. */
3241 return VINF_SUCCESS;
3242}
3243
3244
3245/** @callback_method_impl{FNCPUMRDMSR} */
3246static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3247{
3248 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3249 /** @todo uncore msrs. */
3250 *puValue = 0;
3251 return VINF_SUCCESS;
3252}
3253
3254
3255/** @callback_method_impl{FNCPUMWRMSR} */
3256static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3257{
3258 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3259 /** @todo uncore msrs. */
3260 return VINF_SUCCESS;
3261}
3262
3263
3264/** @callback_method_impl{FNCPUMRDMSR} */
3265static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3266{
3267 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3268
3269 /*
3270 * 31:0 is SMI count (read only), 63:32 reserved.
3271 * Since we don't do SMI, the count is always zero.
3272 */
3273 *puValue = 0;
3274 return VINF_SUCCESS;
3275}
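
/*
 * Illustrative sketch (not built): if SMIs were ever delivered to the guest, the
 * count would be surfaced through bits 31:0 of this MSR with bits 63:32 kept zero.
 * The per-VCPU counter parameter below is hypothetical.
 */
#if 0
static uint64_t cpumSketchSmiCount(uint32_t cGuestSmis)
{
    return (uint64_t)cGuestSmis & UINT32_MAX; /* 31:0 = SMI count, 63:32 reserved (zero). */
}
#endif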
3276
3277
3278/** @callback_method_impl{FNCPUMRDMSR} */
3279static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3280{
3281 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3282 /** @todo implement enhanced multi-thread thermal monitoring? */
3283 *puValue = pRange->uValue;
3284 return VINF_SUCCESS;
3285}
3286
3287
3288/** @callback_method_impl{FNCPUMWRMSR} */
3289static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3290{
3291 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3292 /** @todo implement enhanced multi-thread thermal monitoring? */
3293 return VINF_SUCCESS;
3294}
3295
3296
3297/** @callback_method_impl{FNCPUMRDMSR} */
3298static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3299{
3300 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3301 /** @todo SMM & C-states? */
3302 *puValue = 0;
3303 return VINF_SUCCESS;
3304}
3305
3306
3307/** @callback_method_impl{FNCPUMWRMSR} */
3308static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3309{
3310 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3311 /** @todo SMM & C-states? */
3312 return VINF_SUCCESS;
3313}
3314
3315
3316/** @callback_method_impl{FNCPUMRDMSR} */
3317static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3318{
3319 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3320 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3321 *puValue = 0;
3322 return VINF_SUCCESS;
3323}
3324
3325
3326/** @callback_method_impl{FNCPUMWRMSR} */
3327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3328{
3329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3330 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3331 return VINF_SUCCESS;
3332}
3333
3334
3335/** @callback_method_impl{FNCPUMRDMSR} */
3336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3337{
3338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3339 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3340 *puValue = 0;
3341 return VINF_SUCCESS;
3342}
3343
3344
3345/** @callback_method_impl{FNCPUMWRMSR} */
3346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3347{
3348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3349 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3350 return VINF_SUCCESS;
3351}
3352
3353
3354/** @callback_method_impl{FNCPUMRDMSR} */
3355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3356{
3357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3358 /** @todo Core2+ platform environment control interface control register? */
3359 *puValue = 0;
3360 return VINF_SUCCESS;
3361}
3362
3363
3364/** @callback_method_impl{FNCPUMWRMSR} */
3365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3366{
3367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3368 /** @todo Core2+ platform environment control interface control register? */
3369 return VINF_SUCCESS;
3370}
3371
3372
3373/** @callback_method_impl{FNCPUMRDMSR} */
3374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3375{
3376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3377 *puValue = 0;
3378 return VINF_SUCCESS;
3379}
3380
3381
3382/*
3383 * Multiple vendor P6 MSRs.
3384 * Multiple vendor P6 MSRs.
3385 * Multiple vendor P6 MSRs.
3386 *
3387 * These MSRs were introduced with the P6 but not elevated to architectural
3388 * MSRs, despite other vendors implementing them.
3389 */
3390
3391
3392/** @callback_method_impl{FNCPUMRDMSR} */
3393static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3394{
3395 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3396 /* AMD seems to record just RIP, while Intel claims to record RIP+CS.BASE
3397 if I read the docs correctly; hence the need for separate functions. */
3398 /** @todo implement last branch records. */
3399 *puValue = 0;
3400 return VINF_SUCCESS;
3401}
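
/*
 * Illustrative sketch (not built) of the distinction noted above: an AMD flavoured
 * implementation would hand back the recorded RIP as-is, while an Intel flavoured
 * one would add the CS base to form a linear address. The recorded inputs are
 * hypothetical; last branch records are not implemented yet.
 */
#if 0
static uint64_t cpumSketchLastBranchFromIp(bool fIntel, uint64_t uRecordedRip, uint64_t uCsBase)
{
    return fIntel ? uRecordedRip + uCsBase /* linear address */ : uRecordedRip /* plain RIP */;
}
#endif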
3402
3403
3404/** @callback_method_impl{FNCPUMRDMSR} */
3405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3406{
3407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3408 /** @todo implement last branch records. */
3409 *puValue = 0;
3410 return VINF_SUCCESS;
3411}
3412
3413
3414/** @callback_method_impl{FNCPUMRDMSR} */
3415static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3416{
3417 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3418 /** @todo implement last exception records. */
3419 *puValue = 0;
3420 return VINF_SUCCESS;
3421}
3422
3423
3424/** @callback_method_impl{FNCPUMWRMSR} */
3425static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3426{
3427 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3428 /** @todo implement last exception records. */
3429 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3430 a non-canonical address. */
3431 return VINF_SUCCESS;
3432}
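
/*
 * For contrast with the note above: a strict canonical check on a CPU implementing
 * 48 virtual address bits would look roughly like this (sketch only, not built;
 * real hardware apparently skips it for this particular MSR).
 */
#if 0
static bool cpumSketchIsCanonical48(uint64_t uAddr)
{
    /* Bits 63:47 must all be copies of bit 47. */
    return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
}
#endif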
3433
3434
3435/** @callback_method_impl{FNCPUMRDMSR} */
3436static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3437{
3438 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3439 /** @todo implement last exception records. */
3440 *puValue = 0;
3441 return VINF_SUCCESS;
3442}
3443
3444
3445/** @callback_method_impl{FNCPUMWRMSR} */
3446static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3447{
3448 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3449 /** @todo implement last exception records. */
3450 return VINF_SUCCESS;
3451}
3452
3453
3454
3455/*
3456 * AMD specific
3457 * AMD specific
3458 * AMD specific
3459 */
3460
3461
3462/** @callback_method_impl{FNCPUMRDMSR} */
3463static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3464{
3465 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3466 /** @todo Implement TscRateMsr */
3467 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3468 return VINF_SUCCESS;
3469}
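
/*
 * Sketch (not built): the 1.0 reset value above reflects the fixed-point layout of
 * the TSC ratio MSR: a 32-bit binary fraction in the low dword with the integer
 * part above it. Applying such a ratio amounts to multiplying and shifting the
 * 128-bit product right by 32 bits; the helper name is made up.
 */
#if 0
static uint64_t cpumSketchScaleTscByRatio(uint64_t uTsc, uint64_t uTscRatio)
{
    uint64_t const uInt  = uTscRatio >> 32;             /* integer part */
    uint64_t const uFrac = uTscRatio & UINT32_MAX;      /* 32-bit binary fraction */
    return uTsc * uInt                                  /* whole multiples */
         + (uTsc >> 32) * uFrac                         /* high half times the fraction */
         + (((uTsc & UINT32_MAX) * uFrac) >> 32);       /* low half times the fraction */
}
#endif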
3470
3471
3472/** @callback_method_impl{FNCPUMWRMSR} */
3473static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3474{
3475 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3476 /** @todo Implement TscRateMsr */
3477 return VINF_SUCCESS;
3478}
3479
3480
3481/** @callback_method_impl{FNCPUMRDMSR} */
3482static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3483{
3484 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3485 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3486 /* Note: Only listed in the BKDG for Family 15h. */
3487 *puValue = 0;
3488 return VINF_SUCCESS;
3489}
3490
3491
3492/** @callback_method_impl{FNCPUMWRMSR} */
3493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3494{
3495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3496 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3497 return VINF_SUCCESS;
3498}
3499
3500
3501/** @callback_method_impl{FNCPUMRDMSR} */
3502static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3503{
3504 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3505 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3506 /* Note: Only listed in the BKDG for Family 15h. */
3507 *puValue = 0;
3508 return VINF_SUCCESS;
3509}
3510
3511
3512/** @callback_method_impl{FNCPUMWRMSR} */
3513static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3514{
3515 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3516 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3517 return VINF_SUCCESS;
3518}
3519
3520
3521/** @callback_method_impl{FNCPUMRDMSR} */
3522static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3523{
3524 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3525 /** @todo machine check. */
3526 *puValue = 0;
3527 return VINF_SUCCESS;
3528}
3529
3530
3531/** @callback_method_impl{FNCPUMWRMSR} */
3532static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3533{
3534 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3535 /** @todo machine check. */
3536 return VINF_SUCCESS;
3537}
3538
3539
3540/** @callback_method_impl{FNCPUMRDMSR} */
3541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3542{
3543 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3544 /** @todo AMD performance events. */
3545 *puValue = 0;
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** @callback_method_impl{FNCPUMWRMSR} */
3551static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3552{
3553 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3554 /** @todo AMD performance events. */
3555 return VINF_SUCCESS;
3556}
3557
3558
3559/** @callback_method_impl{FNCPUMRDMSR} */
3560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3561{
3562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3563 /** @todo AMD performance events. */
3564 *puValue = 0;
3565 return VINF_SUCCESS;
3566}
3567
3568
3569/** @callback_method_impl{FNCPUMWRMSR} */
3570static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3571{
3572 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3573 /** @todo AMD performance events. */
3574 return VINF_SUCCESS;
3575}
3576
3577
3578/** @callback_method_impl{FNCPUMRDMSR} */
3579static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3580{
3581 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3582 /** @todo AMD SYS_CFG */
3583 *puValue = pRange->uValue;
3584 return VINF_SUCCESS;
3585}
3586
3587
3588/** @callback_method_impl{FNCPUMWRMSR} */
3589static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3590{
3591 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3592 /** @todo AMD SYS_CFG */
3593 return VINF_SUCCESS;
3594}
3595
3596
3597/** @callback_method_impl{FNCPUMRDMSR} */
3598static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3599{
3600 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3601 /** @todo AMD HW_CFG */
3602 *puValue = 0;
3603 return VINF_SUCCESS;
3604}
3605
3606
3607/** @callback_method_impl{FNCPUMWRMSR} */
3608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3609{
3610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3611 /** @todo AMD HW_CFG */
3612 return VINF_SUCCESS;
3613}
3614
3615
3616/** @callback_method_impl{FNCPUMRDMSR} */
3617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3618{
3619 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3620 /** @todo AMD IorrMask/IorrBase */
3621 *puValue = 0;
3622 return VINF_SUCCESS;
3623}
3624
3625
3626/** @callback_method_impl{FNCPUMWRMSR} */
3627static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3628{
3629 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3630 /** @todo AMD IorrMask/IorrBase */
3631 return VINF_SUCCESS;
3632}
3633
3634
3635/** @callback_method_impl{FNCPUMRDMSR} */
3636static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3637{
3638 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3639 /** @todo AMD IorrMask/IorrBase */
3640 *puValue = 0;
3641 return VINF_SUCCESS;
3642}
3643
3644
3645/** @callback_method_impl{FNCPUMWRMSR} */
3646static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3647{
3648 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3649 /** @todo AMD IorrMask/IorrBase */
3650 return VINF_SUCCESS;
3651}
3652
3653
3654/** @callback_method_impl{FNCPUMRDMSR} */
3655static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3656{
3657 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3658 *puValue = 0;
3659 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3660 * for TOPMEM2. */
3661 //if (pRange->uValue == 0)
3662 // *puValue = _4G - RamHoleSize;
3663 return VINF_SUCCESS;
3664}
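
/*
 * Sketch (not built) of the calculation hinted at in the todo above: TOM is the top
 * of DRAM below 4 GiB (4 GiB minus the MMIO/PCI hole), while TOM2 would be the end
 * of DRAM above 4 GiB, or zero when there is none. Both inputs are hypothetical and
 * would have to come from the VM RAM configuration.
 */
#if 0
static uint64_t cpumSketchTopOfMem(bool fTom2, uint64_t cbRamHole, uint64_t cbRamAbove4G)
{
    if (!fTom2)
        return _4G - cbRamHole;                     /* TOPMEM */
    return cbRamAbove4G ? _4G + cbRamAbove4G : 0;   /* TOPMEM2 / TOM2 */
}
#endif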
3665
3666
3667/** @callback_method_impl{FNCPUMWRMSR} */
3668static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3669{
3670 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3671 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3672 return VINF_SUCCESS;
3673}
3674
3675
3676/** @callback_method_impl{FNCPUMRDMSR} */
3677static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3678{
3679 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3680 /** @todo AMD NB_CFG1 */
3681 *puValue = 0;
3682 return VINF_SUCCESS;
3683}
3684
3685
3686/** @callback_method_impl{FNCPUMWRMSR} */
3687static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3688{
3689 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3690 /** @todo AMD NB_CFG1 */
3691 return VINF_SUCCESS;
3692}
3693
3694
3695/** @callback_method_impl{FNCPUMRDMSR} */
3696static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3697{
3698 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3699 /** @todo machine check. */
3700 *puValue = 0;
3701 return VINF_SUCCESS;
3702}
3703
3704
3705/** @callback_method_impl{FNCPUMWRMSR} */
3706static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3707{
3708 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3709 /** @todo machine check. */
3710 return VINF_SUCCESS;
3711}
3712
3713
3714/** @callback_method_impl{FNCPUMRDMSR} */
3715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3716{
3717 RT_NOREF_PV(idMsr);
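    /* The CPU name MSRs mirror the extended CPUID name leaves: each MSR packs two of
       the four 32-bit registers of the leaf selected via pRange->uValue. */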
3718 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3719 if (pLeaf)
3720 {
3721 if (!(pRange->uValue & 1))
3722 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3723 else
3724 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3725 }
3726 else
3727 *puValue = 0;
3728 return VINF_SUCCESS;
3729}
3730
3731
3732/** @callback_method_impl{FNCPUMWRMSR} */
3733static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3734{
3735 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3736 /** @todo Remember guest programmed CPU name. */
3737 return VINF_SUCCESS;
3738}
3739
3740
3741/** @callback_method_impl{FNCPUMRDMSR} */
3742static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3743{
3744 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3745 /** @todo AMD HTC. */
3746 *puValue = pRange->uValue;
3747 return VINF_SUCCESS;
3748}
3749
3750
3751/** @callback_method_impl{FNCPUMWRMSR} */
3752static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3753{
3754 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3755 /** @todo AMD HTC. */
3756 return VINF_SUCCESS;
3757}
3758
3759
3760/** @callback_method_impl{FNCPUMRDMSR} */
3761static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3762{
3763 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3764 /** @todo AMD STC. */
3765 *puValue = 0;
3766 return VINF_SUCCESS;
3767}
3768
3769
3770/** @callback_method_impl{FNCPUMWRMSR} */
3771static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3772{
3773 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3774 /** @todo AMD STC. */
3775 return VINF_SUCCESS;
3776}
3777
3778
3779/** @callback_method_impl{FNCPUMRDMSR} */
3780static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3781{
3782 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3783 /** @todo AMD FIDVID_CTL. */
3784 *puValue = pRange->uValue;
3785 return VINF_SUCCESS;
3786}
3787
3788
3789/** @callback_method_impl{FNCPUMWRMSR} */
3790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3791{
3792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3793 /** @todo AMD FIDVID_CTL. */
3794 return VINF_SUCCESS;
3795}
3796
3797
3798/** @callback_method_impl{FNCPUMRDMSR} */
3799static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3800{
3801 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3802 /** @todo AMD FIDVID_STATUS. */
3803 *puValue = pRange->uValue;
3804 return VINF_SUCCESS;
3805}
3806
3807
3808/** @callback_method_impl{FNCPUMRDMSR} */
3809static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3810{
3811 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3812 /** @todo AMD MC. */
3813 *puValue = 0;
3814 return VINF_SUCCESS;
3815}
3816
3817
3818/** @callback_method_impl{FNCPUMWRMSR} */
3819static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3820{
3821 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3822 /** @todo AMD MC. */
3823 return VINF_SUCCESS;
3824}
3825
3826
3827/** @callback_method_impl{FNCPUMRDMSR} */
3828static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3829{
3830 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3831 /** @todo AMD SMM/SMI and I/O trap. */
3832 *puValue = 0;
3833 return VINF_SUCCESS;
3834}
3835
3836
3837/** @callback_method_impl{FNCPUMWRMSR} */
3838static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3839{
3840 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3841 /** @todo AMD SMM/SMI and I/O trap. */
3842 return VINF_SUCCESS;
3843}
3844
3845
3846/** @callback_method_impl{FNCPUMRDMSR} */
3847static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3848{
3849 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3850 /** @todo AMD SMM/SMI and I/O trap. */
3851 *puValue = 0;
3852 return VINF_SUCCESS;
3853}
3854
3855
3856/** @callback_method_impl{FNCPUMWRMSR} */
3857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3858{
3859 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3860 /** @todo AMD SMM/SMI and I/O trap. */
3861 return VINF_SUCCESS;
3862}
3863
3864
3865/** @callback_method_impl{FNCPUMRDMSR} */
3866static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3867{
3868 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3869 /** @todo Interrupt pending message. */
3870 *puValue = 0;
3871 return VINF_SUCCESS;
3872}
3873
3874
3875/** @callback_method_impl{FNCPUMWRMSR} */
3876static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3877{
3878 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3879 /** @todo Interrupt pending message. */
3880 return VINF_SUCCESS;
3881}
3882
3883
3884/** @callback_method_impl{FNCPUMRDMSR} */
3885static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3886{
3887 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3888 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3889 *puValue = 0;
3890 return VINF_SUCCESS;
3891}
3892
3893
3894/** @callback_method_impl{FNCPUMWRMSR} */
3895static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3896{
3897 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3898 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3899 return VINF_SUCCESS;
3900}
3901
3902
3903/** @callback_method_impl{FNCPUMRDMSR} */
3904static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3905{
3906 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3907 /** @todo AMD MMIO Configuration base address. */
3908 *puValue = 0;
3909 return VINF_SUCCESS;
3910}
3911
3912
3913/** @callback_method_impl{FNCPUMWRMSR} */
3914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3915{
3916 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3917 /** @todo AMD MMIO Configuration base address. */
3918 return VINF_SUCCESS;
3919}
3920
3921
3922/** @callback_method_impl{FNCPUMRDMSR} */
3923static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3924{
3925 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3926 /** @todo AMD 0xc0010059. */
3927 *puValue = 0;
3928 return VINF_SUCCESS;
3929}
3930
3931
3932/** @callback_method_impl{FNCPUMWRMSR} */
3933static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3934{
3935 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3936 /** @todo AMD 0xc0010059. */
3937 return VINF_SUCCESS;
3938}
3939
3940
3941/** @callback_method_impl{FNCPUMRDMSR} */
3942static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3943{
3944 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3945 /** @todo AMD P-states. */
3946 *puValue = pRange->uValue;
3947 return VINF_SUCCESS;
3948}
3949
3950
3951/** @callback_method_impl{FNCPUMRDMSR} */
3952static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3953{
3954 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3955 /** @todo AMD P-states. */
3956 *puValue = pRange->uValue;
3957 return VINF_SUCCESS;
3958}
3959
3960
3961/** @callback_method_impl{FNCPUMWRMSR} */
3962static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3963{
3964 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3965 /** @todo AMD P-states. */
3966 return VINF_SUCCESS;
3967}
3968
3969
3970/** @callback_method_impl{FNCPUMRDMSR} */
3971static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3972{
3973 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3974 /** @todo AMD P-states. */
3975 *puValue = pRange->uValue;
3976 return VINF_SUCCESS;
3977}
3978
3979
3980/** @callback_method_impl{FNCPUMWRMSR} */
3981static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3982{
3983 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3984 /** @todo AMD P-states. */
3985 return VINF_SUCCESS;
3986}
3987
3988
3989/** @callback_method_impl{FNCPUMRDMSR} */
3990static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3991{
3992 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3993 /** @todo AMD P-states. */
3994 *puValue = pRange->uValue;
3995 return VINF_SUCCESS;
3996}
3997
3998
3999/** @callback_method_impl{FNCPUMWRMSR} */
4000static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4001{
4002 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4003 /** @todo AMD P-states. */
4004 return VINF_SUCCESS;
4005}
4006
4007
4008/** @callback_method_impl{FNCPUMRDMSR} */
4009static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4010{
4011 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4012 /** @todo AMD P-states. */
4013 *puValue = pRange->uValue;
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** @callback_method_impl{FNCPUMWRMSR} */
4019static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4020{
4021 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4022 /** @todo AMD P-states. */
4023 return VINF_SUCCESS;
4024}
4025
4026
4027/** @callback_method_impl{FNCPUMRDMSR} */
4028static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4029{
4030 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4031 /** @todo AMD P-states. */
4032 *puValue = pRange->uValue;
4033 return VINF_SUCCESS;
4034}
4035
4036
4037/** @callback_method_impl{FNCPUMWRMSR} */
4038static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4039{
4040 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4041 /* Note! Writing 0 does not seem to #GP; not sure whether it does anything to the value... */
4042 /** @todo AMD P-states. */
4043 return VINF_SUCCESS;
4044}
4045
4046
4047/** @callback_method_impl{FNCPUMRDMSR} */
4048static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4049{
4050 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4051 /** @todo AMD C-states. */
4052 *puValue = 0;
4053 return VINF_SUCCESS;
4054}
4055
4056
4057/** @callback_method_impl{FNCPUMWRMSR} */
4058static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4059{
4060 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4061 /** @todo AMD C-states. */
4062 return VINF_SUCCESS;
4063}
4064
4065
4066/** @callback_method_impl{FNCPUMRDMSR} */
4067static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4068{
4069 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4070 /** @todo AMD machine checks. */
4071 *puValue = 0;
4072 return VINF_SUCCESS;
4073}
4074
4075
4076/** @callback_method_impl{FNCPUMWRMSR} */
4077static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4078{
4079 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4080 /** @todo AMD machine checks. */
4081 return VINF_SUCCESS;
4082}
4083
4084
4085/** @callback_method_impl{FNCPUMRDMSR} */
4086static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4087{
4088 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4089 /** @todo AMD SMM. */
4090 *puValue = 0;
4091 return VINF_SUCCESS;
4092}
4093
4094
4095/** @callback_method_impl{FNCPUMWRMSR} */
4096static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4097{
4098 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4099 /** @todo AMD SMM. */
4100 return VINF_SUCCESS;
4101}
4102
4103
4104/** @callback_method_impl{FNCPUMRDMSR} */
4105static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4106{
4107 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4108 /** @todo AMD SMM. */
4109 *puValue = 0;
4110 return VINF_SUCCESS;
4111}
4112
4113
4114/** @callback_method_impl{FNCPUMWRMSR} */
4115static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4116{
4117 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4118 /** @todo AMD SMM. */
4119 return VINF_SUCCESS;
4120}
4121
4122
4123
4124/** @callback_method_impl{FNCPUMRDMSR} */
4125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4126{
4127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4128 /** @todo AMD SMM. */
4129 *puValue = 0;
4130 return VINF_SUCCESS;
4131}
4132
4133
4134/** @callback_method_impl{FNCPUMWRMSR} */
4135static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4136{
4137 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4138 /** @todo AMD SMM. */
4139 return VINF_SUCCESS;
4140}
4141
4142
4143/** @callback_method_impl{FNCPUMRDMSR} */
4144static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4145{
4146 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4147 PVM pVM = pVCpu->CTX_SUFF(pVM);
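    /* When SVM is exposed to the guest, report the LOCK bit as set so the guest sees
       that SVMDIS cannot be toggled (see the write handler below). */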
4148 if (pVM->cpum.s.GuestFeatures.fSvm)
4149 *puValue = MSR_K8_VM_CR_LOCK;
4150 else
4151 *puValue = 0;
4152 return VINF_SUCCESS;
4153}
4154
4155
4156/** @callback_method_impl{FNCPUMWRMSR} */
4157static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4158{
4159 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4160 PVM pVM = pVCpu->CTX_SUFF(pVM);
4161 if (pVM->cpum.s.GuestFeatures.fSvm)
4162 {
4163 /* Silently ignore writes to LOCK and SVM_DISABLE bit when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
4164 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4165 return VERR_CPUM_RAISE_GP_0;
4166 return VINF_SUCCESS;
4167 }
4168 return VERR_CPUM_RAISE_GP_0;
4169}
4170
4171
4172/** @callback_method_impl{FNCPUMRDMSR} */
4173static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4174{
4175 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4176 /** @todo AMD IGNNE\# control. */
4177 *puValue = 0;
4178 return VINF_SUCCESS;
4179}
4180
4181
4182/** @callback_method_impl{FNCPUMWRMSR} */
4183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4184{
4185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4186 /** @todo AMD IGNNE\# control. */
4187 return VINF_SUCCESS;
4188}
4189
4190
4191/** @callback_method_impl{FNCPUMRDMSR} */
4192static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4193{
4194 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4195 /** @todo AMD SMM. */
4196 *puValue = 0;
4197 return VINF_SUCCESS;
4198}
4199
4200
4201/** @callback_method_impl{FNCPUMWRMSR} */
4202static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4203{
4204 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4205 /** @todo AMD SMM. */
4206 return VINF_SUCCESS;
4207}
4208
4209
4210/** @callback_method_impl{FNCPUMRDMSR} */
4211static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4212{
4213 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4214 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4215 return VINF_SUCCESS;
4216}
4217
4218
4219/** @callback_method_impl{FNCPUMWRMSR} */
4220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4221{
4222 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4223 if (uValue & UINT64_C(0xfff))
4224 {
4225 Log(("CPUM: Invalid setting of low 12 bits when writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4226 return VERR_CPUM_RAISE_GP_0;
4227 }
4228
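    /* Any address bits above the guest's maximum physical address width are invalid. */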
4229 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4230 if (fInvPhysMask & uValue)
4231 {
4232 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4233 idMsr, uValue, uValue & fInvPhysMask));
4234 return VERR_CPUM_RAISE_GP_0;
4235 }
4236
4237 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4238 return VINF_SUCCESS;
4239}
4240
4241
4242/** @callback_method_impl{FNCPUMRDMSR} */
4243static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4244{
4245 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4246 /** @todo AMD SVM. */
4247 *puValue = 0; /* RAZ */
4248 return VINF_SUCCESS;
4249}
4250
4251
4252/** @callback_method_impl{FNCPUMWRMSR} */
4253static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4254{
4255 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4256 /** @todo AMD SVM. */
4257 return VINF_SUCCESS;
4258}
4259
4260
4261/** @callback_method_impl{FNCPUMRDMSR} */
4262static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4263{
4264 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4265 /** @todo AMD SMM. */
4266 *puValue = 0; /* RAZ */
4267 return VINF_SUCCESS;
4268}
4269
4270
4271/** @callback_method_impl{FNCPUMWRMSR} */
4272static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4273{
4274 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4275 /** @todo AMD SMM. */
4276 return VINF_SUCCESS;
4277}
4278
4279
4280/** @callback_method_impl{FNCPUMRDMSR} */
4281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4282{
4283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4284 /** @todo AMD SMM/SMI. */
4285 *puValue = 0;
4286 return VINF_SUCCESS;
4287}
4288
4289
4290/** @callback_method_impl{FNCPUMWRMSR} */
4291static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4292{
4293 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4294 /** @todo AMD SMM/SMI. */
4295 return VINF_SUCCESS;
4296}
4297
4298
4299/** @callback_method_impl{FNCPUMRDMSR} */
4300static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4301{
4302 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4303 /** @todo AMD OS visible workaround. */
4304 *puValue = pRange->uValue;
4305 return VINF_SUCCESS;
4306}
4307
4308
4309/** @callback_method_impl{FNCPUMWRMSR} */
4310static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4311{
4312 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4313 /** @todo AMD OS visible workaround. */
4314 return VINF_SUCCESS;
4315}
4316
4317
4318/** @callback_method_impl{FNCPUMRDMSR} */
4319static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4320{
4321 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4322 /** @todo AMD OS visible workaround. */
4323 *puValue = 0;
4324 return VINF_SUCCESS;
4325}
4326
4327
4328/** @callback_method_impl{FNCPUMWRMSR} */
4329static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4330{
4331 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4332 /** @todo AMD OS visible workaround. */
4333 return VINF_SUCCESS;
4334}
4335
4336
4337/** @callback_method_impl{FNCPUMRDMSR} */
4338static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4339{
4340 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4341 /** @todo AMD L2I performance counters. */
4342 *puValue = 0;
4343 return VINF_SUCCESS;
4344}
4345
4346
4347/** @callback_method_impl{FNCPUMWRMSR} */
4348static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4349{
4350 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4351 /** @todo AMD L2I performance counters. */
4352 return VINF_SUCCESS;
4353}
4354
4355
4356/** @callback_method_impl{FNCPUMRDMSR} */
4357static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4358{
4359 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4360 /** @todo AMD L2I performance counters. */
4361 *puValue = 0;
4362 return VINF_SUCCESS;
4363}
4364
4365
4366/** @callback_method_impl{FNCPUMWRMSR} */
4367static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4368{
4369 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4370 /** @todo AMD L2I performance counters. */
4371 return VINF_SUCCESS;
4372}
4373
4374
4375/** @callback_method_impl{FNCPUMRDMSR} */
4376static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4377{
4378 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4379 /** @todo AMD Northbridge performance counters. */
4380 *puValue = 0;
4381 return VINF_SUCCESS;
4382}
4383
4384
4385/** @callback_method_impl{FNCPUMWRMSR} */
4386static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4387{
4388 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4389 /** @todo AMD Northbridge performance counters. */
4390 return VINF_SUCCESS;
4391}
4392
4393
4394/** @callback_method_impl{FNCPUMRDMSR} */
4395static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4396{
4397 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4398 /** @todo AMD Northbridge performance counters. */
4399 *puValue = 0;
4400 return VINF_SUCCESS;
4401}
4402
4403
4404/** @callback_method_impl{FNCPUMWRMSR} */
4405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4406{
4407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4408 /** @todo AMD Northbridge performance counters. */
4409 return VINF_SUCCESS;
4410}
4411
4412
4413/** @callback_method_impl{FNCPUMRDMSR} */
4414static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4415{
4416 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4417 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4418 * CPUs. Needs to be explored and K7 presence verified. */
4419 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4420 *puValue = pRange->uValue;
4421 return VINF_SUCCESS;
4422}
4423
4424
4425/** @callback_method_impl{FNCPUMWRMSR} */
4426static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4427{
4428 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4429 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4430 * CPUs. Needs to be explored and K7 presence verified. */
4431 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4432 return VINF_SUCCESS;
4433}
4434
4435
4436/** @callback_method_impl{FNCPUMRDMSR} */
4437static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4438{
4439 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4440 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4441 * CPUs. Needs to be explored and K7 presence verified. */
4442 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4443 * describing EBL_CR_POWERON. */
4444 *puValue = pRange->uValue;
4445 return VINF_SUCCESS;
4446}
4447
4448
4449/** @callback_method_impl{FNCPUMWRMSR} */
4450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4451{
4452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4453 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4454 * CPUs. Needs to be explored and K7 presence verified. */
4455 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4456 * describing EBL_CR_POWERON. */
4457 return VINF_SUCCESS;
4458}
4459
4460
4461/** @callback_method_impl{FNCPUMRDMSR} */
4462static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4463{
4464 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4465 bool fIgnored;
4466 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4467 if (pLeaf)
4468 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4469 else
4470 *puValue = 0;
4471 return VINF_SUCCESS;
4472}
4473
4474
4475/** @callback_method_impl{FNCPUMWRMSR} */
4476static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4477{
4478 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4479 /** @todo Changing CPUID leaf 7/0. */
4480 return VINF_SUCCESS;
4481}
4482
4483
4484/** @callback_method_impl{FNCPUMRDMSR} */
4485static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4486{
4487 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4488 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4489 if (pLeaf)
4490 *puValue = pLeaf->uEcx;
4491 else
4492 *puValue = 0;
4493 return VINF_SUCCESS;
4494}
4495
4496
4497/** @callback_method_impl{FNCPUMWRMSR} */
4498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4499{
4500 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4501 /** @todo Changing CPUID leaf 6. */
4502 return VINF_SUCCESS;
4503}
4504
4505
4506/** @callback_method_impl{FNCPUMRDMSR} */
4507static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4508{
4509 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4510 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4511 if (pLeaf)
4512 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4513 else
4514 *puValue = 0;
4515 return VINF_SUCCESS;
4516}
4517
4518
4519/** @callback_method_impl{FNCPUMWRMSR} */
4520static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4521{
4522 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4523 /** @todo Changing CPUID leaf 0x00000001. */
4524 return VINF_SUCCESS;
4525}
4526
4527
4528/** @callback_method_impl{FNCPUMRDMSR} */
4529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4530{
4531 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4532 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4533 if (pLeaf)
4534 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4535 else
4536 *puValue = 0;
4537 return VINF_SUCCESS;
4538}
4539
4540
4541/** @callback_method_impl{FNCPUMWRMSR} */
4542static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4543{
4544 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4545 /** @todo Changing CPUID leaf 0x80000001. */
4546 return VINF_SUCCESS;
4547}
4548
4549
4550/** @callback_method_impl{FNCPUMRDMSR} */
4551static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4552{
4553 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4554 /** @todo Fake AMD microcode patching. */
4555 *puValue = pRange->uValue;
4556 return VINF_SUCCESS;
4557}
4558
4559
4560/** @callback_method_impl{FNCPUMWRMSR} */
4561static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4562{
4563 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4564 /** @todo Fake AMD microcode patching. */
4565 return VINF_SUCCESS;
4566}
4567
4568
4569/** @callback_method_impl{FNCPUMRDMSR} */
4570static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4571{
4572 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4573 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4574 * CPUs. Needs to be explored and K7 presence verified. */
4575 /** @todo undocumented */
4576 *puValue = 0;
4577 return VINF_SUCCESS;
4578}
4579
4580
4581/** @callback_method_impl{FNCPUMWRMSR} */
4582static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4583{
4584 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4585 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4586 * CPUs. Needs to be explored and K7 presence verified. */
4587 /** @todo undocumented */
4588 return VINF_SUCCESS;
4589}
4590
4591
4592/** @callback_method_impl{FNCPUMRDMSR} */
4593static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4594{
4595 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4596 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4597 * CPUs. Needs to be explored and K7 presence verified. */
4598 /** @todo undocumented */
4599 *puValue = 0;
4600 return VINF_SUCCESS;
4601}
4602
4603
4604/** @callback_method_impl{FNCPUMWRMSR} */
4605static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4606{
4607 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4608 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4609 * CPUs. Needs to be explored and K7 presence verified. */
4610 /** @todo undocumented */
4611 return VINF_SUCCESS;
4612}
4613
4614
4615/** @callback_method_impl{FNCPUMRDMSR} */
4616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4617{
4618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4619 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4620 * CPUs. Needs to be explored and K7 presence verified. */
4621 /** @todo undocumented */
4622 *puValue = 0;
4623 return VINF_SUCCESS;
4624}
4625
4626
4627/** @callback_method_impl{FNCPUMWRMSR} */
4628static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4629{
4630 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4631 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4632 * CPUs. Needs to be explored and K7 presence verified. */
4633 /** @todo undocumented */
4634 return VINF_SUCCESS;
4635}
4636
4637
4638/** @callback_method_impl{FNCPUMRDMSR} */
4639static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4640{
4641 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4642 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4643 * CPUs. Needs to be explored and K7 presence verified. */
4644 /** @todo undocumented */
4645 *puValue = 0;
4646 return VINF_SUCCESS;
4647}
4648
4649
4650/** @callback_method_impl{FNCPUMWRMSR} */
4651static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4652{
4653 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4654 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4655 * CPUs. Needs to be explored and K7 presence verified. */
4656 /** @todo undocumented */
4657 return VINF_SUCCESS;
4658}
4659
4660
4661/** @callback_method_impl{FNCPUMRDMSR} */
4662static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4663{
4664 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4665 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4666 * CPUs. Needs to be explored and K7 presence verified. */
4667 /** @todo undocumented */
4668 *puValue = 0;
4669 return VINF_SUCCESS;
4670}
4671
4672
4673/** @callback_method_impl{FNCPUMWRMSR} */
4674static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4675{
4676 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4677 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4678 * CPUs. Needs to be explored and K7 presence verified. */
4679 /** @todo undocumented */
4680 return VINF_SUCCESS;
4681}
4682
4683
4684/** @callback_method_impl{FNCPUMRDMSR} */
4685static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4686{
4687 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4688 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4689 * CPUs. Needs to be explored and K7 presence verified. */
4690 /** @todo undocumented */
4691 *puValue = 0;
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/** @callback_method_impl{FNCPUMWRMSR} */
4697static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4698{
4699 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4700 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4701 * CPUs. Needs to be explored and K7 presence verified. */
4702 /** @todo undocumented */
4703 return VINF_SUCCESS;
4704}
4705
4706
4707/** @callback_method_impl{FNCPUMRDMSR} */
4708static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4709{
4710 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4711 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4712 * CPUs. Needs to be explored and K7 presence verified. */
4713 /** @todo AMD node ID and bios scratch. */
4714 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4715 return VINF_SUCCESS;
4716}
4717
4718
4719/** @callback_method_impl{FNCPUMWRMSR} */
4720static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4721{
4722 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4723 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4724 * CPUs. Needs to be explored and K7 presence verified. */
4725 /** @todo AMD node ID and bios scratch. */
4726 return VINF_SUCCESS;
4727}
4728
4729
4730/** @callback_method_impl{FNCPUMRDMSR} */
4731static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4732{
4733 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4734 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4735 * CPUs. Needs to be explored and K7 presence verified. */
4736 /** @todo AMD DRx address masking (range breakpoints). */
4737 *puValue = 0;
4738 return VINF_SUCCESS;
4739}
4740
4741
4742/** @callback_method_impl{FNCPUMWRMSR} */
4743static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4744{
4745 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4746 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4747 * CPUs. Needs to be explored and K7 presence verified. */
4748 /** @todo AMD DRx address masking (range breakpoints). */
4749 return VINF_SUCCESS;
4750}
4751
4752
4753/** @callback_method_impl{FNCPUMRDMSR} */
4754static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4755{
4756 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4757 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4758 * CPUs. Needs to be explored and K7 presence verified. */
4759 /** @todo AMD undocumented debugging features. */
4760 *puValue = 0;
4761 return VINF_SUCCESS;
4762}
4763
4764
4765/** @callback_method_impl{FNCPUMWRMSR} */
4766static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4767{
4768 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4769 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4770 * CPUs. Needs to be explored and K7 presence verified. */
4771 /** @todo AMD undocumented debugging features. */
4772 return VINF_SUCCESS;
4773}
4774
4775
4776/** @callback_method_impl{FNCPUMRDMSR} */
4777static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4778{
4779 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4780 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4781 * CPUs. Needs to be explored and K7 presence verified. */
4782 /** @todo AMD undocumented debugging features. */
4783 *puValue = 0;
4784 return VINF_SUCCESS;
4785}
4786
4787
4788/** @callback_method_impl{FNCPUMWRMSR} */
4789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4790{
4791 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4792 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4793 * CPUs. Needs to be explored and K7 presence verified. */
4794 /** @todo AMD undocumented debugging features. */
4795 return VINF_SUCCESS;
4796}
4797
4798
4799/** @callback_method_impl{FNCPUMRDMSR} */
4800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4801{
4802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4803 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4804 * CPUs. Needs to be explored and K7 presence verified. */
4805 /** @todo AMD load-store config. */
4806 *puValue = 0;
4807 return VINF_SUCCESS;
4808}
4809
4810
4811/** @callback_method_impl{FNCPUMWRMSR} */
4812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4813{
4814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4815 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4816 * CPUs. Needs to be explored and K7 presence verified. */
4817 /** @todo AMD load-store config. */
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** @callback_method_impl{FNCPUMRDMSR} */
4823static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4824{
4825 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4826 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4827 * CPUs. Needs to be explored and K7 presence verified. */
4828 /** @todo AMD instruction cache config. */
4829 *puValue = 0;
4830 return VINF_SUCCESS;
4831}
4832
4833
4834/** @callback_method_impl{FNCPUMWRMSR} */
4835static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4836{
4837 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4838 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4839 * CPUs. Needs to be explored and K7 presence verified. */
4840 /** @todo AMD instruction cache config. */
4841 return VINF_SUCCESS;
4842}
4843
4844
4845/** @callback_method_impl{FNCPUMRDMSR} */
4846static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4847{
4848 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4849 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4850 * CPUs. Needs to be explored and K7 presence verified. */
4851 /** @todo AMD data cache config. */
4852 *puValue = 0;
4853 return VINF_SUCCESS;
4854}
4855
4856
4857/** @callback_method_impl{FNCPUMWRMSR} */
4858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4859{
4860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4861 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4862 * CPUs. Needs to be explored and K7 presence verified. */
4863 /** @todo AMD data cache config. */
4864 return VINF_SUCCESS;
4865}
4866
4867
4868/** @callback_method_impl{FNCPUMRDMSR} */
4869static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4870{
4871 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4872 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4873 * CPUs. Needs to be explored and K7 presence verified. */
4874 /** @todo AMD bus unit config. */
4875 *puValue = 0;
4876 return VINF_SUCCESS;
4877}
4878
4879
4880/** @callback_method_impl{FNCPUMWRMSR} */
4881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4882{
4883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4884 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4885 * CPUs. Needs to be explored and K7 presence verified. */
4886 /** @todo AMD bus unit config. */
4887 return VINF_SUCCESS;
4888}
4889
4890
4891/** @callback_method_impl{FNCPUMRDMSR} */
4892static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4893{
4894 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4895 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4896 * CPUs. Needs to be explored and K7 presence verified. */
4897 /** @todo Undocumented AMD debug control register \#2. */
4898 *puValue = 0;
4899 return VINF_SUCCESS;
4900}
4901
4902
4903/** @callback_method_impl{FNCPUMWRMSR} */
4904static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4905{
4906 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4907 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4908 * CPUs. Needs to be explored and K7 presence verified. */
4909 /** @todo Undocumented AMD debug control register \#2. */
4910 return VINF_SUCCESS;
4911}
4912
4913
4914/** @callback_method_impl{FNCPUMRDMSR} */
4915static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4916{
4917 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4918 /** @todo AMD FPU config. */
4919 *puValue = 0;
4920 return VINF_SUCCESS;
4921}
4922
4923
4924/** @callback_method_impl{FNCPUMWRMSR} */
4925static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4926{
4927 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4928 /** @todo AMD FPU config. */
4929 return VINF_SUCCESS;
4930}
4931
4932
4933/** @callback_method_impl{FNCPUMRDMSR} */
4934static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4935{
4936 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4937 /** @todo AMD decoder config. */
4938 *puValue = 0;
4939 return VINF_SUCCESS;
4940}
4941
4942
4943/** @callback_method_impl{FNCPUMWRMSR} */
4944static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4945{
4946 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4947 /** @todo AMD decoder config. */
4948 return VINF_SUCCESS;
4949}
4950
4951
4952/** @callback_method_impl{FNCPUMRDMSR} */
4953static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4954{
4955 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4956 /* Note! 10h and 16h */
4957 /** @todo AMD bus unit config. */
4958 *puValue = 0;
4959 return VINF_SUCCESS;
4960}
4961
4962
4963/** @callback_method_impl{FNCPUMWRMSR} */
4964static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4965{
4966 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4967 /* Note! 10h and 16h */
4968 /** @todo AMD bus unit config. */
4969 return VINF_SUCCESS;
4970}
4971
4972
4973/** @callback_method_impl{FNCPUMRDMSR} */
4974static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4975{
4976 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4977 /** @todo AMD unit config. */
4978 *puValue = 0;
4979 return VINF_SUCCESS;
4980}
4981
4982
4983/** @callback_method_impl{FNCPUMWRMSR} */
4984static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4985{
4986 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4987 /** @todo AMD unit config. */
4988 return VINF_SUCCESS;
4989}
4990
4991
4992/** @callback_method_impl{FNCPUMRDMSR} */
4993static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4994{
4995 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4996 /** @todo AMD unit config 2. */
4997 *puValue = 0;
4998 return VINF_SUCCESS;
4999}
5000
5001
5002/** @callback_method_impl{FNCPUMWRMSR} */
5003static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5004{
5005 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5006 /** @todo AMD unit config 2. */
5007 return VINF_SUCCESS;
5008}
5009
5010
5011/** @callback_method_impl{FNCPUMRDMSR} */
5012static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5013{
5014 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5015 /** @todo AMD combined unit config 3. */
5016 *puValue = 0;
5017 return VINF_SUCCESS;
5018}
5019
5020
5021/** @callback_method_impl{FNCPUMWRMSR} */
5022static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5023{
5024 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5025 /** @todo AMD combined unit config 3. */
5026 return VINF_SUCCESS;
5027}
5028
5029
5030/** @callback_method_impl{FNCPUMRDMSR} */
5031static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5032{
5033 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5034 /** @todo AMD execution unit config. */
5035 *puValue = 0;
5036 return VINF_SUCCESS;
5037}
5038
5039
5040/** @callback_method_impl{FNCPUMWRMSR} */
5041static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5042{
5043 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5044 /** @todo AMD execution unit config. */
5045 return VINF_SUCCESS;
5046}
5047
5048
5049/** @callback_method_impl{FNCPUMRDMSR} */
5050static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5051{
5052 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5053 /** @todo AMD load-store config 2. */
5054 *puValue = 0;
5055 return VINF_SUCCESS;
5056}
5057
5058
5059/** @callback_method_impl{FNCPUMWRMSR} */
5060static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5061{
5062 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5063 /** @todo AMD load-store config 2. */
5064 return VINF_SUCCESS;
5065}
5066
5067
5068/** @callback_method_impl{FNCPUMRDMSR} */
5069static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5070{
5071 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5072 /** @todo AMD IBS. */
5073 *puValue = 0;
5074 return VINF_SUCCESS;
5075}
5076
5077
5078/** @callback_method_impl{FNCPUMWRMSR} */
5079static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5080{
5081 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5082 /** @todo AMD IBS. */
5083 return VINF_SUCCESS;
5084}
5085
5086
5087/** @callback_method_impl{FNCPUMRDMSR} */
5088static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5089{
5090 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5091 /** @todo AMD IBS. */
5092 *puValue = 0;
5093 return VINF_SUCCESS;
5094}
5095
5096
5097/** @callback_method_impl{FNCPUMWRMSR} */
5098static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5099{
5100 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5101 /** @todo AMD IBS. */
5102 return VINF_SUCCESS;
5103}
5104
5105
5106/** @callback_method_impl{FNCPUMRDMSR} */
5107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5108{
5109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5110 /** @todo AMD IBS. */
5111 *puValue = 0;
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** @callback_method_impl{FNCPUMWRMSR} */
5117static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5118{
5119 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5120 /** @todo AMD IBS. */
5121 return VINF_SUCCESS;
5122}
5123
5124
5125/** @callback_method_impl{FNCPUMRDMSR} */
5126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5127{
5128 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5129 /** @todo AMD IBS. */
5130 *puValue = 0;
5131 return VINF_SUCCESS;
5132}
5133
5134
5135/** @callback_method_impl{FNCPUMWRMSR} */
5136static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5137{
5138 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5139 /** @todo AMD IBS. */
5140 return VINF_SUCCESS;
5141}
5142
5143
5144/** @callback_method_impl{FNCPUMRDMSR} */
5145static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5146{
5147 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5148 /** @todo AMD IBS. */
5149 *puValue = 0;
5150 return VINF_SUCCESS;
5151}
5152
5153
5154/** @callback_method_impl{FNCPUMWRMSR} */
5155static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5156{
5157 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5158 /** @todo AMD IBS. */
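    /* IBS MSRs holding linear addresses (this one, IbsDcLinAddr and IbsBrTarget
       below) only accept canonical values; the check below rejects non-canonical
       writes with #GP(0), matching real hardware behaviour. */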
5159 if (!X86_IS_CANONICAL(uValue))
5160 {
5161 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5162 return VERR_CPUM_RAISE_GP_0;
5163 }
5164 return VINF_SUCCESS;
5165}
5166
5167
5168/** @callback_method_impl{FNCPUMRDMSR} */
5169static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5170{
5171 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5172 /** @todo AMD IBS. */
5173 *puValue = 0;
5174 return VINF_SUCCESS;
5175}
5176
5177
5178/** @callback_method_impl{FNCPUMWRMSR} */
5179static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5180{
5181 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5182 /** @todo AMD IBS. */
5183 return VINF_SUCCESS;
5184}
5185
5186
5187/** @callback_method_impl{FNCPUMRDMSR} */
5188static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5189{
5190 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5191 /** @todo AMD IBS. */
5192 *puValue = 0;
5193 return VINF_SUCCESS;
5194}
5195
5196
5197/** @callback_method_impl{FNCPUMWRMSR} */
5198static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5199{
5200 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5201 /** @todo AMD IBS. */
5202 return VINF_SUCCESS;
5203}
5204
5205
5206/** @callback_method_impl{FNCPUMRDMSR} */
5207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5208{
5209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5210 /** @todo AMD IBS. */
5211 *puValue = 0;
5212 return VINF_SUCCESS;
5213}
5214
5215
5216/** @callback_method_impl{FNCPUMWRMSR} */
5217static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5218{
5219 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5220 /** @todo AMD IBS. */
5221 return VINF_SUCCESS;
5222}
5223
5224
5225/** @callback_method_impl{FNCPUMRDMSR} */
5226static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5227{
5228 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5229 /** @todo AMD IBS. */
5230 *puValue = 0;
5231 return VINF_SUCCESS;
5232}
5233
5234
5235/** @callback_method_impl{FNCPUMWRMSR} */
5236static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5237{
5238 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5239 /** @todo AMD IBS. */
5240 if (!X86_IS_CANONICAL(uValue))
5241 {
5242 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5243 return VERR_CPUM_RAISE_GP_0;
5244 }
5245 return VINF_SUCCESS;
5246}
5247
5248
5249/** @callback_method_impl{FNCPUMRDMSR} */
5250static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5251{
5252 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5253 /** @todo AMD IBS. */
5254 *puValue = 0;
5255 return VINF_SUCCESS;
5256}
5257
5258
5259/** @callback_method_impl{FNCPUMWRMSR} */
5260static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5261{
5262 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5263 /** @todo AMD IBS. */
5264 return VINF_SUCCESS;
5265}
5266
5267
5268/** @callback_method_impl{FNCPUMRDMSR} */
5269static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5270{
5271 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5272 /** @todo AMD IBS. */
5273 *puValue = 0;
5274 return VINF_SUCCESS;
5275}
5276
5277
5278/** @callback_method_impl{FNCPUMWRMSR} */
5279static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5280{
5281 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5282 /** @todo AMD IBS. */
5283 return VINF_SUCCESS;
5284}
5285
5286
5287/** @callback_method_impl{FNCPUMRDMSR} */
5288static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5289{
5290 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5291 /** @todo AMD IBS. */
5292 *puValue = 0;
5293 return VINF_SUCCESS;
5294}
5295
5296
5297/** @callback_method_impl{FNCPUMWRMSR} */
5298static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5299{
5300 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5301 /** @todo AMD IBS. */
5302 if (!X86_IS_CANONICAL(uValue))
5303 {
5304 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5305 return VERR_CPUM_RAISE_GP_0;
5306 }
5307 return VINF_SUCCESS;
5308}
5309
5310
5311
5312/*
5313 * GIM MSRs.
5314 * GIM MSRs.
5315 * GIM MSRs.
5316 */
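/* GIM is the Guest Interface Manager; these two workers simply hand the MSR
   access over to the active GIM provider (e.g. the Hyper-V or KVM
   paravirtualization interfaces) via GIMReadMsr/GIMWriteMsr. */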
5317
5318
5319/** @callback_method_impl{FNCPUMRDMSR} */
5320static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5321{
5322#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5323 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5324 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5325 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5326 return VERR_CPUM_RAISE_GP_0;
5327#endif
5328 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5329}
5330
5331
5332/** @callback_method_impl{FNCPUMWRMSR} */
5333static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5334{
5335#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5336 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5337 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5338 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5339 return VERR_CPUM_RAISE_GP_0;
5340#endif
5341 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5342}
5343
5344
5345/**
5346 * MSR read function table.
5347 */
5348static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5349{
5350 NULL, /* Invalid */
5351 cpumMsrRd_FixedValue,
5352 NULL, /* Alias */
5353 cpumMsrRd_WriteOnly,
5354 cpumMsrRd_Ia32P5McAddr,
5355 cpumMsrRd_Ia32P5McType,
5356 cpumMsrRd_Ia32TimestampCounter,
5357 cpumMsrRd_Ia32PlatformId,
5358 cpumMsrRd_Ia32ApicBase,
5359 cpumMsrRd_Ia32FeatureControl,
5360 cpumMsrRd_Ia32BiosSignId,
5361 cpumMsrRd_Ia32SmmMonitorCtl,
5362 cpumMsrRd_Ia32PmcN,
5363 cpumMsrRd_Ia32MonitorFilterLineSize,
5364 cpumMsrRd_Ia32MPerf,
5365 cpumMsrRd_Ia32APerf,
5366 cpumMsrRd_Ia32MtrrCap,
5367 cpumMsrRd_Ia32MtrrPhysBaseN,
5368 cpumMsrRd_Ia32MtrrPhysMaskN,
5369 cpumMsrRd_Ia32MtrrFixed,
5370 cpumMsrRd_Ia32MtrrDefType,
5371 cpumMsrRd_Ia32Pat,
5372 cpumMsrRd_Ia32SysEnterCs,
5373 cpumMsrRd_Ia32SysEnterEsp,
5374 cpumMsrRd_Ia32SysEnterEip,
5375 cpumMsrRd_Ia32McgCap,
5376 cpumMsrRd_Ia32McgStatus,
5377 cpumMsrRd_Ia32McgCtl,
5378 cpumMsrRd_Ia32DebugCtl,
5379 cpumMsrRd_Ia32SmrrPhysBase,
5380 cpumMsrRd_Ia32SmrrPhysMask,
5381 cpumMsrRd_Ia32PlatformDcaCap,
5382 cpumMsrRd_Ia32CpuDcaCap,
5383 cpumMsrRd_Ia32Dca0Cap,
5384 cpumMsrRd_Ia32PerfEvtSelN,
5385 cpumMsrRd_Ia32PerfStatus,
5386 cpumMsrRd_Ia32PerfCtl,
5387 cpumMsrRd_Ia32FixedCtrN,
5388 cpumMsrRd_Ia32PerfCapabilities,
5389 cpumMsrRd_Ia32FixedCtrCtrl,
5390 cpumMsrRd_Ia32PerfGlobalStatus,
5391 cpumMsrRd_Ia32PerfGlobalCtrl,
5392 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5393 cpumMsrRd_Ia32PebsEnable,
5394 cpumMsrRd_Ia32ClockModulation,
5395 cpumMsrRd_Ia32ThermInterrupt,
5396 cpumMsrRd_Ia32ThermStatus,
5397 cpumMsrRd_Ia32Therm2Ctl,
5398 cpumMsrRd_Ia32MiscEnable,
5399 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5400 cpumMsrRd_Ia32McNCtl2,
5401 cpumMsrRd_Ia32DsArea,
5402 cpumMsrRd_Ia32TscDeadline,
5403 cpumMsrRd_Ia32X2ApicN,
5404 cpumMsrRd_Ia32DebugInterface,
5405 cpumMsrRd_Ia32VmxBasic,
5406 cpumMsrRd_Ia32VmxPinbasedCtls,
5407 cpumMsrRd_Ia32VmxProcbasedCtls,
5408 cpumMsrRd_Ia32VmxExitCtls,
5409 cpumMsrRd_Ia32VmxEntryCtls,
5410 cpumMsrRd_Ia32VmxMisc,
5411 cpumMsrRd_Ia32VmxCr0Fixed0,
5412 cpumMsrRd_Ia32VmxCr0Fixed1,
5413 cpumMsrRd_Ia32VmxCr4Fixed0,
5414 cpumMsrRd_Ia32VmxCr4Fixed1,
5415 cpumMsrRd_Ia32VmxVmcsEnum,
5416 cpumMsrRd_Ia32VmxProcBasedCtls2,
5417 cpumMsrRd_Ia32VmxEptVpidCap,
5418 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5419 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5420 cpumMsrRd_Ia32VmxTrueExitCtls,
5421 cpumMsrRd_Ia32VmxTrueEntryCtls,
5422 cpumMsrRd_Ia32VmxVmFunc,
5423 cpumMsrRd_Ia32SpecCtrl,
5424 cpumMsrRd_Ia32ArchCapabilities,
5425
5426 cpumMsrRd_Amd64Efer,
5427 cpumMsrRd_Amd64SyscallTarget,
5428 cpumMsrRd_Amd64LongSyscallTarget,
5429 cpumMsrRd_Amd64CompSyscallTarget,
5430 cpumMsrRd_Amd64SyscallFlagMask,
5431 cpumMsrRd_Amd64FsBase,
5432 cpumMsrRd_Amd64GsBase,
5433 cpumMsrRd_Amd64KernelGsBase,
5434 cpumMsrRd_Amd64TscAux,
5435
5436 cpumMsrRd_IntelEblCrPowerOn,
5437 cpumMsrRd_IntelI7CoreThreadCount,
5438 cpumMsrRd_IntelP4EbcHardPowerOn,
5439 cpumMsrRd_IntelP4EbcSoftPowerOn,
5440 cpumMsrRd_IntelP4EbcFrequencyId,
5441 cpumMsrRd_IntelP6FsbFrequency,
5442 cpumMsrRd_IntelPlatformInfo,
5443 cpumMsrRd_IntelFlexRatio,
5444 cpumMsrRd_IntelPkgCStConfigControl,
5445 cpumMsrRd_IntelPmgIoCaptureBase,
5446 cpumMsrRd_IntelLastBranchFromToN,
5447 cpumMsrRd_IntelLastBranchFromN,
5448 cpumMsrRd_IntelLastBranchToN,
5449 cpumMsrRd_IntelLastBranchTos,
5450 cpumMsrRd_IntelBblCrCtl,
5451 cpumMsrRd_IntelBblCrCtl3,
5452 cpumMsrRd_IntelI7TemperatureTarget,
5453 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5454 cpumMsrRd_IntelI7MiscPwrMgmt,
5455 cpumMsrRd_IntelP6CrN,
5456 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5457 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5458 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5459 cpumMsrRd_IntelI7SandyAesNiCtl,
5460 cpumMsrRd_IntelI7TurboRatioLimit,
5461 cpumMsrRd_IntelI7LbrSelect,
5462 cpumMsrRd_IntelI7SandyErrorControl,
5463 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5464 cpumMsrRd_IntelI7PowerCtl,
5465 cpumMsrRd_IntelI7SandyPebsNumAlt,
5466 cpumMsrRd_IntelI7PebsLdLat,
5467 cpumMsrRd_IntelI7PkgCnResidencyN,
5468 cpumMsrRd_IntelI7CoreCnResidencyN,
5469 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5470 cpumMsrRd_IntelI7SandyVrMiscConfig,
5471 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5472 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5473 cpumMsrRd_IntelI7SandyPkgC2Residency,
5474 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5475 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5476 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5477 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5478 cpumMsrRd_IntelI7RaplDramPowerLimit,
5479 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5480 cpumMsrRd_IntelI7RaplDramPerfStatus,
5481 cpumMsrRd_IntelI7RaplDramPowerInfo,
5482 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5483 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5484 cpumMsrRd_IntelI7RaplPp0Policy,
5485 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5486 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5487 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5488 cpumMsrRd_IntelI7RaplPp1Policy,
5489 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5490 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5491 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5492 cpumMsrRd_IntelI7IvyConfigTdpControl,
5493 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5494 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5495 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5496 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5497 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5498 cpumMsrRd_IntelI7UncPerfFixedCtr,
5499 cpumMsrRd_IntelI7UncCBoxConfig,
5500 cpumMsrRd_IntelI7UncArbPerfCtrN,
5501 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5502 cpumMsrRd_IntelI7SmiCount,
5503 cpumMsrRd_IntelCore2EmttmCrTablesN,
5504 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5505 cpumMsrRd_IntelCore1ExtConfig,
5506 cpumMsrRd_IntelCore1DtsCalControl,
5507 cpumMsrRd_IntelCore2PeciControl,
5508 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5509
5510 cpumMsrRd_P6LastBranchFromIp,
5511 cpumMsrRd_P6LastBranchToIp,
5512 cpumMsrRd_P6LastIntFromIp,
5513 cpumMsrRd_P6LastIntToIp,
5514
5515 cpumMsrRd_AmdFam15hTscRate,
5516 cpumMsrRd_AmdFam15hLwpCfg,
5517 cpumMsrRd_AmdFam15hLwpCbAddr,
5518 cpumMsrRd_AmdFam10hMc4MiscN,
5519 cpumMsrRd_AmdK8PerfCtlN,
5520 cpumMsrRd_AmdK8PerfCtrN,
5521 cpumMsrRd_AmdK8SysCfg,
5522 cpumMsrRd_AmdK8HwCr,
5523 cpumMsrRd_AmdK8IorrBaseN,
5524 cpumMsrRd_AmdK8IorrMaskN,
5525 cpumMsrRd_AmdK8TopOfMemN,
5526 cpumMsrRd_AmdK8NbCfg1,
5527 cpumMsrRd_AmdK8McXcptRedir,
5528 cpumMsrRd_AmdK8CpuNameN,
5529 cpumMsrRd_AmdK8HwThermalCtrl,
5530 cpumMsrRd_AmdK8SwThermalCtrl,
5531 cpumMsrRd_AmdK8FidVidControl,
5532 cpumMsrRd_AmdK8FidVidStatus,
5533 cpumMsrRd_AmdK8McCtlMaskN,
5534 cpumMsrRd_AmdK8SmiOnIoTrapN,
5535 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5536 cpumMsrRd_AmdK8IntPendingMessage,
5537 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5538 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5539 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5540 cpumMsrRd_AmdFam10hPStateCurLimit,
5541 cpumMsrRd_AmdFam10hPStateControl,
5542 cpumMsrRd_AmdFam10hPStateStatus,
5543 cpumMsrRd_AmdFam10hPStateN,
5544 cpumMsrRd_AmdFam10hCofVidControl,
5545 cpumMsrRd_AmdFam10hCofVidStatus,
5546 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5547 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5548 cpumMsrRd_AmdK8SmmBase,
5549 cpumMsrRd_AmdK8SmmAddr,
5550 cpumMsrRd_AmdK8SmmMask,
5551 cpumMsrRd_AmdK8VmCr,
5552 cpumMsrRd_AmdK8IgnNe,
5553 cpumMsrRd_AmdK8SmmCtl,
5554 cpumMsrRd_AmdK8VmHSavePa,
5555 cpumMsrRd_AmdFam10hVmLockKey,
5556 cpumMsrRd_AmdFam10hSmmLockKey,
5557 cpumMsrRd_AmdFam10hLocalSmiStatus,
5558 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5559 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5560 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5561 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5562 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5563 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5564 cpumMsrRd_AmdK7MicrocodeCtl,
5565 cpumMsrRd_AmdK7ClusterIdMaybe,
5566 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5567 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5568 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5569 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5570 cpumMsrRd_AmdK8PatchLevel,
5571 cpumMsrRd_AmdK7DebugStatusMaybe,
5572 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5573 cpumMsrRd_AmdK7BHTracePtrMaybe,
5574 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5575 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5576 cpumMsrRd_AmdK7FastFlushCountMaybe,
5577 cpumMsrRd_AmdK7NodeId,
5578 cpumMsrRd_AmdK7DrXAddrMaskN,
5579 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5580 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5581 cpumMsrRd_AmdK7LoadStoreCfg,
5582 cpumMsrRd_AmdK7InstrCacheCfg,
5583 cpumMsrRd_AmdK7DataCacheCfg,
5584 cpumMsrRd_AmdK7BusUnitCfg,
5585 cpumMsrRd_AmdK7DebugCtl2Maybe,
5586 cpumMsrRd_AmdFam15hFpuCfg,
5587 cpumMsrRd_AmdFam15hDecoderCfg,
5588 cpumMsrRd_AmdFam10hBusUnitCfg2,
5589 cpumMsrRd_AmdFam15hCombUnitCfg,
5590 cpumMsrRd_AmdFam15hCombUnitCfg2,
5591 cpumMsrRd_AmdFam15hCombUnitCfg3,
5592 cpumMsrRd_AmdFam15hExecUnitCfg,
5593 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5594 cpumMsrRd_AmdFam10hIbsFetchCtl,
5595 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5596 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5597 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5598 cpumMsrRd_AmdFam10hIbsOpRip,
5599 cpumMsrRd_AmdFam10hIbsOpData,
5600 cpumMsrRd_AmdFam10hIbsOpData2,
5601 cpumMsrRd_AmdFam10hIbsOpData3,
5602 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5603 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5604 cpumMsrRd_AmdFam10hIbsCtl,
5605 cpumMsrRd_AmdFam14hIbsBrTarget,
5606
5607 cpumMsrRd_Gim
5608};
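/* Note: the table is sized with kCpumMsrRdFn_End and is indexed directly by the
   MSR read-function enum, so the initializers above must provide exactly one
   entry per enum value and stay in the enum's order; the write-function table
   below follows the same rule with kCpumMsrWrFn_End. */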
5609
5610
5611/**
5612 * MSR write function table.
5613 */
5614static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5615{
5616 NULL, /* Invalid */
5617 cpumMsrWr_IgnoreWrite,
5618 cpumMsrWr_ReadOnly,
5619 NULL, /* Alias */
5620 cpumMsrWr_Ia32P5McAddr,
5621 cpumMsrWr_Ia32P5McType,
5622 cpumMsrWr_Ia32TimestampCounter,
5623 cpumMsrWr_Ia32ApicBase,
5624 cpumMsrWr_Ia32FeatureControl,
5625 cpumMsrWr_Ia32BiosSignId,
5626 cpumMsrWr_Ia32BiosUpdateTrigger,
5627 cpumMsrWr_Ia32SmmMonitorCtl,
5628 cpumMsrWr_Ia32PmcN,
5629 cpumMsrWr_Ia32MonitorFilterLineSize,
5630 cpumMsrWr_Ia32MPerf,
5631 cpumMsrWr_Ia32APerf,
5632 cpumMsrWr_Ia32MtrrPhysBaseN,
5633 cpumMsrWr_Ia32MtrrPhysMaskN,
5634 cpumMsrWr_Ia32MtrrFixed,
5635 cpumMsrWr_Ia32MtrrDefType,
5636 cpumMsrWr_Ia32Pat,
5637 cpumMsrWr_Ia32SysEnterCs,
5638 cpumMsrWr_Ia32SysEnterEsp,
5639 cpumMsrWr_Ia32SysEnterEip,
5640 cpumMsrWr_Ia32McgStatus,
5641 cpumMsrWr_Ia32McgCtl,
5642 cpumMsrWr_Ia32DebugCtl,
5643 cpumMsrWr_Ia32SmrrPhysBase,
5644 cpumMsrWr_Ia32SmrrPhysMask,
5645 cpumMsrWr_Ia32PlatformDcaCap,
5646 cpumMsrWr_Ia32Dca0Cap,
5647 cpumMsrWr_Ia32PerfEvtSelN,
5648 cpumMsrWr_Ia32PerfStatus,
5649 cpumMsrWr_Ia32PerfCtl,
5650 cpumMsrWr_Ia32FixedCtrN,
5651 cpumMsrWr_Ia32PerfCapabilities,
5652 cpumMsrWr_Ia32FixedCtrCtrl,
5653 cpumMsrWr_Ia32PerfGlobalStatus,
5654 cpumMsrWr_Ia32PerfGlobalCtrl,
5655 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5656 cpumMsrWr_Ia32PebsEnable,
5657 cpumMsrWr_Ia32ClockModulation,
5658 cpumMsrWr_Ia32ThermInterrupt,
5659 cpumMsrWr_Ia32ThermStatus,
5660 cpumMsrWr_Ia32Therm2Ctl,
5661 cpumMsrWr_Ia32MiscEnable,
5662 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5663 cpumMsrWr_Ia32McNCtl2,
5664 cpumMsrWr_Ia32DsArea,
5665 cpumMsrWr_Ia32TscDeadline,
5666 cpumMsrWr_Ia32X2ApicN,
5667 cpumMsrWr_Ia32DebugInterface,
5668 cpumMsrWr_Ia32SpecCtrl,
5669 cpumMsrWr_Ia32PredCmd,
5670
5671 cpumMsrWr_Amd64Efer,
5672 cpumMsrWr_Amd64SyscallTarget,
5673 cpumMsrWr_Amd64LongSyscallTarget,
5674 cpumMsrWr_Amd64CompSyscallTarget,
5675 cpumMsrWr_Amd64SyscallFlagMask,
5676 cpumMsrWr_Amd64FsBase,
5677 cpumMsrWr_Amd64GsBase,
5678 cpumMsrWr_Amd64KernelGsBase,
5679 cpumMsrWr_Amd64TscAux,
5680
5681 cpumMsrWr_IntelEblCrPowerOn,
5682 cpumMsrWr_IntelP4EbcHardPowerOn,
5683 cpumMsrWr_IntelP4EbcSoftPowerOn,
5684 cpumMsrWr_IntelP4EbcFrequencyId,
5685 cpumMsrWr_IntelFlexRatio,
5686 cpumMsrWr_IntelPkgCStConfigControl,
5687 cpumMsrWr_IntelPmgIoCaptureBase,
5688 cpumMsrWr_IntelLastBranchFromToN,
5689 cpumMsrWr_IntelLastBranchFromN,
5690 cpumMsrWr_IntelLastBranchToN,
5691 cpumMsrWr_IntelLastBranchTos,
5692 cpumMsrWr_IntelBblCrCtl,
5693 cpumMsrWr_IntelBblCrCtl3,
5694 cpumMsrWr_IntelI7TemperatureTarget,
5695 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5696 cpumMsrWr_IntelI7MiscPwrMgmt,
5697 cpumMsrWr_IntelP6CrN,
5698 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5699 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5700 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5701 cpumMsrWr_IntelI7SandyAesNiCtl,
5702 cpumMsrWr_IntelI7TurboRatioLimit,
5703 cpumMsrWr_IntelI7LbrSelect,
5704 cpumMsrWr_IntelI7SandyErrorControl,
5705 cpumMsrWr_IntelI7PowerCtl,
5706 cpumMsrWr_IntelI7SandyPebsNumAlt,
5707 cpumMsrWr_IntelI7PebsLdLat,
5708 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5709 cpumMsrWr_IntelI7SandyVrMiscConfig,
5710 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5711 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5712 cpumMsrWr_IntelI7SandyPkgC2Residency,
5713 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5714 cpumMsrWr_IntelI7RaplDramPowerLimit,
5715 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5716 cpumMsrWr_IntelI7RaplPp0Policy,
5717 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5718 cpumMsrWr_IntelI7RaplPp1Policy,
5719 cpumMsrWr_IntelI7IvyConfigTdpControl,
5720 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5721 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5722 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5723 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5724 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5725 cpumMsrWr_IntelI7UncPerfFixedCtr,
5726 cpumMsrWr_IntelI7UncArbPerfCtrN,
5727 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5728 cpumMsrWr_IntelCore2EmttmCrTablesN,
5729 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5730 cpumMsrWr_IntelCore1ExtConfig,
5731 cpumMsrWr_IntelCore1DtsCalControl,
5732 cpumMsrWr_IntelCore2PeciControl,
5733
5734 cpumMsrWr_P6LastIntFromIp,
5735 cpumMsrWr_P6LastIntToIp,
5736
5737 cpumMsrWr_AmdFam15hTscRate,
5738 cpumMsrWr_AmdFam15hLwpCfg,
5739 cpumMsrWr_AmdFam15hLwpCbAddr,
5740 cpumMsrWr_AmdFam10hMc4MiscN,
5741 cpumMsrWr_AmdK8PerfCtlN,
5742 cpumMsrWr_AmdK8PerfCtrN,
5743 cpumMsrWr_AmdK8SysCfg,
5744 cpumMsrWr_AmdK8HwCr,
5745 cpumMsrWr_AmdK8IorrBaseN,
5746 cpumMsrWr_AmdK8IorrMaskN,
5747 cpumMsrWr_AmdK8TopOfMemN,
5748 cpumMsrWr_AmdK8NbCfg1,
5749 cpumMsrWr_AmdK8McXcptRedir,
5750 cpumMsrWr_AmdK8CpuNameN,
5751 cpumMsrWr_AmdK8HwThermalCtrl,
5752 cpumMsrWr_AmdK8SwThermalCtrl,
5753 cpumMsrWr_AmdK8FidVidControl,
5754 cpumMsrWr_AmdK8McCtlMaskN,
5755 cpumMsrWr_AmdK8SmiOnIoTrapN,
5756 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5757 cpumMsrWr_AmdK8IntPendingMessage,
5758 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5759 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5760 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5761 cpumMsrWr_AmdFam10hPStateControl,
5762 cpumMsrWr_AmdFam10hPStateStatus,
5763 cpumMsrWr_AmdFam10hPStateN,
5764 cpumMsrWr_AmdFam10hCofVidControl,
5765 cpumMsrWr_AmdFam10hCofVidStatus,
5766 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5767 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5768 cpumMsrWr_AmdK8SmmBase,
5769 cpumMsrWr_AmdK8SmmAddr,
5770 cpumMsrWr_AmdK8SmmMask,
5771 cpumMsrWr_AmdK8VmCr,
5772 cpumMsrWr_AmdK8IgnNe,
5773 cpumMsrWr_AmdK8SmmCtl,
5774 cpumMsrWr_AmdK8VmHSavePa,
5775 cpumMsrWr_AmdFam10hVmLockKey,
5776 cpumMsrWr_AmdFam10hSmmLockKey,
5777 cpumMsrWr_AmdFam10hLocalSmiStatus,
5778 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5779 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5780 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5781 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5782 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5783 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5784 cpumMsrWr_AmdK7MicrocodeCtl,
5785 cpumMsrWr_AmdK7ClusterIdMaybe,
5786 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5787 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5788 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5789 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5790 cpumMsrWr_AmdK8PatchLoader,
5791 cpumMsrWr_AmdK7DebugStatusMaybe,
5792 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5793 cpumMsrWr_AmdK7BHTracePtrMaybe,
5794 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5795 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5796 cpumMsrWr_AmdK7FastFlushCountMaybe,
5797 cpumMsrWr_AmdK7NodeId,
5798 cpumMsrWr_AmdK7DrXAddrMaskN,
5799 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5800 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5801 cpumMsrWr_AmdK7LoadStoreCfg,
5802 cpumMsrWr_AmdK7InstrCacheCfg,
5803 cpumMsrWr_AmdK7DataCacheCfg,
5804 cpumMsrWr_AmdK7BusUnitCfg,
5805 cpumMsrWr_AmdK7DebugCtl2Maybe,
5806 cpumMsrWr_AmdFam15hFpuCfg,
5807 cpumMsrWr_AmdFam15hDecoderCfg,
5808 cpumMsrWr_AmdFam10hBusUnitCfg2,
5809 cpumMsrWr_AmdFam15hCombUnitCfg,
5810 cpumMsrWr_AmdFam15hCombUnitCfg2,
5811 cpumMsrWr_AmdFam15hCombUnitCfg3,
5812 cpumMsrWr_AmdFam15hExecUnitCfg,
5813 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5814 cpumMsrWr_AmdFam10hIbsFetchCtl,
5815 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5816 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5817 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5818 cpumMsrWr_AmdFam10hIbsOpRip,
5819 cpumMsrWr_AmdFam10hIbsOpData,
5820 cpumMsrWr_AmdFam10hIbsOpData2,
5821 cpumMsrWr_AmdFam10hIbsOpData3,
5822 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5823 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5824 cpumMsrWr_AmdFam10hIbsCtl,
5825 cpumMsrWr_AmdFam14hIbsBrTarget,
5826
5827 cpumMsrWr_Gim
5828};
5829
5830
5831/**
5832 * Looks up the range for the given MSR.
5833 *
5834 * @returns Pointer to the range if found, NULL if not.
5835 * @param pVM The cross context VM structure.
5836 * @param idMsr The MSR to look up.
5837 */
5838# ifndef IN_RING3
5839static
5840# endif
5841PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5842{
5843 /*
5844 * Binary lookup.
5845 */
5846 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5847 if (!cRanges)
5848 return NULL;
5849 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5850 for (;;)
5851 {
5852 uint32_t i = cRanges / 2;
5853 if (idMsr < paRanges[i].uFirst)
5854 {
5855 if (i == 0)
5856 break;
5857 cRanges = i;
5858 }
5859 else if (idMsr > paRanges[i].uLast)
5860 {
5861 i++;
5862 if (i >= cRanges)
5863 break;
5864 cRanges -= i;
5865 paRanges = &paRanges[i];
5866 }
5867 else
5868 {
5869 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5870 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5871 return &paRanges[i];
5872 }
5873 }
5874
5875# ifdef VBOX_STRICT
5876 /*
5877 * Linear lookup to verify the above binary search.
5878 */
5879 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5880 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5881 while (cLeft-- > 0)
5882 {
5883 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5884 {
5885 AssertFailed();
5886 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5887 return cpumLookupMsrRange(pVM, pCur->uValue);
5888 return pCur;
5889 }
5890 pCur++;
5891 }
5892# endif
5893 return NULL;
5894}
5895
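/*
 * Editorial example (hedged sketch, not part of the original source): the lookup
 * above relies on the MSR range table being sorted by uFirst and free of overlaps,
 * and a kCpumMsrRdFn_MsrAlias entry redirects the search to the MSR number kept in
 * its uValue field.  A minimal illustration of the caller-visible contract; the
 * helper name is hypothetical.
 */
#if 0 /* illustrative only, not compiled */
static bool exampleIsMsrKnown(PVM pVM, uint32_t idMsr) /* hypothetical helper */
{
    PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
    /* NULL means the MSR is not covered by any range; CPUMQueryGuestMsr and
       CPUMSetGuestMsr below will then treat it as unknown and raise #GP(0). */
    return pRange != NULL;
}
#endif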
5896
5897/**
5898 * Query a guest MSR.
5899 *
5900 * The caller is responsible for checking privilege if the call is the result of
5901 * a RDMSR instruction. We'll do the rest.
5902 *
5903 * @retval VINF_SUCCESS on success.
5904 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5905 * current context (raw-mode or ring-0).
5906 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5907 * expected to take the appropriate actions. @a *puValue is set to 0.
5908 * @param pVCpu The cross context virtual CPU structure.
5909 * @param idMsr The MSR.
5910 * @param puValue Where to return the value.
5911 *
5912 * @remarks This will always return the right values, even when we're in the
5913 * recompiler.
5914 */
5915VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5916{
5917 *puValue = 0;
5918
5919 VBOXSTRICTRC rcStrict;
5920 PVM pVM = pVCpu->CTX_SUFF(pVM);
5921 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5922 if (pRange)
5923 {
5924 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5925 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5926
5927 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5928 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5929
5930 STAM_COUNTER_INC(&pRange->cReads);
5931 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5932
5933 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5934 if (rcStrict == VINF_SUCCESS)
5935 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5936 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5937 {
5938 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5939 STAM_COUNTER_INC(&pRange->cGps);
5940 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5941 }
5942#ifndef IN_RING3
5943 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5944 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5945#endif
5946 else
5947 {
5948 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5949 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5950 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5951 Assert(rcStrict != VERR_EM_INTERPRETER);
5952 }
5953 }
5954 else
5955 {
5956 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5957 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5958 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5959 rcStrict = VERR_CPUM_RAISE_GP_0;
5960 }
5961 return rcStrict;
5962}
5963
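/*
 * Editorial example (hedged sketch, not part of the original source): how an RDMSR
 * exit handler might dispatch through CPUMQueryGuestMsr and react to the status
 * codes documented above.  The helper function and the raw MSR number 0x10 (the
 * architectural IA32_TIME_STAMP_COUNTER) are illustrative assumptions only.
 */
#if 0 /* illustrative only, not compiled */
static VBOXSTRICTRC exampleReadGuestTsc(PVMCPU pVCpu, uint64_t *puValue) /* hypothetical */
{
    VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, 0x00000010 /* IA32_TIME_STAMP_COUNTER */, puValue);
    if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {
        /* Unknown or invalid MSR: a real exit handler would inject #GP(0) into the guest. */
    }
# ifndef IN_RING3
    else if (rcStrict == VINF_CPUM_R3_MSR_READ)
    {
        /* Cannot be serviced in this context: a real handler would defer the access to ring-3. */
    }
# endif
    return rcStrict;
}
#endif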
5964
5965/**
5966 * Writes to a guest MSR.
5967 *
5968 * The caller is responsible for checking privilege if the call is the result of
5969 * a WRMSR instruction. We'll do the rest.
5970 *
5971 * @retval VINF_SUCCESS on success.
5972 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
5973 * current context (raw-mode or ring-0).
5974 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
5975 * appropriate actions.
5976 *
5977 * @param pVCpu The cross context virtual CPU structure.
5978 * @param idMsr The MSR id.
5979 * @param uValue The value to set.
5980 *
5981 * @remarks Everyone changing MSR values, including the recompiler, shall do it
5982 * by calling this method. This makes sure we have current values and
5983 * that we trigger all the right actions when something changes.
5984 *
5985 * For performance reasons, this actually isn't entirely true for some
5986 * MSRs when in HM mode. The code here and in HM must be aware of
5987 * this.
5988 */
5989VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
5990{
5991 VBOXSTRICTRC rcStrict;
5992 PVM pVM = pVCpu->CTX_SUFF(pVM);
5993 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5994 if (pRange)
5995 {
5996 STAM_COUNTER_INC(&pRange->cWrites);
5997 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
5998
5999 if (!(uValue & pRange->fWrGpMask))
6000 {
6001 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6002 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6003
6004 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6005 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6006
6007 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6008 if (uValueAdjusted != uValue)
6009 {
6010 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6011 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6012 }
6013
6014 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6015 if (rcStrict == VINF_SUCCESS)
6016 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6017 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6018 {
6019 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6020 STAM_COUNTER_INC(&pRange->cGps);
6021 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6022 }
6023#ifndef IN_RING3
6024 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6025 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6026#endif
6027 else
6028 {
6029 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6030 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6031 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6032 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6033 Assert(rcStrict != VERR_EM_INTERPRETER);
6034 }
6035 }
6036 else
6037 {
6038 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6039 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6040 STAM_COUNTER_INC(&pRange->cGps);
6041 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6042 rcStrict = VERR_CPUM_RAISE_GP_0;
6043 }
6044 }
6045 else
6046 {
6047 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6048 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6049 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6050 rcStrict = VERR_CPUM_RAISE_GP_0;
6051 }
6052 return rcStrict;
6053}
6054
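/*
 * Editorial example (hedged sketch, not part of the original source): the WRMSR side
 * of the same pattern.  The raw MSR number 0xc0000103 (TSC_AUX) and the helper name
 * are illustrative assumptions.
 */
#if 0 /* illustrative only, not compiled */
static VBOXSTRICTRC exampleWriteGuestTscAux(PVMCPU pVCpu, uint64_t uValue) /* hypothetical */
{
    VBOXSTRICTRC rcStrict = CPUMSetGuestMsr(pVCpu, 0xc0000103 /* TSC_AUX */, uValue);
    if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {
        /* Reserved bits were set (fWrGpMask) or the write was otherwise rejected:
           a real exit handler would inject #GP(0) into the guest. */
    }
# ifndef IN_RING3
    else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
    {
        /* Must be completed in ring-3. */
    }
# endif
    return rcStrict;
}
#endif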
6055
6056#if defined(VBOX_STRICT) && defined(IN_RING3)
6057/**
6058 * Performs some checks on the static data related to MSRs.
6059 *
6060 * @returns VINF_SUCCESS on success, error on failure.
6061 */
6062int cpumR3MsrStrictInitChecks(void)
6063{
6064#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6065 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6066#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6067 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6068
6069 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6070 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6071 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6072 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6073 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6074 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6075 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6076 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6077 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6078 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6079 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6080 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6081 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6082 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6083 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6084 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6085 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6086 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6087 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6088 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6089 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6090 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6091 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6092 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6093 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6094 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6095 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6096 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6097 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6098 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6099 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6100 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6101 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6102 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6103 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6104 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6105 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6106 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6107 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6108 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6109 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6110 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6111 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6112 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6113 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6114 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6115 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6116 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6117 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6141 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6142
6143 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6144 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6145 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6146 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6147 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6148 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6149 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6150 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6151 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6152
6153 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6154 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6155 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6156 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6157 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6158 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6159 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6160 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6161 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6162 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6163 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6164 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6165 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6166 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6167 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6168 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6169 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6170 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6171 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6172 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6173 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6174 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6175 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6176 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6177 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6178 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6179 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6180 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6181 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6182 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6183 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6184 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6185 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6186 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6187 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6188 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6189 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6190 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6191 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6192 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6193 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6194 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6195 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6196 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6197 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6198 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6199 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6200 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6201 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6202 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6203 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6204 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6205 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6206 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6207 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6208 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6209 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6210 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6211 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6212 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6213 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6214 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6218 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6219 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6220 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6221 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6222 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6223 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6224 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6225 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6226
6227 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6228 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6229 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6230 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6231
6232 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6233 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6234 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6235 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6236 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6237 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6238 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6239 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6240 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6241 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6242 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6243 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6244 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6245 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6246 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6247 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6248 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6249 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6250 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6251 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6252 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6253 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6254 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6255 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6256 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6257 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6258 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6259 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6260 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6261 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6262 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6263 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6264 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6265 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6266 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6267 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6268 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6269 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6270 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6271 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6272 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6273 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6274 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6275 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6276 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6277 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6278 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6279 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6280 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6281 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6282 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6283 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6284 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6285 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6292 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6293 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6294 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6295 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6296 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6297 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6298 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6299 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6300 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6301 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6302 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6303 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6304 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6305 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6306 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6307 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6308 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6311 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6312 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6313 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6314 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6315 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6316 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6317 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6318 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6319 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6320 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6321 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6322 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6323
6324 CPUM_ASSERT_RD_MSR_FN(Gim)
6325
6326 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6327 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6328 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6329 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6330 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6331 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6332 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6333 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6334 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6335 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6336 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6337 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6338 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6339 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6340 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6341 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6342 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6343 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6344 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6345 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6346 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6347 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6348 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6349 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6350 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6351 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6352 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6353 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6354 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6355 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6356 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6357 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6358 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6359 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6360 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6361 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6362 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6363 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6364 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6365 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6366 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6367 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6368 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6369 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6370 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6371 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6372 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6375 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6376
6377 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6378 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6379 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6380 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6381 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6382 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6383 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6384 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6385 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6386
6387 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6388 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6389 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6390 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6391 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6392 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6393 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6394 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6395 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6396 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6397 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6398 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6399 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6400 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6401 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6402 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6403 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6404 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6405 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6406 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6407 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6408 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6409 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6410 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6411 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6412 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6413 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6414 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6415 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6416 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6417 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6418 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6419 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6420 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6421 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6422 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6423 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6424 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6425 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6426 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6427 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6428 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6429 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6430 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6431 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6432 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6433 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6434 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6435 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6436 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6437 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6438
6439 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6440 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6441
6442 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6443 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6444 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6445 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6446 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6447 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6448 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6449 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6450 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6451 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6452 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6453 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6454 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6455 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6456 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6457 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6458 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6459 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6460 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6461 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6462 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6463 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6464 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6465 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6466 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6467 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6468 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6469 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6470 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6471 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6472 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6473 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6474 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6475 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6476 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6477 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6478 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6479 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6480 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6481 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6482 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6483 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6484 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6485 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6486 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6487 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6488 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6489 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6490 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6491 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6492 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6493 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6501 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6502 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6503 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6504 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6505 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6506 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6507 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6508 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6509 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6510 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6511 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6512 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6513 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6514 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6515 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6516 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6519 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6520 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6521 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6522 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6523 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6524 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6525 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6526 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6527 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6528 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6529 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6530 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6531
6532 CPUM_ASSERT_WR_MSR_FN(Gim);
6533
6534 return VINF_SUCCESS;
6535}
6536#endif /* VBOX_STRICT && IN_RING3 */
6537
6538
6539/**
6540 * Gets the scalable bus frequency.
6541 *
6542 * The bus frequency is used as a base in several MSRs that give the CPU and
6543 * other frequency ratios.
6544 *
6545 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6546 * @param pVM The cross context VM structure.
6547 */
6548VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6549{
6550 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6551 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6552 uFreq = CPUM_SBUSFREQ_100MHZ;
6553 return uFreq;
6554}
6555
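/*
 * Editorial example (hedged sketch, not part of the original source): several MSR
 * readers in this file report CPU frequencies as a multiple of the scalable bus
 * frequency.  With a hypothetical non-turbo ratio of 20 on the 100 MHz fallback bus
 * clock this works out to 2 GHz:
 */
#if 0 /* illustrative only, not compiled */
static uint64_t exampleRatioToHz(PVM pVM, uint32_t uRatio) /* hypothetical helper */
{
    uint64_t const uBusHz = CPUMGetGuestScalableBusFrequency(pVM); /* never CPUM_SBUSFREQ_UNKNOWN */
    return uBusHz * uRatio;  /* e.g. 100 MHz * 20 = 2 GHz */
}
#endif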
6556
6557/**
6558 * Sets the guest EFER MSR without performing any additional checks.
6559 *
6560 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6561 * @param uOldEfer The previous EFER MSR value.
6562 * @param uValidEfer The new, validated EFER MSR value.
6563 *
6564 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate the
6565 * new EFER value before calling this function to perform the actual EFER transition.
6566 */
6567VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6568{
6569 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6570
6571 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6572 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6573 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6574 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6575 {
6576 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6577 HMFlushTLB(pVCpu);
6578
6579 /* Notify PGM about NXE changes. */
6580 if ( (uOldEfer & MSR_K6_EFER_NXE)
6581 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6582 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6583 }
6584}
6585
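/*
 * Editorial example (hedged sketch, not part of the original source): the
 * validate-then-apply pattern described in the remarks above, combining
 * CPUMIsGuestEferMsrWriteValid() with this function.  The wrapper is hypothetical.
 */
#if 0 /* illustrative only, not compiled */
static VBOXSTRICTRC exampleWriteEfer(PVM pVM, PVMCPU pVCpu, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer)
{
    uint64_t uValidEfer;
    int rc = CPUMIsGuestEferMsrWriteValid(pVM, uCr0, uOldEfer, uNewEfer, &uValidEfer);
    if (RT_FAILURE(rc))
        return VERR_CPUM_RAISE_GP_0;            /* reserved bit set or illegal LME transition */
    CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
    return VINF_SUCCESS;
}
#endif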
6586
6587/**
6588 * Checks if a guest PAT MSR write is valid.
6589 *
6590 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6591 * @param uValue The PAT MSR value.
6592 */
6593VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6594{
6595 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6596 {
6597 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6598 uint8_t uType = (uint8_t)(uValue >> cShift);
6599 if ((uType >= 8) || (uType == 2) || (uType == 3))
6600 {
6601 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#x)\n", cShift + 7, cShift, uValue, uType));
6602 return false;
6603 }
6604 }
6605 return true;
6606}
6607
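/*
 * Editorial example (hedged sketch, not part of the original source): the power-on
 * default PAT value only uses defined memory types and passes the check, while any
 * entry using the reserved types 2 or 3, or with a reserved bit set (type >= 8),
 * is rejected.  The wrapper function is hypothetical.
 */
#if 0 /* illustrative only, not compiled */
static void exampleCheckPat(void) /* hypothetical */
{
    Assert( CPUMIsPatMsrValid(UINT64_C(0x0007040600070406))); /* power-on default value */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0007040600070402))); /* lowest entry uses reserved type 2 */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0007040600078406))); /* reserved bit set in the second entry */
}
#endif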
6608
6609/**
6610 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6611 *
6612 * @returns VBox status code.
6613 * @param pVM The cross context VM structure.
6614 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6615 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6616 * @param uNewEfer The new EFER MSR value being written.
6617 * @param puValidEfer Where to store the validated EFER (only updated if
6618 * this function returns VINF_SUCCESS).
6619 */
6620VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6621{
6622 /* #GP(0) If anything outside the allowed bits is set. */
6623 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6624 if (uNewEfer & ~fMask)
6625 {
6626 Log(("CPUM: Setting disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6627 return VERR_CPUM_RAISE_GP_0;
6628 }
6629
6630 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6631 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6632 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6633 && (uCr0 & X86_CR0_PG))
6634 {
6635 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6636 return VERR_CPUM_RAISE_GP_0;
6637 }
6638
6639 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6640 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6641 | MSR_K6_EFER_LME
6642 | MSR_K6_EFER_LMA /* ignored anyway */
6643 | MSR_K6_EFER_SCE
6644 | MSR_K6_EFER_FFXSR
6645 | MSR_K6_EFER_SVME)),
6646 ("Unexpected value %#RX64\n", uNewEfer));
6647
6648 /* Ignore EFER.LMA, it's updated when setting CR0. */
6649 fMask &= ~MSR_K6_EFER_LMA;
6650
6651 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6652 return VINF_SUCCESS;
6653}
6654
6655
6656/**
6657 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6658 *
6659 * @returns Mask of valid EFER bits.
6660 * @param pVM The cross context VM structure.
6661 *
6662 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6663 * rather a read-only bit.
6664 */
6665VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6666{
6667 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6668 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6669 : 0;
6670 uint64_t fMask = 0;
6671 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6672
6673 /* Collect the bits the guest is allowed to change; read-only bits such as LMA are covered by the ignore mask instead. */
6674 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6675 fMask |= MSR_K6_EFER_NXE;
6676 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6677 fMask |= MSR_K6_EFER_LME;
6678 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6679 fMask |= MSR_K6_EFER_SCE;
6680 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6681 fMask |= MSR_K6_EFER_FFXSR;
6682 if (pVM->cpum.s.GuestFeatures.fSvm)
6683 fMask |= MSR_K6_EFER_SVME;
6684
6685 return (fIgnoreMask | fMask);
6686}
6687
6688
6689/**
6690 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6691 *
6692 * @returns The register value.
6693 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6694 * @thread EMT(pVCpu)
6695 */
6696VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6697{
6698 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6699 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6700}
6701
6702
6703/**
6704 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6705 *
6706 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6707 * @param uValue The new value.
6708 * @thread EMT(pVCpu)
6709 */
6710VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6711{
6712 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6713 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6714}
6715
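/*
 * Editorial example (hedged sketch, not part of the original source): how an HM
 * world-switch path might use the fast TSC_AUX accessors to expose the guest value
 * while the guest runs.  ASMRdMsr/ASMWrMsr and the raw MSR number 0xc0000103 are
 * assumptions made for illustration.
 */
#if 0 /* illustrative only, not compiled */
static void exampleSwitchTscAux(PVMCPU pVCpu) /* hypothetical */
{
    uint64_t const uHostTscAux = ASMRdMsr(0xc0000103 /* TSC_AUX */);
    ASMWrMsr(0xc0000103, CPUMGetGuestTscAux(pVCpu));     /* load the guest value before running */
    /* ... run the guest ... */
    CPUMSetGuestTscAux(pVCpu, ASMRdMsr(0xc0000103));     /* pick up whatever the guest wrote */
    ASMWrMsr(0xc0000103, uHostTscAux);                   /* restore the host value */
}
#endif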
6716
6717/**
6718 * Fast way for HM to access the IA32_SPEC_CTRL register.
6719 *
6720 * @returns The register value.
6721 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6722 * @thread EMT(pVCpu)
6723 */
6724VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6725{
6726 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6727}
6728
6729
6730/**
6731 * Fast way for HM to access the IA32_SPEC_CTRL register.
6732 *
6733 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6734 * @param uValue The new value.
6735 * @thread EMT(pVCpu)
6736 */
6737VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6738{
6739 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6740}
6741
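/*
 * Editorial example (hedged sketch, not part of the original source): keeping the
 * cached IA32_SPEC_CTRL value in sync when the guest enables IBRS (architecturally
 * bit 0; STIBP is bit 1 and SSBD bit 2).  RT_BIT_64 is an IPRT convenience macro;
 * the wrapper function is hypothetical.
 */
#if 0 /* illustrative only, not compiled */
static void exampleEnableGuestIbrs(PVMCPU pVCpu) /* hypothetical */
{
    uint64_t uSpecCtrl = CPUMGetGuestSpecCtrl(pVCpu);
    uSpecCtrl |= RT_BIT_64(0);                   /* IBRS */
    CPUMSetGuestSpecCtrl(pVCpu, uSpecCtrl);
}
#endif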