VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp@ 74251

Last change on this file since 74251 was 74171, checked in by vboxsync, 6 years ago

VMM/CPUM, IEM: Nested VMX: bugref:9180 vmlaunch/vmresume bits.

1/* $Id: CPUMAllMsrs.cpp 74171 2018-09-10 07:48:40Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#include <VBox/vmm/tm.h>
28#include <VBox/vmm/gim.h>
29#include "CPUMInternal.h"
30#include <VBox/vmm/vm.h>
31#include <VBox/err.h>
32
33
34/*********************************************************************************************************************************
35* Defined Constants And Macros *
36*********************************************************************************************************************************/
37/**
38 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
39 * pointing to it.
40 *
41 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
42 * correctly.
43 */
44#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
45 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
46 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
47 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
48 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
49 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
50 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
51
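/* Usage sketch for the macro above: it validates pRange->offCpumCpu and then
   declares the named pointer into the per-VCPU CPUMCPU area.  This mirrors the
   Ia32MtrrFixed accessors further down in this file; the function and variable
   names below are made up for illustration.

   @code
       static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_SomeCpumCpuBackedMsr(PVMCPU pVCpu, uint32_t idMsr,
                                                                        PCCPUMMSRRANGE pRange, uint64_t *puValue)
       {
           CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puStoredValue);
           RT_NOREF_PV(idMsr);
           *puValue = *puStoredValue;
           return VINF_SUCCESS;
       }
   @endcode
*/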
52
53/*********************************************************************************************************************************
54* Structures and Typedefs *
55*********************************************************************************************************************************/
56
57/**
58 * Implements reading one or more MSRs.
59 *
60 * @returns VBox status code.
61 * @retval VINF_SUCCESS on success.
62 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
63 * current context (raw-mode or ring-0).
64 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
65 *
66 * @param pVCpu The cross context virtual CPU structure.
67 * @param idMsr The MSR we're reading.
68 * @param pRange The MSR range descriptor.
69 * @param puValue Where to return the value.
70 */
71typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
72/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
73typedef FNCPUMRDMSR *PFNCPUMRDMSR;
74
75
76/**
77 * Implements writing one or more MSRs.
78 *
79 * @retval VINF_SUCCESS on success.
80 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
81 * current context (raw-mode or ring-0).
82 * @retval VERR_CPUM_RAISE_GP_0 on failure.
83 *
84 * @param pVCpu The cross context virtual CPU structure.
85 * @param idMsr The MSR we're writing.
86 * @param pRange The MSR range descriptor.
87 * @param uValue The value to set, ignored bits masked.
88 * @param uRawValue The raw value with the ignored bits not masked.
89 */
90typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
91/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
92typedef FNCPUMWRMSR *PFNCPUMWRMSR;
93
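/* Shape of a typical read/write worker pair (a sketch for orientation only,
   modelled on the Ia32MtrrDefType accessors below): the reader returns the value
   backing the MSR in the guest MSR state, the writer validates the new value and
   either stores it or asks the caller to raise #GP(0) by returning
   VERR_CPUM_RAISE_GP_0.  'SomeMsr' is a hypothetical field name, and the
   reserved-bits check is just an example of a validation rule.

   @code
       static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Example(PVMCPU pVCpu, uint32_t idMsr,
                                                           PCCPUMMSRRANGE pRange, uint64_t *puValue)
       {
           RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
           *puValue = pVCpu->cpum.s.GuestMsrs.msr.SomeMsr;
           return VINF_SUCCESS;
       }

       static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Example(PVMCPU pVCpu, uint32_t idMsr,
                                                           PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
       {
           RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
           if (uValue & UINT64_C(0xffffffff00000000)) /* example: treat the upper 32 bits as reserved */
               return VERR_CPUM_RAISE_GP_0;
           pVCpu->cpum.s.GuestMsrs.msr.SomeMsr = uValue;
           return VINF_SUCCESS;
       }
   @endcode
*/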
94
95
96/*
97 * Generic functions.
98 * Generic functions.
99 * Generic functions.
100 */
101
102
103/** @callback_method_impl{FNCPUMRDMSR} */
104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
105{
106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
107 *puValue = pRange->uValue;
108 return VINF_SUCCESS;
109}
110
111
112/** @callback_method_impl{FNCPUMWRMSR} */
113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
114{
115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
116 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
117 return VINF_SUCCESS;
118}
119
120
121/** @callback_method_impl{FNCPUMRDMSR} */
122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
123{
124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
125 return VERR_CPUM_RAISE_GP_0;
126}
127
128
129/** @callback_method_impl{FNCPUMWRMSR} */
130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
131{
132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
133 Assert(pRange->fWrGpMask == UINT64_MAX);
134 return VERR_CPUM_RAISE_GP_0;
135}
136
137
138
139
140/*
141 * IA32
142 * IA32
143 * IA32
144 */
145
146/** @callback_method_impl{FNCPUMRDMSR} */
147static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
148{
149 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
150 *puValue = 0; /** @todo implement machine check injection. */
151 return VINF_SUCCESS;
152}
153
154
155/** @callback_method_impl{FNCPUMWRMSR} */
156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
157{
158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
159 /** @todo implement machine check injection. */
160 return VINF_SUCCESS;
161}
162
163
164/** @callback_method_impl{FNCPUMRDMSR} */
165static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
166{
167 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
168 *puValue = 0; /** @todo implement machine check injection. */
169 return VINF_SUCCESS;
170}
171
172
173/** @callback_method_impl{FNCPUMWRMSR} */
174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
175{
176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
177 /** @todo implement machine check injection. */
178 return VINF_SUCCESS;
179}
180
181
182/** @callback_method_impl{FNCPUMRDMSR} */
183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
184{
185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
186 *puValue = TMCpuTickGet(pVCpu);
187#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
188 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
189#endif
190 return VINF_SUCCESS;
191}
192
193
194/** @callback_method_impl{FNCPUMWRMSR} */
195static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
196{
197 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
198 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
199 return VINF_SUCCESS;
200}
201
202
203/** @callback_method_impl{FNCPUMRDMSR} */
204static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
205{
206 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
207 uint64_t uValue = pRange->uValue;
208 if (uValue & 0x1f00)
209 {
210 /* Max allowed bus ratio present. */
211 /** @todo Implement scaled BUS frequency. */
212 }
213
214 *puValue = uValue;
215 return VINF_SUCCESS;
216}
217
218
219/** @callback_method_impl{FNCPUMRDMSR} */
220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
221{
222 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
223 return APICGetBaseMsr(pVCpu, puValue);
224}
225
226
227/** @callback_method_impl{FNCPUMWRMSR} */
228static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
229{
230 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
231 return APICSetBaseMsr(pVCpu, uValue);
232}
233
234
235/**
236 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
237 *
238 * @returns Fixed IA32_FEATURE_CONTROL value.
239 * @param pVCpu The cross context per CPU structure.
240 */
241VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
242{
243 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
244 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
245
246 /* Report VMX features. */
247 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
248 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
249
250 return fFeatCtl;
251}
252
253/** @callback_method_impl{FNCPUMRDMSR} */
254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
255{
256 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
257 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
258 return VINF_SUCCESS;
259}
260
261
262/** @callback_method_impl{FNCPUMWRMSR} */
263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
264{
265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
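 /* The lock bit is always reported as set by CPUMGetGuestIa32FeatureControl above,
    so any guest write to IA32_FEATURE_CONTROL is rejected with #GP(0). */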
266 return VERR_CPUM_RAISE_GP_0;
267}
268
269
270/** @callback_method_impl{FNCPUMRDMSR} */
271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
272{
273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
274 /** @todo fake microcode update. */
275 *puValue = pRange->uValue;
276 return VINF_SUCCESS;
277}
278
279
280/** @callback_method_impl{FNCPUMWRMSR} */
281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
282{
283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
284 /* Normally, zero is written to Ia32BiosSignId before reading it in order
285 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
286 of the database entry should take care of most illegal writes for now, so
287 just ignore all writes. */
288 return VINF_SUCCESS;
289}
290
291
292/** @callback_method_impl{FNCPUMWRMSR} */
293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
294{
295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
296 /** @todo Fake bios update trigger better. The value is the address to an
297 * update package, I think. We should probably GP if it's invalid. */
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
304 *
305 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
306 * @param pVCpu The cross context per CPU structure.
307 */
308VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
309{
310 /* We do not support dual-monitor treatment for SMI and SMM. */
311 /** @todo SMM. */
312 RT_NOREF(pVCpu);
313 return 0;
314}
315
316
317/** @callback_method_impl{FNCPUMRDMSR} */
318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
319{
320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
321 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
322 return VINF_SUCCESS;
323}
324
325
326/** @callback_method_impl{FNCPUMWRMSR} */
327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
328{
329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
330 /** @todo SMM. */
331 return VINF_SUCCESS;
332}
333
334
335/** @callback_method_impl{FNCPUMRDMSR} */
336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
337{
338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
339 /** @todo check CPUID leaf 0ah. */
340 *puValue = 0;
341 return VINF_SUCCESS;
342}
343
344
345/** @callback_method_impl{FNCPUMWRMSR} */
346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
347{
348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
349 /** @todo check CPUID leaf 0ah. */
350 return VINF_SUCCESS;
351}
352
353
354/** @callback_method_impl{FNCPUMRDMSR} */
355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
356{
357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
358 /** @todo return 0x1000 if we try to emulate mwait 100% correctly. */
359 *puValue = 0x40; /** @todo Change to CPU cache line size. */
360 return VINF_SUCCESS;
361}
362
363
364/** @callback_method_impl{FNCPUMWRMSR} */
365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
366{
367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
368 /** @todo should remember writes, though it's supposedly something only a BIOS
369 * would write, so it's not extremely important. */
370 return VINF_SUCCESS;
371}
372
373/** @callback_method_impl{FNCPUMRDMSR} */
374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
375{
376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
377 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
378 * what we want? */
379 *puValue = TMCpuTickGet(pVCpu);
380#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
381 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
382#endif
383 return VINF_SUCCESS;
384}
385
386
387/** @callback_method_impl{FNCPUMWRMSR} */
388static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
389{
390 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
391 /** @todo Write MPERF: Calc adjustment. */
392 return VINF_SUCCESS;
393}
394
395
396/** @callback_method_impl{FNCPUMRDMSR} */
397static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
398{
399 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
400 /** @todo Read APERF: Adjust against previously written APERF value. Is TSC
401 * what we want? */
402 *puValue = TMCpuTickGet(pVCpu);
403#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
404 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
405#endif
406 return VINF_SUCCESS;
407}
408
409
410/** @callback_method_impl{FNCPUMWRMSR} */
411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
412{
413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
414 /** @todo Write APERF: Calc adjustment. */
415 return VINF_SUCCESS;
416}
417
418
419/**
420 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
421 *
422 * @returns Fixed IA32_MTRR_CAP value.
423 * @param pVCpu The cross context per CPU structure.
424 */
425VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
426{
427 RT_NOREF_PV(pVCpu);
428
429 /* This is currently a bit weird. :-) */
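 /* IA32_MTRR_CAP layout: VCNT (number of variable ranges) lives in bits 7:0,
    FIX (fixed ranges supported) in bit 8, WC (write-combining type) in bit 10
    and SMRR in bit 11; all of them are currently reported as zero/absent. */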
430 uint8_t const cVariableRangeRegs = 0;
431 bool const fSystemManagementRangeRegisters = false;
432 bool const fFixedRangeRegisters = false;
433 bool const fWriteCombiningType = false;
434 return cVariableRangeRegs
435 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
436 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
437 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
438}
439
440/** @callback_method_impl{FNCPUMRDMSR} */
441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
442{
443 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
444 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
445 return VINF_SUCCESS;
446}
447
448
449/** @callback_method_impl{FNCPUMRDMSR} */
450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
451{
452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
453 /** @todo Implement variable MTRR storage. */
454 Assert(pRange->uValue == (idMsr - 0x200) / 2);
455 *puValue = 0;
456 return VINF_SUCCESS;
457}
458
459
460/** @callback_method_impl{FNCPUMWRMSR} */
461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
462{
463 /*
464 * Validate the value.
465 */
466 Assert(pRange->uValue == (idMsr - 0x200) / 2);
467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
468
469 uint8_t uType = uValue & 0xff;
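 /* Only memory types 0 (UC), 1 (WC), 4 (WT), 5 (WP) and 6 (WB) are valid;
    2, 3 and anything above 6 are reserved and must raise #GP(0). */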
470 if ((uType >= 7) || (uType == 2) || (uType == 3))
471 {
472 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
473 return VERR_CPUM_RAISE_GP_0;
474 }
475
476 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
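 /* Worked example: with cMaxPhysAddrWidth = 36, RT_BIT_64(36) - 1 = 0xfffffffff,
    so fInvPhysMask covers bits 63:36 and any address bit above bit 35 faults. */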
477 if (fInvPhysMask & uValue)
478 {
479 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
480 idMsr, uValue, uValue & fInvPhysMask));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 /*
485 * Store it.
486 */
487 /** @todo Implement variable MTRR storage. */
488 return VINF_SUCCESS;
489}
490
491
492/** @callback_method_impl{FNCPUMRDMSR} */
493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
494{
495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
496 /** @todo Implement variable MTRR storage. */
497 Assert(pRange->uValue == (idMsr - 0x200) / 2);
498 *puValue = 0;
499 return VINF_SUCCESS;
500}
501
502
503/** @callback_method_impl{FNCPUMWRMSR} */
504static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
505{
506 /*
507 * Validate the value.
508 */
509 Assert(pRange->uValue == (idMsr - 0x200) / 2);
510 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
511
512 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
513 if (fInvPhysMask & uValue)
514 {
515 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
516 idMsr, uValue, uValue & fInvPhysMask));
517 return VERR_CPUM_RAISE_GP_0;
518 }
519
520 /*
521 * Store it.
522 */
523 /** @todo Implement variable MTRR storage. */
524 return VINF_SUCCESS;
525}
526
527
528/** @callback_method_impl{FNCPUMRDMSR} */
529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
530{
531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
532 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
533 *puValue = *puFixedMtrr;
534 return VINF_SUCCESS;
535}
536
537
538/** @callback_method_impl{FNCPUMWRMSR} */
539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
540{
541 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
542 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
543
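 /* A fixed-range MTRR packs eight 8-bit memory type fields into the 64-bit value,
    one per sub-range; each field is validated like the variable-range types
    (only 0, 1, 4, 5 and 6 are legal). */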
544 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
545 {
546 uint8_t uType = (uint8_t)(uValue >> cShift);
547 if ((uType >= 7) || (uType == 2) || (uType == 3))
548 {
549 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
550 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
551 return VERR_CPUM_RAISE_GP_0;
552 }
553 }
554 *puFixedMtrr = uValue;
555 return VINF_SUCCESS;
556}
557
558
559/** @callback_method_impl{FNCPUMRDMSR} */
560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
561{
562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
563 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
564 return VINF_SUCCESS;
565}
566
567
568/** @callback_method_impl{FNCPUMWRMSR} */
569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
570{
571 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
572
573 uint8_t uType = uValue & 0xff;
574 if ((uType >= 7) || (uType == 2) || (uType == 3))
575 {
576 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
577 return VERR_CPUM_RAISE_GP_0;
578 }
579
580 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
581 return VINF_SUCCESS;
582}
583
584
585/** @callback_method_impl{FNCPUMRDMSR} */
586static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
587{
588 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
589 *puValue = pVCpu->cpum.s.Guest.msrPAT;
590 return VINF_SUCCESS;
591}
592
593
594/** @callback_method_impl{FNCPUMWRMSR} */
595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
596{
597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
598 if (CPUMIsPatMsrValid(uValue))
599 {
600 pVCpu->cpum.s.Guest.msrPAT = uValue;
601 return VINF_SUCCESS;
602 }
603 return VERR_CPUM_RAISE_GP_0;
604}
605
606
607/** @callback_method_impl{FNCPUMRDMSR} */
608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
609{
610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
611 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
612 return VINF_SUCCESS;
613}
614
615
616/** @callback_method_impl{FNCPUMWRMSR} */
617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
618{
619 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
620
621 /* Note! We used to mask this by 0xffff, but it turns out real HW doesn't, and
622 there are generally 32 working bits backing this register. */
623 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
624 return VINF_SUCCESS;
625}
626
627
628/** @callback_method_impl{FNCPUMRDMSR} */
629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
630{
631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
632 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
633 return VINF_SUCCESS;
634}
635
636
637/** @callback_method_impl{FNCPUMWRMSR} */
638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
639{
640 if (X86_IS_CANONICAL(uValue))
641 {
642 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
643 return VINF_SUCCESS;
644 }
645 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
646 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
647 return VERR_CPUM_RAISE_GP_0;
648}
649
650
651/** @callback_method_impl{FNCPUMRDMSR} */
652static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
653{
654 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
655 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
656 return VINF_SUCCESS;
657}
658
659
660/** @callback_method_impl{FNCPUMWRMSR} */
661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
662{
663 if (X86_IS_CANONICAL(uValue))
664 {
665 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
666 return VINF_SUCCESS;
667 }
668 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
669 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
670 return VERR_CPUM_RAISE_GP_0;
671}
672
673
674/** @callback_method_impl{FNCPUMRDMSR} */
675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
676{
677#if 0 /** @todo implement machine checks. */
678 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
679#else
680 *puValue = 0;
681#endif
682 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
683 return VINF_SUCCESS;
684}
685
686
687/** @callback_method_impl{FNCPUMRDMSR} */
688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
689{
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 /** @todo implement machine checks. */
692 *puValue = 0;
693 return VINF_SUCCESS;
694}
695
696
697/** @callback_method_impl{FNCPUMWRMSR} */
698static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
699{
700 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
701 /** @todo implement machine checks. */
702 return VINF_SUCCESS;
703}
704
705
706/** @callback_method_impl{FNCPUMRDMSR} */
707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
708{
709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
710 /** @todo implement machine checks. */
711 *puValue = 0;
712 return VINF_SUCCESS;
713}
714
715
716/** @callback_method_impl{FNCPUMWRMSR} */
717static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
718{
719 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
720 /** @todo implement machine checks. */
721 return VINF_SUCCESS;
722}
723
724
725/** @callback_method_impl{FNCPUMRDMSR} */
726static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
727{
728 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
729 /** @todo implement IA32_DEBUGCTL. */
730 *puValue = 0;
731 return VINF_SUCCESS;
732}
733
734
735/** @callback_method_impl{FNCPUMWRMSR} */
736static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
737{
738 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
739 /** @todo implement IA32_DEBUGCTL. */
740 return VINF_SUCCESS;
741}
742
743
744/** @callback_method_impl{FNCPUMRDMSR} */
745static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
746{
747 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
748 /** @todo implement intel SMM. */
749 *puValue = 0;
750 return VINF_SUCCESS;
751}
752
753
754/** @callback_method_impl{FNCPUMWRMSR} */
755static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
756{
757 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
758 /** @todo implement intel SMM. */
759 return VERR_CPUM_RAISE_GP_0;
760}
761
762
763/** @callback_method_impl{FNCPUMRDMSR} */
764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
765{
766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
767 /** @todo implement intel SMM. */
768 *puValue = 0;
769 return VINF_SUCCESS;
770}
771
772
773/** @callback_method_impl{FNCPUMWRMSR} */
774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
775{
776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
777 /** @todo implement intel SMM. */
778 return VERR_CPUM_RAISE_GP_0;
779}
780
781
782/** @callback_method_impl{FNCPUMRDMSR} */
783static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
784{
785 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
786 /** @todo implement intel direct cache access (DCA)?? */
787 *puValue = 0;
788 return VINF_SUCCESS;
789}
790
791
792/** @callback_method_impl{FNCPUMWRMSR} */
793static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
794{
795 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
796 /** @todo implement intel direct cache access (DCA)?? */
797 return VINF_SUCCESS;
798}
799
800
801/** @callback_method_impl{FNCPUMRDMSR} */
802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
803{
804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
805 /** @todo implement intel direct cache access (DCA)?? */
806 *puValue = 0;
807 return VINF_SUCCESS;
808}
809
810
811/** @callback_method_impl{FNCPUMRDMSR} */
812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
813{
814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
815 /** @todo implement intel direct cache access (DCA)?? */
816 *puValue = 0;
817 return VINF_SUCCESS;
818}
819
820
821/** @callback_method_impl{FNCPUMWRMSR} */
822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
823{
824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
825 /** @todo implement intel direct cache access (DCA)?? */
826 return VINF_SUCCESS;
827}
828
829
830/** @callback_method_impl{FNCPUMRDMSR} */
831static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
832{
833 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
834 /** @todo implement IA32_PERFEVTSEL0+. */
835 *puValue = 0;
836 return VINF_SUCCESS;
837}
838
839
840/** @callback_method_impl{FNCPUMWRMSR} */
841static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
842{
843 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
844 /** @todo implement IA32_PERFEVTSEL0+. */
845 return VINF_SUCCESS;
846}
847
848
849/** @callback_method_impl{FNCPUMRDMSR} */
850static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
851{
852 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
853 uint64_t uValue = pRange->uValue;
854
855 /* Always provide the max bus ratio for now. XNU expects it. */
856 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
857
858 PVM pVM = pVCpu->CTX_SUFF(pVM);
859 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
860 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
861 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
862 if (uTscRatio > 0x1f)
863 uTscRatio = 0x1f;
864 uValue |= (uint64_t)uTscRatio << 40;
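 /* Worked example (made-up numbers): uTscHz = 2400 MHz and uScalableBusHz = 100 MHz
    gives (2400 + 50) / 100 = 24 after rounding, which ends up in bits 44:40. */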
865
866 *puValue = uValue;
867 return VINF_SUCCESS;
868}
869
870
871/** @callback_method_impl{FNCPUMWRMSR} */
872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
873{
874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
875 /* Pentium4 allows writing, but all bits are ignored. */
876 return VINF_SUCCESS;
877}
878
879
880/** @callback_method_impl{FNCPUMRDMSR} */
881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
882{
883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
884 /** @todo implement IA32_PERFCTL. */
885 *puValue = 0;
886 return VINF_SUCCESS;
887}
888
889
890/** @callback_method_impl{FNCPUMWRMSR} */
891static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
892{
893 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
894 /** @todo implement IA32_PERFCTL. */
895 return VINF_SUCCESS;
896}
897
898
899/** @callback_method_impl{FNCPUMRDMSR} */
900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
901{
902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
903 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
904 *puValue = 0;
905 return VINF_SUCCESS;
906}
907
908
909/** @callback_method_impl{FNCPUMWRMSR} */
910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
911{
912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
913 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
914 return VINF_SUCCESS;
915}
916
917
918/** @callback_method_impl{FNCPUMRDMSR} */
919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
920{
921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
922 /** @todo implement performance counters. */
923 *puValue = 0;
924 return VINF_SUCCESS;
925}
926
927
928/** @callback_method_impl{FNCPUMWRMSR} */
929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
930{
931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
932 /** @todo implement performance counters. */
933 return VINF_SUCCESS;
934}
935
936
937/** @callback_method_impl{FNCPUMRDMSR} */
938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
939{
940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
941 /** @todo implement performance counters. */
942 *puValue = 0;
943 return VINF_SUCCESS;
944}
945
946
947/** @callback_method_impl{FNCPUMWRMSR} */
948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
949{
950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
951 /** @todo implement performance counters. */
952 return VINF_SUCCESS;
953}
954
955
956/** @callback_method_impl{FNCPUMRDMSR} */
957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
958{
959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
960 /** @todo implement performance counters. */
961 *puValue = 0;
962 return VINF_SUCCESS;
963}
964
965
966/** @callback_method_impl{FNCPUMWRMSR} */
967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
968{
969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
970 /** @todo implement performance counters. */
971 return VINF_SUCCESS;
972}
973
974
975/** @callback_method_impl{FNCPUMRDMSR} */
976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
977{
978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
979 /** @todo implement performance counters. */
980 *puValue = 0;
981 return VINF_SUCCESS;
982}
983
984
985/** @callback_method_impl{FNCPUMWRMSR} */
986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
987{
988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
989 /** @todo implement performance counters. */
990 return VINF_SUCCESS;
991}
992
993
994/** @callback_method_impl{FNCPUMRDMSR} */
995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
996{
997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
998 /** @todo implement performance counters. */
999 *puValue = 0;
1000 return VINF_SUCCESS;
1001}
1002
1003
1004/** @callback_method_impl{FNCPUMWRMSR} */
1005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1006{
1007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1008 /** @todo implement performance counters. */
1009 return VINF_SUCCESS;
1010}
1011
1012
1013/** @callback_method_impl{FNCPUMRDMSR} */
1014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1015{
1016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1017 /** @todo implement performance counters. */
1018 *puValue = 0;
1019 return VINF_SUCCESS;
1020}
1021
1022
1023/** @callback_method_impl{FNCPUMWRMSR} */
1024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1025{
1026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1027 /** @todo implement performance counters. */
1028 return VINF_SUCCESS;
1029}
1030
1031
1032/** @callback_method_impl{FNCPUMRDMSR} */
1033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1034{
1035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1036 /** @todo implement IA32_CLOCK_MODULATION. */
1037 *puValue = 0;
1038 return VINF_SUCCESS;
1039}
1040
1041
1042/** @callback_method_impl{FNCPUMWRMSR} */
1043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1044{
1045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1046 /** @todo implement IA32_CLOCK_MODULATION. */
1047 return VINF_SUCCESS;
1048}
1049
1050
1051/** @callback_method_impl{FNCPUMRDMSR} */
1052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1053{
1054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1055 /** @todo implement IA32_THERM_INTERRUPT. */
1056 *puValue = 0;
1057 return VINF_SUCCESS;
1058}
1059
1060
1061/** @callback_method_impl{FNCPUMWRMSR} */
1062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1063{
1064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1065 /** @todo implement IA32_THERM_INTERRUPT. */
1066 return VINF_SUCCESS;
1067}
1068
1069
1070/** @callback_method_impl{FNCPUMRDMSR} */
1071static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1072{
1073 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1074 /** @todo implement IA32_THERM_STATUS. */
1075 *puValue = 0;
1076 return VINF_SUCCESS;
1077}
1078
1079
1080/** @callback_method_impl{FNCPUMWRMSR} */
1081static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1082{
1083 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1084 /** @todo implement IA32_THERM_STATUS. */
1085 return VINF_SUCCESS;
1086}
1087
1088
1089/** @callback_method_impl{FNCPUMRDMSR} */
1090static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1091{
1092 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1093 /** @todo implement IA32_THERM2_CTL. */
1094 *puValue = 0;
1095 return VINF_SUCCESS;
1096}
1097
1098
1099/** @callback_method_impl{FNCPUMWRMSR} */
1100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1101{
1102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1103 /** @todo implement IA32_THERM2_CTL. */
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** @callback_method_impl{FNCPUMRDMSR} */
1109static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1110{
1111 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1112 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1113 return VINF_SUCCESS;
1114}
1115
1116
1117/** @callback_method_impl{FNCPUMWRMSR} */
1118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1119{
1120 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1121#ifdef LOG_ENABLED
1122 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1123#endif
1124
1125 /* Unsupported bits are generally ignored and stripped by the MSR range
1126 entry that got us here. So, we just need to preserve fixed bits. */
1127 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1128 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1129 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1130
1131 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1132 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1133
1134 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1135 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1136 return VINF_SUCCESS;
1137}
1138
1139
1140/** @callback_method_impl{FNCPUMRDMSR} */
1141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1142{
1143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1144
1145 /** @todo Implement machine check exception injection. */
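 /* The IA32_MCi bank registers come in groups of four starting at 0x400:
    CTL (idMsr & 3 == 0), STATUS (1), ADDR (2) and MISC (3), so the low two bits
    of idMsr select the register within the bank. */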
1146 switch (idMsr & 3)
1147 {
1148 case 0:
1149 case 1:
1150 *puValue = 0;
1151 break;
1152
1153 /* The ADDR and MISC registers aren't accessible since the
1154 corresponding STATUS bits are zero. */
1155 case 2:
1156 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1157 return VERR_CPUM_RAISE_GP_0;
1158 case 3:
1159 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1160 return VERR_CPUM_RAISE_GP_0;
1161 }
1162 return VINF_SUCCESS;
1163}
1164
1165
1166/** @callback_method_impl{FNCPUMWRMSR} */
1167static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1168{
1169 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1170 switch (idMsr & 3)
1171 {
1172 case 0:
1173 /* Ignore writes to the CTL register. */
1174 break;
1175
1176 case 1:
1177 /* According to specs, the STATUS register can only be written to
1178 with the value 0. VBoxCpuReport thinks differently for a
1179 Pentium M Dothan, but we implement it according to the specs for now. */
1180 if (uValue != 0)
1181 {
1182 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1183 return VERR_CPUM_RAISE_GP_0;
1184 }
1185 break;
1186
1187 /* The specs state that ADDR and MISC can be cleared by writing zeros.
1188 Writing 1s will GP. Need to figure out how this relates to the
1189 ADDRV and MISCV status flags. If writing is independent of those
1190 bits, we need to know whether the CPU really implements them since
1191 that is exposed by writing 0 to them.
1192 Implementing the solution with the fewest GPs for now. */
1193 case 2:
1194 if (uValue != 0)
1195 {
1196 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1197 return VERR_CPUM_RAISE_GP_0;
1198 }
1199 break;
1200 case 3:
1201 if (uValue != 0)
1202 {
1203 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1204 return VERR_CPUM_RAISE_GP_0;
1205 }
1206 break;
1207 }
1208 return VINF_SUCCESS;
1209}
1210
1211
1212/** @callback_method_impl{FNCPUMRDMSR} */
1213static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1214{
1215 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1216 /** @todo Implement machine check exception injection. */
1217 *puValue = 0;
1218 return VINF_SUCCESS;
1219}
1220
1221
1222/** @callback_method_impl{FNCPUMWRMSR} */
1223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1224{
1225 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1226 /** @todo Implement machine check exception injection. */
1227 return VINF_SUCCESS;
1228}
1229
1230
1231/** @callback_method_impl{FNCPUMRDMSR} */
1232static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1233{
1234 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1235 /** @todo implement IA32_DS_AREA. */
1236 *puValue = 0;
1237 return VINF_SUCCESS;
1238}
1239
1240
1241/** @callback_method_impl{FNCPUMWRMSR} */
1242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1243{
1244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1245 /** @todo implement IA32_DS_AREA. */
1246 return VINF_SUCCESS;
1247}
1248
1249
1250/** @callback_method_impl{FNCPUMRDMSR} */
1251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1252{
1253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1254 /** @todo implement TSC deadline timer. */
1255 *puValue = 0;
1256 return VINF_SUCCESS;
1257}
1258
1259
1260/** @callback_method_impl{FNCPUMWRMSR} */
1261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1262{
1263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1264 /** @todo implement TSC deadline timer. */
1265 return VINF_SUCCESS;
1266}
1267
1268
1269/** @callback_method_impl{FNCPUMRDMSR} */
1270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1271{
1272 RT_NOREF_PV(pRange);
1273 return APICReadMsr(pVCpu, idMsr, puValue);
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMWRMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1279{
1280 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1281 return APICWriteMsr(pVCpu, idMsr, uValue);
1282}
1283
1284
1285/** @callback_method_impl{FNCPUMRDMSR} */
1286static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1287{
1288 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1289 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1290 *puValue = 0;
1291 return VINF_SUCCESS;
1292}
1293
1294
1295/** @callback_method_impl{FNCPUMWRMSR} */
1296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1297{
1298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1299 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1300 return VINF_SUCCESS;
1301}
1302
1303
1304/**
1305 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1306 *
1307 * @returns IA32_VMX_BASIC value.
1308 * @param pVCpu The cross context per CPU structure.
1309 */
1310VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1311{
1312 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1313 uint64_t uVmxMsr;
1314 if (pGuestFeatures->fVmx)
1315 {
1316 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1317 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1318 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, !pGuestFeatures->fLongMode )
1319 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1320 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1321 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1322 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1323 }
1324 else
1325 uVmxMsr = 0;
1326 return uVmxMsr;
1327}
1328
1329
1330/** @callback_method_impl{FNCPUMRDMSR} */
1331static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1332{
1333 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1334 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1335 return VINF_SUCCESS;
1336}
1337
1338
1339/**
1340 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1341 *
1342 * @returns IA32_VMX_PINBASED_CTLS value.
1343 * @param pVCpu The cross context per CPU structure.
1344 */
1345VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1346{
1347 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1348 uint64_t uVmxMsr;
1349 if (pGuestFeatures->fVmx)
1350 {
1351 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1352 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1353 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1354 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1355 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1356 uint32_t const fVal = VMX_PIN_CTLS_DEFAULT1;
1357 uint32_t const fZap = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1358 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
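 /* Low dword = allowed-0 settings (bits the guest must keep set), high dword =
    allowed-1 settings (bits the guest may set); a control value u is valid iff
    (u & fVal) == fVal and (u & ~fZap) == 0. */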
1359 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1360 }
1361 else
1362 uVmxMsr = 0;
1363 return uVmxMsr;
1364}
1365
1366
1367/** @callback_method_impl{FNCPUMRDMSR} */
1368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1369{
1370 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1371 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1372 return VINF_SUCCESS;
1373}
1374
1375
1376/**
1377 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1378 *
1379 * @returns IA32_VMX_PROCBASED_CTLS value.
1380 * @param pVCpu The cross context per CPU structure.
1381 */
1382VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1383{
1384 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1385 uint64_t uVmxMsr;
1386 if (pGuestFeatures->fVmx)
1387 {
1388 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1389 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1390 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1391 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1392 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1393 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1394 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1395 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1396 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1397 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1398 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1399 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1400 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1401 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1402 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1403 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1404 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1405 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1406 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1407 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1408 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1409 uint32_t const fVal = VMX_PROC_CTLS_DEFAULT1;
1410 uint32_t const fZap = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1411 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1412 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1413 }
1414 else
1415 uVmxMsr = 0;
1416 return uVmxMsr;
1417}
1418
1419
1420/** @callback_method_impl{FNCPUMRDMSR} */
1421static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1422{
1423 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1424 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1425 return VINF_SUCCESS;
1426}
1427
1428
1429/**
1430 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxExitCtls.
1431 *
1432 * @returns IA32_VMX_EXIT_CTLS value.
1433 * @param pVCpu The cross context per CPU structure.
1434 */
1435VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1436{
1437 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1438 uint64_t uVmxMsr;
1439 if (pGuestFeatures->fVmx)
1440 {
1441 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1442 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1443 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1444 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1445 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1446 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1447 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1448 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1449 uint32_t const fVal = VMX_EXIT_CTLS_DEFAULT1;
1450 uint32_t const fZap = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1451 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1452 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1453 }
1454 else
1455 uVmxMsr = 0;
1456 return uVmxMsr;
1457}
1458
1459
1460/** @callback_method_impl{FNCPUMRDMSR} */
1461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1462{
1463 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1464 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1465 return VINF_SUCCESS;
1466}
1467
1468
1469/**
1470 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1471 *
1472 * @returns IA32_VMX_ENTRY_CTLS value.
1473 * @param pVCpu The cross context per CPU structure.
1474 */
1475VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1476{
1477 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1478 uint64_t uVmxMsr;
1479 if (pGuestFeatures->fVmx)
1480 {
1481 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1482 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1483 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1484 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1485 uint32_t const fDefault1 = VMX_ENTRY_CTLS_DEFAULT1;
1486 uint32_t const fVal = fDefault1;
1487 uint32_t const fZap = fFeatures | fDefault1;
1488 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1489 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1490 }
1491 else
1492 uVmxMsr = 0;
1493 return uVmxMsr;
1494}
1495
1496
1497/** @callback_method_impl{FNCPUMRDMSR} */
1498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1499{
1500 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1501 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1502 return VINF_SUCCESS;
1503}
1504
1505
1506/**
1507 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1508 *
1509 * @returns IA32_VMX_MISC MSR.
1510 * @param pVCpu The cross context per CPU structure.
1511 */
1512VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1513{
1514 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1515 uint64_t uVmxMsr;
1516 if (pGuestFeatures->fVmx)
1517 {
1518 uint64_t uHostMsr;
1519 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1520 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1521 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_MAX_MSRS);
1522 uint8_t const fActivityState = RT_BF_GET(uHostMsr, VMX_BF_MISC_ACTIVITY_STATES) & VMX_V_GUEST_ACTIVITY_STATE_MASK;
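        /* Bits 4:0 of IA32_VMX_MISC report a value X such that the VMX-preemption timer
           counts down by 1 each time bit X of the TSC changes (i.e. roughly every 2^X TSC
           ticks); VMX_V_PREEMPT_TIMER_SHIFT is the rate advertised to the guest. */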
1523 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1524 | RT_BF_MAKE(VMX_BF_MISC_EXIT_STORE_EFER_LMA, pGuestFeatures->fVmxExitStoreEferLma )
1525 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, fActivityState )
1526 | RT_BF_MAKE(VMX_BF_MISC_PT, 0 )
1527 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1528 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1529 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1530 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1531 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1532 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1533 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1534 }
1535 else
1536 uVmxMsr = 0;
1537 return uVmxMsr;
1538}
1539
1540
1541/** @callback_method_impl{FNCPUMRDMSR} */
1542static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1543{
1544 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1545 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1546 return VINF_SUCCESS;
1547}
1548
1549
1550/**
1551 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1552 *
1553 * @returns IA32_VMX_CR0_FIXED0 value.
1554 * @param pVCpu The cross context per CPU structure.
1555 */
1556VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1557{
1558 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1559 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR0_FIXED0 : 0;
1560 return uVmxMsr;
1561}
1562
1563
1564/** @callback_method_impl{FNCPUMRDMSR} */
1565static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1566{
1567 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1568 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1569 return VINF_SUCCESS;
1570}
1571
1572
1573/**
1574 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1575 *
1576 * @returns IA32_VMX_CR0_FIXED1 MSR.
1577 * @param pVCpu The cross context per CPU structure.
1578 */
1579VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1580{
1581 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1582 uint64_t uVmxMsr;
1583 if (pGuestFeatures->fVmx)
1584 {
1585 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1586 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1587 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1588 }
1589 else
1590 uVmxMsr = 0;
1591 return uVmxMsr;
1592}
1593
1594
1595/** @callback_method_impl{FNCPUMRDMSR} */
1596static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1597{
1598 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1599 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1600 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1601 return VINF_SUCCESS;
1602}
1603
1604
1605/**
1606 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1607 *
1608 * @returns IA32_VMX_CR4_FIXED0 value.
1609 * @param pVCpu The cross context per CPU structure.
1610 */
1611VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1612{
1613 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1614 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1615 return uVmxMsr;
1616}
1617
1618
1619/** @callback_method_impl{FNCPUMRDMSR} */
1620static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1621{
1622 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1623 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1624 return VINF_SUCCESS;
1625}
1626
1627
1628/**
1629 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1630 *
1631 * @returns IA32_VMX_CR4_FIXED1 MSR.
1632 * @param pVCpu The cross context per CPU structure.
1633 */
1634VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1635{
1636 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1637 uint64_t uVmxMsr;
1638 if (pGuestFeatures->fVmx)
1639 {
1640 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1641 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1642 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1643 }
1644 else
1645 uVmxMsr = 0;
1646 return uVmxMsr;
1647}
1648
1649
1650/** @callback_method_impl{FNCPUMRDMSR} */
1651static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1652{
1653 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1654 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1655 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1656 return VINF_SUCCESS;
1657}
1658
1659
1660/**
1661 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1662 *
1663 * @returns IA32_VMX_VMCS_ENUM value.
1664 * @param pVCpu The cross context per CPU structure.
1665 */
1666VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1667{
1668 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1669 uint64_t uVmxMsr;
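    /* Bits 9:1 of IA32_VMX_VMCS_ENUM report the highest index value used in any VMCS field encoding. */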
1670 if (pGuestFeatures->fVmx)
1671 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1672 else
1673 uVmxMsr = 0;
1674 return uVmxMsr;
1675}
1676
1677
1678/** @callback_method_impl{FNCPUMRDMSR} */
1679static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1680{
1681 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1682 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1683 return VINF_SUCCESS;
1684}
1685
1686
1687/**
1688 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1689 *
1690 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1691 * @param pVCpu The cross context per CPU structure.
1692 */
1693VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1694{
1695 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1696 uint64_t uVmxMsr;
1697 if ( pGuestFeatures->fVmx
1698 && pGuestFeatures->fVmxSecondaryExecCtls)
1699 {
1700 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1701 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1702 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1703 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1704 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1705 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1706 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1707 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1708 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1709 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1710 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1711 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1712 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1713 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1714 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1715 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1716 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1717 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1718 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1719 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
1720 uint32_t const fVal = 0;
1721 uint32_t const fZap = fFeatures;
1722 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1723 }
1724 else
1725 uVmxMsr = 0;
1726 return uVmxMsr;
1727}
1728
1729
1730/** @callback_method_impl{FNCPUMRDMSR} */
1731static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1732{
1733 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1734 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1735 return VINF_SUCCESS;
1736}
1737
1738
1739/** @callback_method_impl{FNCPUMRDMSR} */
1740static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1741{
1742 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1743 *puValue = 0;
1744 return VINF_SUCCESS;
1745}
1746
1747
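/* The IA32_VMX_TRUE_{PINBASED,PROCBASED,EXIT,ENTRY}_CTLS handlers below currently read as all zeroes. */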
1748/** @callback_method_impl{FNCPUMRDMSR} */
1749static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1750{
1751 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1752 *puValue = 0;
1753 return VINF_SUCCESS;
1754}
1755
1756
1757/** @callback_method_impl{FNCPUMRDMSR} */
1758static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1759{
1760 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1761 *puValue = 0;
1762 return VINF_SUCCESS;
1763}
1764
1765
1766/** @callback_method_impl{FNCPUMRDMSR} */
1767static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1768{
1769 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1770 *puValue = 0;
1771 return VINF_SUCCESS;
1772}
1773
1774
1775/** @callback_method_impl{FNCPUMRDMSR} */
1776static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1777{
1778 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1779 *puValue = 0;
1780 return VINF_SUCCESS;
1781}
1782
1783
1784/**
1785 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1786 *
1787 * @returns IA32_VMX_VMFUNC value.
1788 * @param pVCpu The cross context per CPU structure.
1789 */
1790VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1791{
1792 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1793 uint64_t uVmxMsr;
1794 if ( pGuestFeatures->fVmx
1795 && pGuestFeatures->fVmxVmFunc)
1796 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1797 else
1798 uVmxMsr = 0;
1799 return uVmxMsr;
1800}
1801
1802
1803/** @callback_method_impl{FNCPUMRDMSR} */
1804static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1805{
1806 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1807 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1808 return VINF_SUCCESS;
1809}
1810
1811
1812/** @callback_method_impl{FNCPUMRDMSR} */
1813static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1814{
1815 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1816 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1817 return VINF_SUCCESS;
1818}
1819
1820
1821/** @callback_method_impl{FNCPUMWRMSR} */
1822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1823{
1824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1825
1826 /* NB: The STIBP bit can be set even when IBRS is present, regardless of whether STIBP is actually implemented. */
1827 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1828 {
1829 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1830 return VERR_CPUM_RAISE_GP_0;
1831 }
1832
1833 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1834 return VINF_SUCCESS;
1835}
1836
1837
1838/** @callback_method_impl{FNCPUMWRMSR} */
1839static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1840{
1841 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1842 return VINF_SUCCESS;
1843}
1844
1845
1846/** @callback_method_impl{FNCPUMRDMSR} */
1847static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1848{
1849 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1850 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1851 return VINF_SUCCESS;
1852}
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865/*
1866 * AMD64
1867 * AMD64
1868 * AMD64
1869 */
1870
1871
1872/** @callback_method_impl{FNCPUMRDMSR} */
1873static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1874{
1875 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1876 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1877 return VINF_SUCCESS;
1878}
1879
1880
1881/** @callback_method_impl{FNCPUMWRMSR} */
1882static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1883{
1884 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1885 uint64_t uValidatedEfer;
1886 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1887 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1888 if (RT_FAILURE(rc))
1889 return VERR_CPUM_RAISE_GP_0;
1890
1891 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1892 return VINF_SUCCESS;
1893}
1894
1895
1896/** @callback_method_impl{FNCPUMRDMSR} */
1897static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1898{
1899 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1900 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1901 return VINF_SUCCESS;
1902}
1903
1904
1905/** @callback_method_impl{FNCPUMWRMSR} */
1906static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1907{
1908 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1909 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1910 return VINF_SUCCESS;
1911}
1912
1913
1914/** @callback_method_impl{FNCPUMRDMSR} */
1915static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1916{
1917 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1918 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1919 return VINF_SUCCESS;
1920}
1921
1922
1923/** @callback_method_impl{FNCPUMWRMSR} */
1924static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1925{
1926 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1927 if (!X86_IS_CANONICAL(uValue))
1928 {
1929 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1930 return VERR_CPUM_RAISE_GP_0;
1931 }
1932 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1933 return VINF_SUCCESS;
1934}
1935
1936
1937/** @callback_method_impl{FNCPUMRDMSR} */
1938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1939{
1940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1941 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1942 return VINF_SUCCESS;
1943}
1944
1945
1946/** @callback_method_impl{FNCPUMWRMSR} */
1947static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1948{
1949 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1950 if (!X86_IS_CANONICAL(uValue))
1951 {
1952 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1953 return VERR_CPUM_RAISE_GP_0;
1954 }
1955 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1956 return VINF_SUCCESS;
1957}
1958
1959
1960/** @callback_method_impl{FNCPUMRDMSR} */
1961static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1962{
1963 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1964 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
1965 return VINF_SUCCESS;
1966}
1967
1968
1969/** @callback_method_impl{FNCPUMWRMSR} */
1970static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1971{
1972 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1973 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
1974 return VINF_SUCCESS;
1975}
1976
1977
1978/** @callback_method_impl{FNCPUMRDMSR} */
1979static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1980{
1981 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1982 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
1983 return VINF_SUCCESS;
1984}
1985
1986
1987/** @callback_method_impl{FNCPUMWRMSR} */
1988static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1989{
1990 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1991 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
1992 return VINF_SUCCESS;
1993}
1994
1995
1996/** @callback_method_impl{FNCPUMRDMSR} */
1997static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1998{
1999 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2000 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
2001 return VINF_SUCCESS;
2002}
2003
2004/** @callback_method_impl{FNCPUMWRMSR} */
2005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2006{
2007 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2008 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2009 return VINF_SUCCESS;
2010}
2011
2012
2013
2014/** @callback_method_impl{FNCPUMRDMSR} */
2015static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2016{
2017 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2018 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2019 return VINF_SUCCESS;
2020}
2021
2022/** @callback_method_impl{FNCPUMWRMSR} */
2023static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2024{
2025 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2026 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2027 return VINF_SUCCESS;
2028}
2029
2030
2031/** @callback_method_impl{FNCPUMRDMSR} */
2032static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2033{
2034 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2035 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2036 return VINF_SUCCESS;
2037}
2038
2039/** @callback_method_impl{FNCPUMWRMSR} */
2040static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2041{
2042 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2043 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2044 return VINF_SUCCESS;
2045}
2046
2047
2048/*
2049 * Intel specific
2050 * Intel specific
2051 * Intel specific
2052 */
2053
2054/** @callback_method_impl{FNCPUMRDMSR} */
2055static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2056{
2057 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2058 /** @todo recalc clock frequency ratio? */
2059 *puValue = pRange->uValue;
2060 return VINF_SUCCESS;
2061}
2062
2063
2064/** @callback_method_impl{FNCPUMWRMSR} */
2065static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2066{
2067 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2068 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2069 return VINF_SUCCESS;
2070}
2071
2072
2073/** @callback_method_impl{FNCPUMRDMSR} */
2074static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2075{
2076 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2077
2078    /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2079 have a 4-bit core count. */
2080 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2081 uint16_t cThreads = cCores; /** @todo hyper-threading. */
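    /* MSR_CORE_THREAD_COUNT layout: bits 15:0 = thread count, bits 31:16 = core count,
       which is why RT_MAKE_U32 puts cThreads in the low word. */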
2082 *puValue = RT_MAKE_U32(cThreads, cCores);
2083 return VINF_SUCCESS;
2084}
2085
2086
2087/** @callback_method_impl{FNCPUMRDMSR} */
2088static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2089{
2090 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2091 /** @todo P4 hard power on config */
2092 *puValue = pRange->uValue;
2093 return VINF_SUCCESS;
2094}
2095
2096
2097/** @callback_method_impl{FNCPUMWRMSR} */
2098static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2099{
2100 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2101 /** @todo P4 hard power on config */
2102 return VINF_SUCCESS;
2103}
2104
2105
2106/** @callback_method_impl{FNCPUMRDMSR} */
2107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2108{
2109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2110 /** @todo P4 soft power on config */
2111 *puValue = pRange->uValue;
2112 return VINF_SUCCESS;
2113}
2114
2115
2116/** @callback_method_impl{FNCPUMWRMSR} */
2117static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2118{
2119 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2120 /** @todo P4 soft power on config */
2121 return VINF_SUCCESS;
2122}
2123
2124
2125/** @callback_method_impl{FNCPUMRDMSR} */
2126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2127{
2128 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2129
2130 uint64_t uValue;
2131 PVM pVM = pVCpu->CTX_SUFF(pVM);
2132 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
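    /* For model 2 and later, encode the scalable bus speed code in bits 19:16 and the
       TSC-to-bus ratio in bits 31:24 (the ~0xff0f0000 mask below), keeping all other
       bits from the configured default MSR value. */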
2133 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2134 {
2135 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2136 {
2137 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2138 uValue = 0;
2139 }
2140 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2141 {
2142 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2143 uValue = 1;
2144 }
2145 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2146 {
2147 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2148 uValue = 3;
2149 }
2150 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2151 {
2152 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2153 uValue = 2;
2154 }
2155 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2156 {
2157 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2158 uValue = 0;
2159 }
2160 else
2161 {
2162 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2163 uValue = 6;
2164 }
2165 uValue <<= 16;
2166
2167 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2168 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2169 uValue |= (uint32_t)uTscRatio << 24;
2170
2171 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2172 }
2173 else
2174 {
2175 /* Probably more stuff here, but intel doesn't want to tell us. */
2176 uValue = pRange->uValue;
2177        uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is the only documented value. */
2178 }
2179
2180 *puValue = uValue;
2181 return VINF_SUCCESS;
2182}
2183
2184
2185/** @callback_method_impl{FNCPUMWRMSR} */
2186static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2187{
2188 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2189 /** @todo P4 bus frequency config */
2190 return VINF_SUCCESS;
2191}
2192
2193
2194/** @callback_method_impl{FNCPUMRDMSR} */
2195static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2196{
2197 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2198
2199 /* Convert the scalable bus frequency to the encoding in the intel manual (for core+). */
2200 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2201 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2202 *puValue = 5;
2203 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2204 *puValue = 1;
2205 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2206 *puValue = 3;
2207 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2208 *puValue = 2;
2209 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2210 *puValue = 0;
2211 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2212 *puValue = 4;
2213 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2214 *puValue = 6;
2215
2216 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2217
2218 return VINF_SUCCESS;
2219}
2220
2221
2222/** @callback_method_impl{FNCPUMRDMSR} */
2223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2224{
2225 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2226
2227 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2228 PVM pVM = pVCpu->CTX_SUFF(pVM);
2229 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2230 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2231 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2232 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2233 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2234
2235 /* Ivy bridge has a minimum operating ratio as well. */
2236 if (true) /** @todo detect sandy bridge. */
2237 uValue |= (uint64_t)uTscRatio << 48;
2238
2239 *puValue = uValue;
2240 return VINF_SUCCESS;
2241}
2242
2243
2244/** @callback_method_impl{FNCPUMRDMSR} */
2245static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2246{
2247 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2248
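    /* Clear bits 16:8 (the flex ratio field) of the configured default and refill
       bits 15:8 with the guest's TSC-to-scalable-bus ratio computed below. */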
2249 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2250
2251 PVM pVM = pVCpu->CTX_SUFF(pVM);
2252 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2253 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2254 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2255 uValue |= (uint32_t)uTscRatio << 8;
2256
2257 *puValue = uValue;
2258 return VINF_SUCCESS;
2259}
2260
2261
2262/** @callback_method_impl{FNCPUMWRMSR} */
2263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2264{
2265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2266 /** @todo implement writing MSR_FLEX_RATIO. */
2267 return VINF_SUCCESS;
2268}
2269
2270
2271/** @callback_method_impl{FNCPUMRDMSR} */
2272static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2273{
2274 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2275 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2276 return VINF_SUCCESS;
2277}
2278
2279
2280/** @callback_method_impl{FNCPUMWRMSR} */
2281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2282{
2283 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2284
2285 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2286 {
2287 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2288 return VERR_CPUM_RAISE_GP_0;
2289 }
2290#if 0 /** @todo check what real (old) hardware does. */
2291 if ((uValue & 7) >= 5)
2292 {
2293 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2294 return VERR_CPUM_RAISE_GP_0;
2295 }
2296#endif
2297 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2298 return VINF_SUCCESS;
2299}
2300
2301
2302/** @callback_method_impl{FNCPUMRDMSR} */
2303static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2304{
2305 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2306 /** @todo implement I/O mwait wakeup. */
2307 *puValue = 0;
2308 return VINF_SUCCESS;
2309}
2310
2311
2312/** @callback_method_impl{FNCPUMWRMSR} */
2313static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2314{
2315 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2316 /** @todo implement I/O mwait wakeup. */
2317 return VINF_SUCCESS;
2318}
2319
2320
2321/** @callback_method_impl{FNCPUMRDMSR} */
2322static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2323{
2324 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2325 /** @todo implement last branch records. */
2326 *puValue = 0;
2327 return VINF_SUCCESS;
2328}
2329
2330
2331/** @callback_method_impl{FNCPUMWRMSR} */
2332static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2333{
2334 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2335 /** @todo implement last branch records. */
2336 return VINF_SUCCESS;
2337}
2338
2339
2340/** @callback_method_impl{FNCPUMRDMSR} */
2341static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2342{
2343 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2344 /** @todo implement last branch records. */
2345 *puValue = 0;
2346 return VINF_SUCCESS;
2347}
2348
2349
2350/** @callback_method_impl{FNCPUMWRMSR} */
2351static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2352{
2353 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2354 /** @todo implement last branch records. */
2355 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2356 * if the rest of the bits are zero. Automatic sign extending?
2357 * Investigate! */
2358 if (!X86_IS_CANONICAL(uValue))
2359 {
2360 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2361 return VERR_CPUM_RAISE_GP_0;
2362 }
2363 return VINF_SUCCESS;
2364}
2365
2366
2367/** @callback_method_impl{FNCPUMRDMSR} */
2368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2369{
2370 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2371 /** @todo implement last branch records. */
2372 *puValue = 0;
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/** @callback_method_impl{FNCPUMWRMSR} */
2378static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2379{
2380 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2381 /** @todo implement last branch records. */
2382 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2383 * if the rest of the bits are zero. Automatic sign extending?
2384 * Investigate! */
2385 if (!X86_IS_CANONICAL(uValue))
2386 {
2387 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2388 return VERR_CPUM_RAISE_GP_0;
2389 }
2390 return VINF_SUCCESS;
2391}
2392
2393
2394/** @callback_method_impl{FNCPUMRDMSR} */
2395static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2396{
2397 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2398 /** @todo implement last branch records. */
2399 *puValue = 0;
2400 return VINF_SUCCESS;
2401}
2402
2403
2404/** @callback_method_impl{FNCPUMWRMSR} */
2405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2406{
2407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2408 /** @todo implement last branch records. */
2409 return VINF_SUCCESS;
2410}
2411
2412
2413/** @callback_method_impl{FNCPUMRDMSR} */
2414static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2415{
2416 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2417 *puValue = pRange->uValue;
2418 return VINF_SUCCESS;
2419}
2420
2421
2422/** @callback_method_impl{FNCPUMWRMSR} */
2423static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2424{
2425 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2426 return VINF_SUCCESS;
2427}
2428
2429
2430/** @callback_method_impl{FNCPUMRDMSR} */
2431static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2432{
2433 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2434 *puValue = pRange->uValue;
2435 return VINF_SUCCESS;
2436}
2437
2438
2439/** @callback_method_impl{FNCPUMWRMSR} */
2440static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2441{
2442 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2443 return VINF_SUCCESS;
2444}
2445
2446
2447/** @callback_method_impl{FNCPUMRDMSR} */
2448static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2449{
2450 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2451 *puValue = pRange->uValue;
2452 return VINF_SUCCESS;
2453}
2454
2455
2456/** @callback_method_impl{FNCPUMWRMSR} */
2457static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2458{
2459 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2460 return VINF_SUCCESS;
2461}
2462
2463
2464/** @callback_method_impl{FNCPUMRDMSR} */
2465static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2466{
2467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2468 /** @todo machine check. */
2469 *puValue = pRange->uValue;
2470 return VINF_SUCCESS;
2471}
2472
2473
2474/** @callback_method_impl{FNCPUMWRMSR} */
2475static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2476{
2477 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2478 /** @todo machine check. */
2479 return VINF_SUCCESS;
2480}
2481
2482
2483/** @callback_method_impl{FNCPUMRDMSR} */
2484static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2485{
2486 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2487 *puValue = 0;
2488 return VINF_SUCCESS;
2489}
2490
2491
2492/** @callback_method_impl{FNCPUMWRMSR} */
2493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2494{
2495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2496 return VINF_SUCCESS;
2497}
2498
2499
2500/** @callback_method_impl{FNCPUMRDMSR} */
2501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2502{
2503 RT_NOREF_PV(idMsr);
2504 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2505 AssertRC(rc);
2506 return VINF_SUCCESS;
2507}
2508
2509
2510/** @callback_method_impl{FNCPUMWRMSR} */
2511static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2512{
2513 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2514 /* This CRx interface differs from the MOV CRx, GReg interface in that
2515 #GP(0) isn't raised if unsupported bits are written to. Instead they
2516 are simply ignored and masked off. (Pentium M Dothan) */
2517 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2518 * any, gain. */
2519 return VINF_SUCCESS;
2520}
2521
2522
2523/** @callback_method_impl{FNCPUMRDMSR} */
2524static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2525{
2526 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2527 /** @todo implement CPUID masking. */
2528 *puValue = UINT64_MAX;
2529 return VINF_SUCCESS;
2530}
2531
2532
2533/** @callback_method_impl{FNCPUMWRMSR} */
2534static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2535{
2536 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2537 /** @todo implement CPUID masking. */
2538 return VINF_SUCCESS;
2539}
2540
2541
2542/** @callback_method_impl{FNCPUMRDMSR} */
2543static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2544{
2545 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2546 /** @todo implement CPUID masking. */
2547 *puValue = 0;
2548 return VINF_SUCCESS;
2549}
2550
2551
2552/** @callback_method_impl{FNCPUMWRMSR} */
2553static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2554{
2555 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2556 /** @todo implement CPUID masking. */
2557 return VINF_SUCCESS;
2558}
2559
2560
2561
2562/** @callback_method_impl{FNCPUMRDMSR} */
2563static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2564{
2565 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2566 /** @todo implement CPUID masking. */
2567 *puValue = UINT64_MAX;
2568 return VINF_SUCCESS;
2569}
2570
2571
2572/** @callback_method_impl{FNCPUMWRMSR} */
2573static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2574{
2575 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2576 /** @todo implement CPUID masking. */
2577 return VINF_SUCCESS;
2578}
2579
2580
2581
2582/** @callback_method_impl{FNCPUMRDMSR} */
2583static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2584{
2585 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2586 /** @todo implement AES-NI. */
2587 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2588 return VINF_SUCCESS;
2589}
2590
2591
2592/** @callback_method_impl{FNCPUMWRMSR} */
2593static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2594{
2595 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2596 /** @todo implement AES-NI. */
2597 return VERR_CPUM_RAISE_GP_0;
2598}
2599
2600
2601/** @callback_method_impl{FNCPUMRDMSR} */
2602static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2603{
2604 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2605 /** @todo implement intel C states. */
2606 *puValue = pRange->uValue;
2607 return VINF_SUCCESS;
2608}
2609
2610
2611/** @callback_method_impl{FNCPUMWRMSR} */
2612static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2613{
2614 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2615 /** @todo implement intel C states. */
2616 return VINF_SUCCESS;
2617}
2618
2619
2620/** @callback_method_impl{FNCPUMRDMSR} */
2621static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2622{
2623 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2624 /** @todo implement last-branch-records. */
2625 *puValue = 0;
2626 return VINF_SUCCESS;
2627}
2628
2629
2630/** @callback_method_impl{FNCPUMWRMSR} */
2631static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2632{
2633 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2634 /** @todo implement last-branch-records. */
2635 return VINF_SUCCESS;
2636}
2637
2638
2639/** @callback_method_impl{FNCPUMRDMSR} */
2640static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2641{
2642 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2643 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2644 *puValue = 0;
2645 return VINF_SUCCESS;
2646}
2647
2648
2649/** @callback_method_impl{FNCPUMWRMSR} */
2650static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2651{
2652 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2653 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2654 return VINF_SUCCESS;
2655}
2656
2657
2658/** @callback_method_impl{FNCPUMRDMSR} */
2659static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2660{
2661 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2662 /** @todo implement memory VLW? */
2663 *puValue = pRange->uValue;
2664 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2665 AAJ49/AAK51/????, which documents the inversion of this bit. The
2666 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2667       that corrects things. Some guesses at the other bits:
2668 bit 2 = INTR
2669 bit 4 = SMI
2670 bit 5 = INIT
2671 bit 6 = NMI */
2672 return VINF_SUCCESS;
2673}
2674
2675
2676/** @callback_method_impl{FNCPUMRDMSR} */
2677static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2678{
2679 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2680 /** @todo intel power management */
2681 *puValue = 0;
2682 return VINF_SUCCESS;
2683}
2684
2685
2686/** @callback_method_impl{FNCPUMWRMSR} */
2687static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2688{
2689 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2690 /** @todo intel power management */
2691 return VINF_SUCCESS;
2692}
2693
2694
2695/** @callback_method_impl{FNCPUMRDMSR} */
2696static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2697{
2698 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2699 /** @todo intel performance counters. */
2700 *puValue = 0;
2701 return VINF_SUCCESS;
2702}
2703
2704
2705/** @callback_method_impl{FNCPUMWRMSR} */
2706static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2707{
2708 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2709 /** @todo intel performance counters. */
2710 return VINF_SUCCESS;
2711}
2712
2713
2714/** @callback_method_impl{FNCPUMRDMSR} */
2715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2716{
2717 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2718 /** @todo intel performance counters. */
2719 *puValue = 0;
2720 return VINF_SUCCESS;
2721}
2722
2723
2724/** @callback_method_impl{FNCPUMWRMSR} */
2725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2726{
2727 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2728 /** @todo intel performance counters. */
2729 return VINF_SUCCESS;
2730}
2731
2732
2733/** @callback_method_impl{FNCPUMRDMSR} */
2734static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2735{
2736 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2737 /** @todo intel power management. */
2738 *puValue = 0;
2739 return VINF_SUCCESS;
2740}
2741
2742
2743/** @callback_method_impl{FNCPUMRDMSR} */
2744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2745{
2746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2747 /** @todo intel power management. */
2748 *puValue = 0;
2749 return VINF_SUCCESS;
2750}
2751
2752
2753/** @callback_method_impl{FNCPUMRDMSR} */
2754static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2755{
2756 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2757 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2758 *puValue = 0;
2759 return VINF_SUCCESS;
2760}
2761
2762
2763/** @callback_method_impl{FNCPUMWRMSR} */
2764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2765{
2766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2767 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2768 return VINF_SUCCESS;
2769}
2770
2771
2772/** @callback_method_impl{FNCPUMRDMSR} */
2773static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2774{
2775 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2776 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2777 *puValue = 0;
2778 return VINF_SUCCESS;
2779}
2780
2781
2782/** @callback_method_impl{FNCPUMWRMSR} */
2783static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2784{
2785 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2786 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2787 return VINF_SUCCESS;
2788}
2789
2790
2791/** @callback_method_impl{FNCPUMRDMSR} */
2792static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2793{
2794 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2795 /** @todo intel RAPL. */
2796 *puValue = pRange->uValue;
2797 return VINF_SUCCESS;
2798}
2799
2800
2801/** @callback_method_impl{FNCPUMWRMSR} */
2802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2803{
2804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2805 /* Note! This is documented as read only and except for a Silvermont sample has
2806 always been classified as read only. This is just here to make it compile. */
2807 return VINF_SUCCESS;
2808}
2809
2810
2811/** @callback_method_impl{FNCPUMRDMSR} */
2812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2813{
2814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2815 /** @todo intel power management. */
2816 *puValue = 0;
2817 return VINF_SUCCESS;
2818}
2819
2820
2821/** @callback_method_impl{FNCPUMWRMSR} */
2822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2823{
2824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2825 /** @todo intel power management. */
2826 return VINF_SUCCESS;
2827}
2828
2829
2830/** @callback_method_impl{FNCPUMRDMSR} */
2831static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2832{
2833 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2834 /** @todo intel power management. */
2835 *puValue = 0;
2836 return VINF_SUCCESS;
2837}
2838
2839
2840/** @callback_method_impl{FNCPUMWRMSR} */
2841static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2842{
2843 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2844 /* Note! This is documented as read only and except for a Silvermont sample has
2845 always been classified as read only. This is just here to make it compile. */
2846 return VINF_SUCCESS;
2847}
2848
2849
2850/** @callback_method_impl{FNCPUMRDMSR} */
2851static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2852{
2853 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2854 /** @todo intel RAPL. */
2855 *puValue = 0;
2856 return VINF_SUCCESS;
2857}
2858
2859
2860/** @callback_method_impl{FNCPUMWRMSR} */
2861static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2862{
2863 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2864 /** @todo intel RAPL. */
2865 return VINF_SUCCESS;
2866}
2867
2868
2869/** @callback_method_impl{FNCPUMRDMSR} */
2870static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2871{
2872 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2873 /** @todo intel power management. */
2874 *puValue = 0;
2875 return VINF_SUCCESS;
2876}
2877
2878
2879/** @callback_method_impl{FNCPUMRDMSR} */
2880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2881{
2882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2883 /** @todo intel power management. */
2884 *puValue = 0;
2885 return VINF_SUCCESS;
2886}
2887
2888
2889/** @callback_method_impl{FNCPUMRDMSR} */
2890static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2891{
2892 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2893 /** @todo intel power management. */
2894 *puValue = 0;
2895 return VINF_SUCCESS;
2896}
2897
2898
2899/** @callback_method_impl{FNCPUMRDMSR} */
2900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2901{
2902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2903 /** @todo intel RAPL. */
2904 *puValue = 0;
2905 return VINF_SUCCESS;
2906}
2907
2908
2909/** @callback_method_impl{FNCPUMWRMSR} */
2910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2911{
2912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2913 /** @todo intel RAPL. */
2914 return VINF_SUCCESS;
2915}
2916
2917
2918/** @callback_method_impl{FNCPUMRDMSR} */
2919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2920{
2921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2922 /** @todo intel power management. */
2923 *puValue = 0;
2924 return VINF_SUCCESS;
2925}
2926
2927
2928/** @callback_method_impl{FNCPUMRDMSR} */
2929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2930{
2931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2932 /** @todo intel power management. */
2933 *puValue = 0;
2934 return VINF_SUCCESS;
2935}
2936
2937
2938/** @callback_method_impl{FNCPUMRDMSR} */
2939static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2940{
2941 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2942 /** @todo intel power management. */
2943 *puValue = 0;
2944 return VINF_SUCCESS;
2945}
2946
2947
2948/** @callback_method_impl{FNCPUMRDMSR} */
2949static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2950{
2951 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2952 /** @todo intel RAPL. */
2953 *puValue = 0;
2954 return VINF_SUCCESS;
2955}
2956
2957
2958/** @callback_method_impl{FNCPUMWRMSR} */
2959static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2960{
2961 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2962 /** @todo intel RAPL. */
2963 return VINF_SUCCESS;
2964}
2965
2966
2967/** @callback_method_impl{FNCPUMRDMSR} */
2968static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2969{
2970 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2971 /** @todo intel power management. */
2972 *puValue = 0;
2973 return VINF_SUCCESS;
2974}
2975
2976
2977/** @callback_method_impl{FNCPUMRDMSR} */
2978static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2979{
2980 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2981 /** @todo intel RAPL. */
2982 *puValue = 0;
2983 return VINF_SUCCESS;
2984}
2985
2986
2987/** @callback_method_impl{FNCPUMWRMSR} */
2988static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2989{
2990 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2991 /** @todo intel RAPL. */
2992 return VINF_SUCCESS;
2993}
2994
2995
2996/** @callback_method_impl{FNCPUMRDMSR} */
2997static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2998{
2999 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3000 /** @todo intel power management. */
3001 *puValue = 0;
3002 return VINF_SUCCESS;
3003}
3004
3005
3006/** @callback_method_impl{FNCPUMRDMSR} */
3007static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3008{
3009 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3010 /** @todo intel RAPL. */
3011 *puValue = 0;
3012 return VINF_SUCCESS;
3013}
3014
3015
3016/** @callback_method_impl{FNCPUMWRMSR} */
3017static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3018{
3019 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3020 /** @todo intel RAPL. */
3021 return VINF_SUCCESS;
3022}
3023
3024
3025/** @callback_method_impl{FNCPUMRDMSR} */
3026static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3027{
3028 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3029 /** @todo intel power management. */
3030 *puValue = 0;
3031 return VINF_SUCCESS;
3032}
3033
3034
3035/** @callback_method_impl{FNCPUMRDMSR} */
3036static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3037{
3038 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3039 /** @todo intel RAPL. */
3040 *puValue = 0;
3041 return VINF_SUCCESS;
3042}
3043
3044
3045/** @callback_method_impl{FNCPUMWRMSR} */
3046static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3047{
3048 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3049 /** @todo intel RAPL. */
3050 return VINF_SUCCESS;
3051}
3052
3053
3054/** @callback_method_impl{FNCPUMRDMSR} */
3055static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3056{
3057 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3058 /** @todo intel power management. */
3059 *puValue = pRange->uValue;
3060 return VINF_SUCCESS;
3061}
3062
3063
3064/** @callback_method_impl{FNCPUMRDMSR} */
3065static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3066{
3067 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3068 /** @todo intel power management. */
3069 *puValue = pRange->uValue;
3070 return VINF_SUCCESS;
3071}
3072
3073
3074/** @callback_method_impl{FNCPUMRDMSR} */
3075static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3076{
3077 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3078 /** @todo intel power management. */
3079 *puValue = pRange->uValue;
3080 return VINF_SUCCESS;
3081}
3082
3083
3084/** @callback_method_impl{FNCPUMRDMSR} */
3085static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3086{
3087 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3088 /** @todo intel power management. */
3089 *puValue = 0;
3090 return VINF_SUCCESS;
3091}
3092
3093
3094/** @callback_method_impl{FNCPUMWRMSR} */
3095static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3096{
3097 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3098 /** @todo intel power management. */
3099 return VINF_SUCCESS;
3100}
3101
3102
3103/** @callback_method_impl{FNCPUMRDMSR} */
3104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3105{
3106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3107 /** @todo intel power management. */
3108 *puValue = 0;
3109 return VINF_SUCCESS;
3110}
3111
3112
3113/** @callback_method_impl{FNCPUMWRMSR} */
3114static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3115{
3116 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3117 /** @todo intel power management. */
3118 return VINF_SUCCESS;
3119}
3120
3121
3122/** @callback_method_impl{FNCPUMRDMSR} */
3123static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3124{
3125 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3126 /** @todo uncore msrs. */
3127 *puValue = 0;
3128 return VINF_SUCCESS;
3129}
3130
3131
3132/** @callback_method_impl{FNCPUMWRMSR} */
3133static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3134{
3135 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3136 /** @todo uncore msrs. */
3137 return VINF_SUCCESS;
3138}
3139
3140
3141/** @callback_method_impl{FNCPUMRDMSR} */
3142static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3143{
3144 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3145 /** @todo uncore msrs. */
3146 *puValue = 0;
3147 return VINF_SUCCESS;
3148}
3149
3150
3151/** @callback_method_impl{FNCPUMWRMSR} */
3152static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3153{
3154 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3155 /** @todo uncore msrs. */
3156 return VINF_SUCCESS;
3157}
3158
3159
3160/** @callback_method_impl{FNCPUMRDMSR} */
3161static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3162{
3163 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3164 /** @todo uncore msrs. */
3165 *puValue = 0;
3166 return VINF_SUCCESS;
3167}
3168
3169
3170/** @callback_method_impl{FNCPUMWRMSR} */
3171static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3172{
3173 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3174 /** @todo uncore msrs. */
3175 return VINF_SUCCESS;
3176}
3177
3178
3179/** @callback_method_impl{FNCPUMRDMSR} */
3180static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3181{
3182 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3183 /** @todo uncore msrs. */
3184 *puValue = 0;
3185 return VINF_SUCCESS;
3186}
3187
3188
3189/** @callback_method_impl{FNCPUMWRMSR} */
3190static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3191{
3192 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3193 /** @todo uncore msrs. */
3194 return VINF_SUCCESS;
3195}
3196
3197
3198/** @callback_method_impl{FNCPUMRDMSR} */
3199static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3200{
3201 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3202 /** @todo uncore msrs. */
3203 *puValue = 0;
3204 return VINF_SUCCESS;
3205}
3206
3207
3208/** @callback_method_impl{FNCPUMWRMSR} */
3209static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3210{
3211 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3212 /** @todo uncore msrs. */
3213 return VINF_SUCCESS;
3214}
3215
3216
3217/** @callback_method_impl{FNCPUMRDMSR} */
3218static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3219{
3220 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3221 /** @todo uncore msrs. */
3222 *puValue = 0;
3223 return VINF_SUCCESS;
3224}
3225
3226
3227/** @callback_method_impl{FNCPUMRDMSR} */
3228static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3229{
3230 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3231 /** @todo uncore msrs. */
3232 *puValue = 0;
3233 return VINF_SUCCESS;
3234}
3235
3236
3237/** @callback_method_impl{FNCPUMWRMSR} */
3238static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3239{
3240 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3241 /** @todo uncore msrs. */
3242 return VINF_SUCCESS;
3243}
3244
3245
3246/** @callback_method_impl{FNCPUMRDMSR} */
3247static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3248{
3249 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3250 /** @todo uncore msrs. */
3251 *puValue = 0;
3252 return VINF_SUCCESS;
3253}
3254
3255
3256/** @callback_method_impl{FNCPUMWRMSR} */
3257static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3258{
3259 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3260 /** @todo uncore msrs. */
3261 return VINF_SUCCESS;
3262}
3263
3264
3265/** @callback_method_impl{FNCPUMRDMSR} */
3266static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3267{
3268 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3269
3270 /*
3271 * 31:0 is SMI count (read only), 63:32 reserved.
3272 * Since we don't do SMI, the count is always zero.
3273 */
3274 *puValue = 0;
3275 return VINF_SUCCESS;
3276}
3277
3278
3279/** @callback_method_impl{FNCPUMRDMSR} */
3280static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3281{
3282 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3283 /** @todo implement enhanced multi-thread thermal monitoring? */
3284 *puValue = pRange->uValue;
3285 return VINF_SUCCESS;
3286}
3287
3288
3289/** @callback_method_impl{FNCPUMWRMSR} */
3290static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3291{
3292 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3293 /** @todo implement enhanced multi-thread thermal monitoring? */
3294 return VINF_SUCCESS;
3295}
3296
3297
3298/** @callback_method_impl{FNCPUMRDMSR} */
3299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3300{
3301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3302 /** @todo SMM & C-states? */
3303 *puValue = 0;
3304 return VINF_SUCCESS;
3305}
3306
3307
3308/** @callback_method_impl{FNCPUMWRMSR} */
3309static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3310{
3311 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3312 /** @todo SMM & C-states? */
3313 return VINF_SUCCESS;
3314}
3315
3316
3317/** @callback_method_impl{FNCPUMRDMSR} */
3318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3319{
3320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3321 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3322 *puValue = 0;
3323 return VINF_SUCCESS;
3324}
3325
3326
3327/** @callback_method_impl{FNCPUMWRMSR} */
3328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3329{
3330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3331 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3332 return VINF_SUCCESS;
3333}
3334
3335
3336/** @callback_method_impl{FNCPUMRDMSR} */
3337static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3338{
3339 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3340 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3341 *puValue = 0;
3342 return VINF_SUCCESS;
3343}
3344
3345
3346/** @callback_method_impl{FNCPUMWRMSR} */
3347static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3348{
3349 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3350 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3351 return VINF_SUCCESS;
3352}
3353
3354
3355/** @callback_method_impl{FNCPUMRDMSR} */
3356static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3357{
3358 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3359 /** @todo Core2+ platform environment control interface control register? */
3360 *puValue = 0;
3361 return VINF_SUCCESS;
3362}
3363
3364
3365/** @callback_method_impl{FNCPUMWRMSR} */
3366static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3367{
3368 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3369 /** @todo Core2+ platform environment control interface control register? */
3370 return VINF_SUCCESS;
3371}
3372
3373
3374/** @callback_method_impl{FNCPUMRDMSR} */
3375static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3376{
3377 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3378 *puValue = 0;
3379 return VINF_SUCCESS;
3380}
3381
3382
3383/*
3384 * Multiple vendor P6 MSRs.
3385 * Multiple vendor P6 MSRs.
3386 * Multiple vendor P6 MSRs.
3387 *
3388 * These MSRs were introduced with the P6 but not elevated to architectural
3389 * MSRs, despite other vendors implementing them.
3390 */
3391
3392
3393/** @callback_method_impl{FNCPUMRDMSR} */
3394static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3395{
3396 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3397 /* AMD seems to just record RIP, while intel claims to record RIP+CS.BASE
3398 if I read the docs correctly, thus the need for separate functions. */
3399 /** @todo implement last branch records. */
3400 *puValue = 0;
3401 return VINF_SUCCESS;
3402}
3403
3404
3405/** @callback_method_impl{FNCPUMRDMSR} */
3406static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3407{
3408 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3409 /** @todo implement last branch records. */
3410 *puValue = 0;
3411 return VINF_SUCCESS;
3412}
3413
3414
3415/** @callback_method_impl{FNCPUMRDMSR} */
3416static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3417{
3418 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3419 /** @todo implement last exception records. */
3420 *puValue = 0;
3421 return VINF_SUCCESS;
3422}
3423
3424
3425/** @callback_method_impl{FNCPUMWRMSR} */
3426static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3427{
3428 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3429 /** @todo implement last exception records. */
3430 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3431 a non-canonical address. */
3432 return VINF_SUCCESS;
3433}
3434
3435
3436/** @callback_method_impl{FNCPUMRDMSR} */
3437static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3438{
3439 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3440 /** @todo implement last exception records. */
3441 *puValue = 0;
3442 return VINF_SUCCESS;
3443}
3444
3445
3446/** @callback_method_impl{FNCPUMWRMSR} */
3447static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3448{
3449 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3450 /** @todo implement last exception records. */
3451 return VINF_SUCCESS;
3452}
3453
3454
3455
3456/*
3457 * AMD specific
3458 * AMD specific
3459 * AMD specific
3460 */
3461
3462
3463/** @callback_method_impl{FNCPUMRDMSR} */
3464static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3465{
3466 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3467 /** @todo Implement TscRateMsr */
3468 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3469 return VINF_SUCCESS;
3470}
3471
3472
3473/** @callback_method_impl{FNCPUMWRMSR} */
3474static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3475{
3476 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3477 /** @todo Implement TscRateMsr */
3478 return VINF_SUCCESS;
3479}
3480
3481
3482/** @callback_method_impl{FNCPUMRDMSR} */
3483static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3484{
3485 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3486 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3487 /* Note: Only listed in the BKDG for Family 15h. */
3488 *puValue = 0;
3489 return VINF_SUCCESS;
3490}
3491
3492
3493/** @callback_method_impl{FNCPUMWRMSR} */
3494static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3495{
3496 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3497 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3498 return VINF_SUCCESS;
3499}
3500
3501
3502/** @callback_method_impl{FNCPUMRDMSR} */
3503static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3504{
3505 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3506 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3507 /* Note: Only listed in the BKDG for Family 15h. */
3508 *puValue = 0;
3509 return VINF_SUCCESS;
3510}
3511
3512
3513/** @callback_method_impl{FNCPUMWRMSR} */
3514static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3515{
3516 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3517 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3518 return VINF_SUCCESS;
3519}
3520
3521
3522/** @callback_method_impl{FNCPUMRDMSR} */
3523static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3524{
3525 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3526 /** @todo machine check. */
3527 *puValue = 0;
3528 return VINF_SUCCESS;
3529}
3530
3531
3532/** @callback_method_impl{FNCPUMWRMSR} */
3533static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3534{
3535 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3536 /** @todo machine check. */
3537 return VINF_SUCCESS;
3538}
3539
3540
3541/** @callback_method_impl{FNCPUMRDMSR} */
3542static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3543{
3544 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3545 /** @todo AMD performance events. */
3546 *puValue = 0;
3547 return VINF_SUCCESS;
3548}
3549
3550
3551/** @callback_method_impl{FNCPUMWRMSR} */
3552static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3553{
3554 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3555 /** @todo AMD performance events. */
3556 return VINF_SUCCESS;
3557}
3558
3559
3560/** @callback_method_impl{FNCPUMRDMSR} */
3561static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3562{
3563 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3564 /** @todo AMD performance events. */
3565 *puValue = 0;
3566 return VINF_SUCCESS;
3567}
3568
3569
3570/** @callback_method_impl{FNCPUMWRMSR} */
3571static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3572{
3573 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3574 /** @todo AMD performance events. */
3575 return VINF_SUCCESS;
3576}
3577
3578
3579/** @callback_method_impl{FNCPUMRDMSR} */
3580static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3581{
3582 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3583 /** @todo AMD SYS_CFG */
3584 *puValue = pRange->uValue;
3585 return VINF_SUCCESS;
3586}
3587
3588
3589/** @callback_method_impl{FNCPUMWRMSR} */
3590static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3591{
3592 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3593 /** @todo AMD SYS_CFG */
3594 return VINF_SUCCESS;
3595}
3596
3597
3598/** @callback_method_impl{FNCPUMRDMSR} */
3599static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3600{
3601 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3602 /** @todo AMD HW_CFG */
3603 *puValue = 0;
3604 return VINF_SUCCESS;
3605}
3606
3607
3608/** @callback_method_impl{FNCPUMWRMSR} */
3609static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3610{
3611 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3612 /** @todo AMD HW_CFG */
3613 return VINF_SUCCESS;
3614}
3615
3616
3617/** @callback_method_impl{FNCPUMRDMSR} */
3618static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3619{
3620 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3621 /** @todo AMD IorrMask/IorrBase */
3622 *puValue = 0;
3623 return VINF_SUCCESS;
3624}
3625
3626
3627/** @callback_method_impl{FNCPUMWRMSR} */
3628static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3629{
3630 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3631 /** @todo AMD IorrMask/IorrBase */
3632 return VINF_SUCCESS;
3633}
3634
3635
3636/** @callback_method_impl{FNCPUMRDMSR} */
3637static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3638{
3639 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3640 /** @todo AMD IorrMask/IorrBase */
3641 *puValue = 0;
3642 return VINF_SUCCESS;
3643}
3644
3645
3646/** @callback_method_impl{FNCPUMWRMSR} */
3647static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3648{
3649 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3650 /** @todo AMD IorrMask/IorrBase */
3651 return VINF_SUCCESS;
3652}
3653
3654
3655/** @callback_method_impl{FNCPUMRDMSR} */
3656static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3657{
3658 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3659 *puValue = 0;
3660 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3661 * for TOPMEM2. */
3662 //if (pRange->uValue == 0)
3663 // *puValue = _4G - RamHoleSize;
3664 return VINF_SUCCESS;
3665}
3666
3667
3668/** @callback_method_impl{FNCPUMWRMSR} */
3669static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3670{
3671 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3672 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3673 return VINF_SUCCESS;
3674}
3675
3676
3677/** @callback_method_impl{FNCPUMRDMSR} */
3678static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3679{
3680 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3681 /** @todo AMD NB_CFG1 */
3682 *puValue = 0;
3683 return VINF_SUCCESS;
3684}
3685
3686
3687/** @callback_method_impl{FNCPUMWRMSR} */
3688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3689{
3690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3691 /** @todo AMD NB_CFG1 */
3692 return VINF_SUCCESS;
3693}
3694
3695
3696/** @callback_method_impl{FNCPUMRDMSR} */
3697static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3698{
3699 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3700 /** @todo machine check. */
3701 *puValue = 0;
3702 return VINF_SUCCESS;
3703}
3704
3705
3706/** @callback_method_impl{FNCPUMWRMSR} */
3707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3708{
3709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3710 /** @todo machine check. */
3711 return VINF_SUCCESS;
3712}
3713
3714
3715/** @callback_method_impl{FNCPUMRDMSR} */
3716static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3717{
3718 RT_NOREF_PV(idMsr);
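    /* Each CPU name MSR returns half of a CPUID extended leaf: pRange->uValue / 2 + 0x80000001
       selects the leaf, and the low bit of uValue picks the EAX:EBX or ECX:EDX half. */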
3719 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3720 if (pLeaf)
3721 {
3722 if (!(pRange->uValue & 1))
3723 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3724 else
3725 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3726 }
3727 else
3728 *puValue = 0;
3729 return VINF_SUCCESS;
3730}
3731
3732
3733/** @callback_method_impl{FNCPUMWRMSR} */
3734static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3735{
3736 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3737 /** @todo Remember guest programmed CPU name. */
3738 return VINF_SUCCESS;
3739}
3740
3741
3742/** @callback_method_impl{FNCPUMRDMSR} */
3743static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3744{
3745 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3746 /** @todo AMD HTC. */
3747 *puValue = pRange->uValue;
3748 return VINF_SUCCESS;
3749}
3750
3751
3752/** @callback_method_impl{FNCPUMWRMSR} */
3753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3754{
3755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3756 /** @todo AMD HTC. */
3757 return VINF_SUCCESS;
3758}
3759
3760
3761/** @callback_method_impl{FNCPUMRDMSR} */
3762static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3763{
3764 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3765 /** @todo AMD STC. */
3766 *puValue = 0;
3767 return VINF_SUCCESS;
3768}
3769
3770
3771/** @callback_method_impl{FNCPUMWRMSR} */
3772static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3773{
3774 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3775 /** @todo AMD STC. */
3776 return VINF_SUCCESS;
3777}
3778
3779
3780/** @callback_method_impl{FNCPUMRDMSR} */
3781static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3782{
3783 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3784 /** @todo AMD FIDVID_CTL. */
3785 *puValue = pRange->uValue;
3786 return VINF_SUCCESS;
3787}
3788
3789
3790/** @callback_method_impl{FNCPUMWRMSR} */
3791static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3792{
3793 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3794 /** @todo AMD FIDVID_CTL. */
3795 return VINF_SUCCESS;
3796}
3797
3798
3799/** @callback_method_impl{FNCPUMRDMSR} */
3800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3801{
3802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3803 /** @todo AMD FIDVID_STATUS. */
3804 *puValue = pRange->uValue;
3805 return VINF_SUCCESS;
3806}
3807
3808
3809/** @callback_method_impl{FNCPUMRDMSR} */
3810static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3811{
3812 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3813 /** @todo AMD MC. */
3814 *puValue = 0;
3815 return VINF_SUCCESS;
3816}
3817
3818
3819/** @callback_method_impl{FNCPUMWRMSR} */
3820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3821{
3822 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3823 /** @todo AMD MC. */
3824 return VINF_SUCCESS;
3825}
3826
3827
3828/** @callback_method_impl{FNCPUMRDMSR} */
3829static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3830{
3831 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3832 /** @todo AMD SMM/SMI and I/O trap. */
3833 *puValue = 0;
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** @callback_method_impl{FNCPUMWRMSR} */
3839static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3840{
3841 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3842 /** @todo AMD SMM/SMI and I/O trap. */
3843 return VINF_SUCCESS;
3844}
3845
3846
3847/** @callback_method_impl{FNCPUMRDMSR} */
3848static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3849{
3850 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3851 /** @todo AMD SMM/SMI and I/O trap. */
3852 *puValue = 0;
3853 return VINF_SUCCESS;
3854}
3855
3856
3857/** @callback_method_impl{FNCPUMWRMSR} */
3858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3859{
3860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3861 /** @todo AMD SMM/SMI and I/O trap. */
3862 return VINF_SUCCESS;
3863}
3864
3865
3866/** @callback_method_impl{FNCPUMRDMSR} */
3867static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3868{
3869 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3870 /** @todo Interrupt pending message. */
3871 *puValue = 0;
3872 return VINF_SUCCESS;
3873}
3874
3875
3876/** @callback_method_impl{FNCPUMWRMSR} */
3877static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3878{
3879 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3880 /** @todo Interrupt pending message. */
3881 return VINF_SUCCESS;
3882}
3883
3884
3885/** @callback_method_impl{FNCPUMRDMSR} */
3886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3887{
3888 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3889 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3890 *puValue = 0;
3891 return VINF_SUCCESS;
3892}
3893
3894
3895/** @callback_method_impl{FNCPUMWRMSR} */
3896static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3897{
3898 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3899 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3900 return VINF_SUCCESS;
3901}
3902
3903
3904/** @callback_method_impl{FNCPUMRDMSR} */
3905static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3906{
3907 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3908 /** @todo AMD MMIO Configuration base address. */
3909 *puValue = 0;
3910 return VINF_SUCCESS;
3911}
3912
3913
3914/** @callback_method_impl{FNCPUMWRMSR} */
3915static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3916{
3917 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3918 /** @todo AMD MMIO Configuration base address. */
3919 return VINF_SUCCESS;
3920}
3921
3922
3923/** @callback_method_impl{FNCPUMRDMSR} */
3924static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3925{
3926 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3927 /** @todo AMD 0xc0010059. */
3928 *puValue = 0;
3929 return VINF_SUCCESS;
3930}
3931
3932
3933/** @callback_method_impl{FNCPUMWRMSR} */
3934static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3935{
3936 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3937 /** @todo AMD 0xc0010059. */
3938 return VINF_SUCCESS;
3939}
3940
3941
3942/** @callback_method_impl{FNCPUMRDMSR} */
3943static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3944{
3945 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3946 /** @todo AMD P-states. */
3947 *puValue = pRange->uValue;
3948 return VINF_SUCCESS;
3949}
3950
3951
3952/** @callback_method_impl{FNCPUMRDMSR} */
3953static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3954{
3955 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3956 /** @todo AMD P-states. */
3957 *puValue = pRange->uValue;
3958 return VINF_SUCCESS;
3959}
3960
3961
3962/** @callback_method_impl{FNCPUMWRMSR} */
3963static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3964{
3965 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3966 /** @todo AMD P-states. */
3967 return VINF_SUCCESS;
3968}
3969
3970
3971/** @callback_method_impl{FNCPUMRDMSR} */
3972static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3973{
3974 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3975 /** @todo AMD P-states. */
3976 *puValue = pRange->uValue;
3977 return VINF_SUCCESS;
3978}
3979
3980
3981/** @callback_method_impl{FNCPUMWRMSR} */
3982static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3983{
3984 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3985 /** @todo AMD P-states. */
3986 return VINF_SUCCESS;
3987}
3988
3989
3990/** @callback_method_impl{FNCPUMRDMSR} */
3991static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3992{
3993 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3994 /** @todo AMD P-states. */
3995 *puValue = pRange->uValue;
3996 return VINF_SUCCESS;
3997}
3998
3999
4000/** @callback_method_impl{FNCPUMWRMSR} */
4001static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4002{
4003 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4004 /** @todo AMD P-states. */
4005 return VINF_SUCCESS;
4006}
4007
4008
4009/** @callback_method_impl{FNCPUMRDMSR} */
4010static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4011{
4012 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4013 /** @todo AMD P-states. */
4014 *puValue = pRange->uValue;
4015 return VINF_SUCCESS;
4016}
4017
4018
4019/** @callback_method_impl{FNCPUMWRMSR} */
4020static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4021{
4022 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4023 /** @todo AMD P-states. */
4024 return VINF_SUCCESS;
4025}
4026
4027
4028/** @callback_method_impl{FNCPUMRDMSR} */
4029static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4030{
4031 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4032 /** @todo AMD P-states. */
4033 *puValue = pRange->uValue;
4034 return VINF_SUCCESS;
4035}
4036
4037
4038/** @callback_method_impl{FNCPUMWRMSR} */
4039static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4040{
4041 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4042 /* Note! Writing 0 seems to not GP, not sure if it does anything to the value... */
4043 /** @todo AMD P-states. */
4044 return VINF_SUCCESS;
4045}
4046
4047
4048/** @callback_method_impl{FNCPUMRDMSR} */
4049static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4050{
4051 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4052 /** @todo AMD C-states. */
4053 *puValue = 0;
4054 return VINF_SUCCESS;
4055}
4056
4057
4058/** @callback_method_impl{FNCPUMWRMSR} */
4059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4060{
4061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4062 /** @todo AMD C-states. */
4063 return VINF_SUCCESS;
4064}
4065
4066
4067/** @callback_method_impl{FNCPUMRDMSR} */
4068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4069{
4070 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4071 /** @todo AMD machine checks. */
4072 *puValue = 0;
4073 return VINF_SUCCESS;
4074}
4075
4076
4077/** @callback_method_impl{FNCPUMWRMSR} */
4078static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4079{
4080 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4081 /** @todo AMD machine checks. */
4082 return VINF_SUCCESS;
4083}
4084
4085
4086/** @callback_method_impl{FNCPUMRDMSR} */
4087static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4088{
4089 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4090 /** @todo AMD SMM. */
4091 *puValue = 0;
4092 return VINF_SUCCESS;
4093}
4094
4095
4096/** @callback_method_impl{FNCPUMWRMSR} */
4097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4098{
4099 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4100 /** @todo AMD SMM. */
4101 return VINF_SUCCESS;
4102}
4103
4104
4105/** @callback_method_impl{FNCPUMRDMSR} */
4106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4107{
4108 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4109 /** @todo AMD SMM. */
4110 *puValue = 0;
4111 return VINF_SUCCESS;
4112}
4113
4114
4115/** @callback_method_impl{FNCPUMWRMSR} */
4116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4117{
4118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4119 /** @todo AMD SMM. */
4120 return VINF_SUCCESS;
4121}
4122
4123
4124
4125/** @callback_method_impl{FNCPUMRDMSR} */
4126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4127{
4128 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4129 /** @todo AMD SMM. */
4130 *puValue = 0;
4131 return VINF_SUCCESS;
4132}
4133
4134
4135/** @callback_method_impl{FNCPUMWRMSR} */
4136static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4137{
4138 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4139 /** @todo AMD SMM. */
4140 return VINF_SUCCESS;
4141}
4142
4143
4144/** @callback_method_impl{FNCPUMRDMSR} */
4145static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4146{
4147 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4148 PVM pVM = pVCpu->CTX_SUFF(pVM);
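    /* When SVM is exposed to the guest we report VM_CR with just the LOCK bit set, so the guest
       cannot toggle SVMDIS (see the write handler below, which ignores writes to those bits). */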
4149 if (pVM->cpum.s.GuestFeatures.fSvm)
4150 *puValue = MSR_K8_VM_CR_LOCK;
4151 else
4152 *puValue = 0;
4153 return VINF_SUCCESS;
4154}
4155
4156
4157/** @callback_method_impl{FNCPUMWRMSR} */
4158static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4159{
4160 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4161 PVM pVM = pVCpu->CTX_SUFF(pVM);
4162 if (pVM->cpum.s.GuestFeatures.fSvm)
4163 {
4164 /* Silently ignore writes to LOCK and SVM_DISABLE bit when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
4165 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4166 return VERR_CPUM_RAISE_GP_0;
4167 return VINF_SUCCESS;
4168 }
4169 return VERR_CPUM_RAISE_GP_0;
4170}
4171
4172
4173/** @callback_method_impl{FNCPUMRDMSR} */
4174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4175{
4176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4177 /** @todo AMD IGNNE\# control. */
4178 *puValue = 0;
4179 return VINF_SUCCESS;
4180}
4181
4182
4183/** @callback_method_impl{FNCPUMWRMSR} */
4184static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4185{
4186 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4187 /** @todo AMD IGNNE\# control. */
4188 return VINF_SUCCESS;
4189}
4190
4191
4192/** @callback_method_impl{FNCPUMRDMSR} */
4193static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4194{
4195 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4196 /** @todo AMD SMM. */
4197 *puValue = 0;
4198 return VINF_SUCCESS;
4199}
4200
4201
4202/** @callback_method_impl{FNCPUMWRMSR} */
4203static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4204{
4205 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4206 /** @todo AMD SMM. */
4207 return VINF_SUCCESS;
4208}
4209
4210
4211/** @callback_method_impl{FNCPUMRDMSR} */
4212static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4213{
4214 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4215 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4216 return VINF_SUCCESS;
4217}
4218
4219
4220/** @callback_method_impl{FNCPUMWRMSR} */
4221static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4222{
4223 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
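    /* The host state-save area must be 4KB aligned and within the guest's physical address width,
       otherwise we raise #GP(0). */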
4224 if (uValue & UINT64_C(0xfff))
4225 {
4226 Log(("CPUM: Invalid setting of low 12 bits when writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4227 return VERR_CPUM_RAISE_GP_0;
4228 }
4229
4230 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4231 if (fInvPhysMask & uValue)
4232 {
4233 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4234 idMsr, uValue, uValue & fInvPhysMask));
4235 return VERR_CPUM_RAISE_GP_0;
4236 }
4237
4238 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4239 return VINF_SUCCESS;
4240}
4241
4242
4243/** @callback_method_impl{FNCPUMRDMSR} */
4244static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4245{
4246 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4247 /** @todo AMD SVM. */
4248 *puValue = 0; /* RAZ */
4249 return VINF_SUCCESS;
4250}
4251
4252
4253/** @callback_method_impl{FNCPUMWRMSR} */
4254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4255{
4256 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4257 /** @todo AMD SVM. */
4258 return VINF_SUCCESS;
4259}
4260
4261
4262/** @callback_method_impl{FNCPUMRDMSR} */
4263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4264{
4265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4266 /** @todo AMD SMM. */
4267 *puValue = 0; /* RAZ */
4268 return VINF_SUCCESS;
4269}
4270
4271
4272/** @callback_method_impl{FNCPUMWRMSR} */
4273static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4274{
4275 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4276 /** @todo AMD SMM. */
4277 return VINF_SUCCESS;
4278}
4279
4280
4281/** @callback_method_impl{FNCPUMRDMSR} */
4282static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4283{
4284 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4285 /** @todo AMD SMM/SMI. */
4286 *puValue = 0;
4287 return VINF_SUCCESS;
4288}
4289
4290
4291/** @callback_method_impl{FNCPUMWRMSR} */
4292static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4293{
4294 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4295 /** @todo AMD SMM/SMI. */
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** @callback_method_impl{FNCPUMRDMSR} */
4301static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4302{
4303 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4304 /** @todo AMD OS visible workaround. */
4305 *puValue = pRange->uValue;
4306 return VINF_SUCCESS;
4307}
4308
4309
4310/** @callback_method_impl{FNCPUMWRMSR} */
4311static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4312{
4313 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4314 /** @todo AMD OS visible workaround. */
4315 return VINF_SUCCESS;
4316}
4317
4318
4319/** @callback_method_impl{FNCPUMRDMSR} */
4320static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4321{
4322 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4323 /** @todo AMD OS visible workaround. */
4324 *puValue = 0;
4325 return VINF_SUCCESS;
4326}
4327
4328
4329/** @callback_method_impl{FNCPUMWRMSR} */
4330static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4331{
4332 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4333 /** @todo AMD OS visible workaround. */
4334 return VINF_SUCCESS;
4335}
4336
4337
4338/** @callback_method_impl{FNCPUMRDMSR} */
4339static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4340{
4341 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4342 /** @todo AMD L2I performance counters. */
4343 *puValue = 0;
4344 return VINF_SUCCESS;
4345}
4346
4347
4348/** @callback_method_impl{FNCPUMWRMSR} */
4349static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4350{
4351 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4352 /** @todo AMD L2I performance counters. */
4353 return VINF_SUCCESS;
4354}
4355
4356
4357/** @callback_method_impl{FNCPUMRDMSR} */
4358static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4359{
4360 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4361 /** @todo AMD L2I performance counters. */
4362 *puValue = 0;
4363 return VINF_SUCCESS;
4364}
4365
4366
4367/** @callback_method_impl{FNCPUMWRMSR} */
4368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4369{
4370 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4371 /** @todo AMD L2I performance counters. */
4372 return VINF_SUCCESS;
4373}
4374
4375
4376/** @callback_method_impl{FNCPUMRDMSR} */
4377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4378{
4379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4380 /** @todo AMD Northbridge performance counters. */
4381 *puValue = 0;
4382 return VINF_SUCCESS;
4383}
4384
4385
4386/** @callback_method_impl{FNCPUMWRMSR} */
4387static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4388{
4389 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4390 /** @todo AMD Northbridge performance counters. */
4391 return VINF_SUCCESS;
4392}
4393
4394
4395/** @callback_method_impl{FNCPUMRDMSR} */
4396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4397{
4398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4399 /** @todo AMD Northbridge performance counters. */
4400 *puValue = 0;
4401 return VINF_SUCCESS;
4402}
4403
4404
4405/** @callback_method_impl{FNCPUMWRMSR} */
4406static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4407{
4408 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4409 /** @todo AMD Northbridge performance counters. */
4410 return VINF_SUCCESS;
4411}
4412
4413
4414/** @callback_method_impl{FNCPUMRDMSR} */
4415static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4416{
4417 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4418 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4419 * cpus. Needs to be explored and K7 presence verified. */
4420 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4421 *puValue = pRange->uValue;
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** @callback_method_impl{FNCPUMWRMSR} */
4427static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4428{
4429 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4430 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4431 * cpus. Needs to be explored and K7 presence verified. */
4432 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4433 return VINF_SUCCESS;
4434}
4435
4436
4437/** @callback_method_impl{FNCPUMRDMSR} */
4438static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4439{
4440 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4441 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4442 * cpus. Needs to be explored and K7 presence verified. */
4443 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4444 * describing EBL_CR_POWERON. */
4445 *puValue = pRange->uValue;
4446 return VINF_SUCCESS;
4447}
4448
4449
4450/** @callback_method_impl{FNCPUMWRMSR} */
4451static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4452{
4453 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4454 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4455 * cpus. Needs to be explored and K7 presence verified. */
4456 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4457 * describing EBL_CR_POWERON. */
4458 return VINF_SUCCESS;
4459}
4460
4461
4462/** @callback_method_impl{FNCPUMRDMSR} */
4463static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4464{
4465 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4466 bool fIgnored;
4467 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4468 if (pLeaf)
4469 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4470 else
4471 *puValue = 0;
4472 return VINF_SUCCESS;
4473}
4474
4475
4476/** @callback_method_impl{FNCPUMWRMSR} */
4477static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4478{
4479 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4480 /** @todo Changing CPUID leaf 7/0. */
4481 return VINF_SUCCESS;
4482}
4483
4484
4485/** @callback_method_impl{FNCPUMRDMSR} */
4486static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4487{
4488 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4489 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4490 if (pLeaf)
4491 *puValue = pLeaf->uEcx;
4492 else
4493 *puValue = 0;
4494 return VINF_SUCCESS;
4495}
4496
4497
4498/** @callback_method_impl{FNCPUMWRMSR} */
4499static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4500{
4501 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4502 /** @todo Changing CPUID leaf 6. */
4503 return VINF_SUCCESS;
4504}
4505
4506
4507/** @callback_method_impl{FNCPUMRDMSR} */
4508static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4509{
4510 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4511 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4512 if (pLeaf)
4513 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4514 else
4515 *puValue = 0;
4516 return VINF_SUCCESS;
4517}
4518
4519
4520/** @callback_method_impl{FNCPUMWRMSR} */
4521static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4522{
4523 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4524 /** @todo Changing CPUID leaf 0x00000001. */
4525 return VINF_SUCCESS;
4526}
4527
4528
4529/** @callback_method_impl{FNCPUMRDMSR} */
4530static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4531{
4532 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4533 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4534 if (pLeaf)
4535 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4536 else
4537 *puValue = 0;
4538 return VINF_SUCCESS;
4539}
4540
4541
4542/** @callback_method_impl{FNCPUMWRMSR} */
4543static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4544{
4545 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4546 /** @todo Changing CPUID leaf 0x80000001. */
4547 return VINF_SUCCESS;
4548}
4549
4550
4551/** @callback_method_impl{FNCPUMRDMSR} */
4552static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4553{
4554 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4555 /** @todo Fake AMD microcode patching. */
4556 *puValue = pRange->uValue;
4557 return VINF_SUCCESS;
4558}
4559
4560
4561/** @callback_method_impl{FNCPUMWRMSR} */
4562static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4563{
4564 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4565 /** @todo Fake AMD microcode patching. */
4566 return VINF_SUCCESS;
4567}
4568
4569
4570/** @callback_method_impl{FNCPUMRDMSR} */
4571static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4572{
4573 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4574 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4575 * cpus. Needs to be explored and K7 presence verified. */
4576 /** @todo undocumented */
4577 *puValue = 0;
4578 return VINF_SUCCESS;
4579}
4580
4581
4582/** @callback_method_impl{FNCPUMWRMSR} */
4583static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4584{
4585 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4586 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4587 * cpus. Needs to be explored and K7 presence verified. */
4588 /** @todo undocumented */
4589 return VINF_SUCCESS;
4590}
4591
4592
4593/** @callback_method_impl{FNCPUMRDMSR} */
4594static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4595{
4596 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4597 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4598 * cpus. Needs to be explored and K7 presence verified. */
4599 /** @todo undocumented */
4600 *puValue = 0;
4601 return VINF_SUCCESS;
4602}
4603
4604
4605/** @callback_method_impl{FNCPUMWRMSR} */
4606static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4607{
4608 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4609 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4610 * cpus. Needs to be explored and K7 presence verified. */
4611 /** @todo undocumented */
4612 return VINF_SUCCESS;
4613}
4614
4615
4616/** @callback_method_impl{FNCPUMRDMSR} */
4617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4618{
4619 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4620 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4621 * cpus. Needs to be explored and K7 presence verified. */
4622 /** @todo undocumented */
4623 *puValue = 0;
4624 return VINF_SUCCESS;
4625}
4626
4627
4628/** @callback_method_impl{FNCPUMWRMSR} */
4629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4630{
4631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4632 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4633 * cpus. Needs to be explored and K7 presence verified. */
4634 /** @todo undocumented */
4635 return VINF_SUCCESS;
4636}
4637
4638
4639/** @callback_method_impl{FNCPUMRDMSR} */
4640static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4641{
4642 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4643 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4644 * cpus. Needs to be explored and K7 presence verified. */
4645 /** @todo undocumented */
4646 *puValue = 0;
4647 return VINF_SUCCESS;
4648}
4649
4650
4651/** @callback_method_impl{FNCPUMWRMSR} */
4652static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4653{
4654 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4655 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4656 * cpus. Needs to be explored and K7 presence verified. */
4657 /** @todo undocumented */
4658 return VINF_SUCCESS;
4659}
4660
4661
4662/** @callback_method_impl{FNCPUMRDMSR} */
4663static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4664{
4665 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4666 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4667 * cpus. Needs to be explored and K7 presence verified. */
4668 /** @todo undocumented */
4669 *puValue = 0;
4670 return VINF_SUCCESS;
4671}
4672
4673
4674/** @callback_method_impl{FNCPUMWRMSR} */
4675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4676{
4677 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4678 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4679 * cpus. Needs to be explored and K7 presence verified. */
4680 /** @todo undocumented */
4681 return VINF_SUCCESS;
4682}
4683
4684
4685/** @callback_method_impl{FNCPUMRDMSR} */
4686static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4687{
4688 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4689 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4690 * cpus. Needs to be explored and K7 presence verified. */
4691 /** @todo undocumented */
4692 *puValue = 0;
4693 return VINF_SUCCESS;
4694}
4695
4696
4697/** @callback_method_impl{FNCPUMWRMSR} */
4698static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4699{
4700 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4701 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4702 * cpus. Needs to be explored and K7 presence verified. */
4703 /** @todo undocumented */
4704 return VINF_SUCCESS;
4705}
4706
4707
4708/** @callback_method_impl{FNCPUMRDMSR} */
4709static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4710{
4711 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4712 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4713 * cpus. Needs to be explored and K7 presence verified. */
4714 /** @todo AMD node ID and bios scratch. */
4715 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** @callback_method_impl{FNCPUMWRMSR} */
4721static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4722{
4723 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4724 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4725 * cpus. Needs to be explored and K7 presence verified. */
4726 /** @todo AMD node ID and bios scratch. */
4727 return VINF_SUCCESS;
4728}
4729
4730
4731/** @callback_method_impl{FNCPUMRDMSR} */
4732static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4733{
4734 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4735 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4736 * cpus. Needs to be explored and K7 presence verified. */
4737 /** @todo AMD DRx address masking (range breakpoints). */
4738 *puValue = 0;
4739 return VINF_SUCCESS;
4740}
4741
4742
4743/** @callback_method_impl{FNCPUMWRMSR} */
4744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4745{
4746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4747 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4748 * cpus. Needs to be explored and K7 presence verified. */
4749 /** @todo AMD DRx address masking (range breakpoints). */
4750 return VINF_SUCCESS;
4751}
4752
4753
4754/** @callback_method_impl{FNCPUMRDMSR} */
4755static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4756{
4757 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4758 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4759 * cpus. Needs to be explored and K7 presence verified. */
4760 /** @todo AMD undocumented debugging features. */
4761 *puValue = 0;
4762 return VINF_SUCCESS;
4763}
4764
4765
4766/** @callback_method_impl{FNCPUMWRMSR} */
4767static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4768{
4769 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4770 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4771 * cpus. Needs to be explored and K7 presence verified. */
4772 /** @todo AMD undocumented debugging features. */
4773 return VINF_SUCCESS;
4774}
4775
4776
4777/** @callback_method_impl{FNCPUMRDMSR} */
4778static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4779{
4780 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4781 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4782 * cpus. Needs to be explored and K7 presence verified. */
4783 /** @todo AMD undocumented debugging features. */
4784 *puValue = 0;
4785 return VINF_SUCCESS;
4786}
4787
4788
4789/** @callback_method_impl{FNCPUMWRMSR} */
4790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4791{
4792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4793 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4794 * cpus. Needs to be explored and K7 presence verified. */
4795 /** @todo AMD undocumented debugging features. */
4796 return VINF_SUCCESS;
4797}
4798
4799
4800/** @callback_method_impl{FNCPUMRDMSR} */
4801static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4802{
4803 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4804 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4805 * cpus. Needs to be explored and K7 presence verified. */
4806 /** @todo AMD load-store config. */
4807 *puValue = 0;
4808 return VINF_SUCCESS;
4809}
4810
4811
4812/** @callback_method_impl{FNCPUMWRMSR} */
4813static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4814{
4815 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4816 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4817 * cpus. Needs to be explored and K7 presence verified. */
4818 /** @todo AMD load-store config. */
4819 return VINF_SUCCESS;
4820}
4821
4822
4823/** @callback_method_impl{FNCPUMRDMSR} */
4824static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4825{
4826 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4827 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4828 * cpus. Needs to be explored and K7 presence verified. */
4829 /** @todo AMD instruction cache config. */
4830 *puValue = 0;
4831 return VINF_SUCCESS;
4832}
4833
4834
4835/** @callback_method_impl{FNCPUMWRMSR} */
4836static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4837{
4838 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4839 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4840 * cpus. Needs to be explored and K7 presence verified. */
4841 /** @todo AMD instruction cache config. */
4842 return VINF_SUCCESS;
4843}
4844
4845
4846/** @callback_method_impl{FNCPUMRDMSR} */
4847static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4848{
4849 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4850 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4851 * cpus. Needs to be explored and K7 presence verified. */
4852 /** @todo AMD data cache config. */
4853 *puValue = 0;
4854 return VINF_SUCCESS;
4855}
4856
4857
4858/** @callback_method_impl{FNCPUMWRMSR} */
4859static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4860{
4861 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4862 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4863 * cpus. Needs to be explored and K7 presence verified. */
4864 /** @todo AMD data cache config. */
4865 return VINF_SUCCESS;
4866}
4867
4868
4869/** @callback_method_impl{FNCPUMRDMSR} */
4870static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4871{
4872 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4873 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4874 * cpus. Needs to be explored and K7 presence verified. */
4875 /** @todo AMD bus unit config. */
4876 *puValue = 0;
4877 return VINF_SUCCESS;
4878}
4879
4880
4881/** @callback_method_impl{FNCPUMWRMSR} */
4882static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4883{
4884 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4885 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4886 * cpus. Needs to be explored and K7 presence verified. */
4887 /** @todo AMD bus unit config. */
4888 return VINF_SUCCESS;
4889}
4890
4891
4892/** @callback_method_impl{FNCPUMRDMSR} */
4893static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4894{
4895 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4896 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4897 * cpus. Needs to be explored and K7 presence verified. */
4898 /** @todo Undocumented AMD debug control register \#2. */
4899 *puValue = 0;
4900 return VINF_SUCCESS;
4901}
4902
4903
4904/** @callback_method_impl{FNCPUMWRMSR} */
4905static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4906{
4907 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4908 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4909 * cpus. Needs to be explored and K7 presence verified. */
4910 /** @todo Undocumented AMD debug control register \#2. */
4911 return VINF_SUCCESS;
4912}
4913
4914
4915/** @callback_method_impl{FNCPUMRDMSR} */
4916static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4917{
4918 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4919 /** @todo AMD FPU config. */
4920 *puValue = 0;
4921 return VINF_SUCCESS;
4922}
4923
4924
4925/** @callback_method_impl{FNCPUMWRMSR} */
4926static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4927{
4928 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4929 /** @todo AMD FPU config. */
4930 return VINF_SUCCESS;
4931}
4932
4933
4934/** @callback_method_impl{FNCPUMRDMSR} */
4935static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4936{
4937 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4938 /** @todo AMD decoder config. */
4939 *puValue = 0;
4940 return VINF_SUCCESS;
4941}
4942
4943
4944/** @callback_method_impl{FNCPUMWRMSR} */
4945static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4946{
4947 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4948 /** @todo AMD decoder config. */
4949 return VINF_SUCCESS;
4950}
4951
4952
4953/** @callback_method_impl{FNCPUMRDMSR} */
4954static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4955{
4956 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4957 /* Note! 10h and 16h */
4958 /** @todo AMD bus unit config. */
4959 *puValue = 0;
4960 return VINF_SUCCESS;
4961}
4962
4963
4964/** @callback_method_impl{FNCPUMWRMSR} */
4965static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4966{
4967 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4968 /* Note! 10h and 16h */
4969 /** @todo AMD bus unit config. */
4970 return VINF_SUCCESS;
4971}
4972
4973
4974/** @callback_method_impl{FNCPUMRDMSR} */
4975static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4976{
4977 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4978 /** @todo AMD unit config. */
4979 *puValue = 0;
4980 return VINF_SUCCESS;
4981}
4982
4983
4984/** @callback_method_impl{FNCPUMWRMSR} */
4985static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4986{
4987 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4988 /** @todo AMD unit config. */
4989 return VINF_SUCCESS;
4990}
4991
4992
4993/** @callback_method_impl{FNCPUMRDMSR} */
4994static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4995{
4996 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4997 /** @todo AMD unit config 2. */
4998 *puValue = 0;
4999 return VINF_SUCCESS;
5000}
5001
5002
5003/** @callback_method_impl{FNCPUMWRMSR} */
5004static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5005{
5006 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5007 /** @todo AMD unit config 2. */
5008 return VINF_SUCCESS;
5009}
5010
5011
5012/** @callback_method_impl{FNCPUMRDMSR} */
5013static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5014{
5015 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5016 /** @todo AMD combined unit config 3. */
5017 *puValue = 0;
5018 return VINF_SUCCESS;
5019}
5020
5021
5022/** @callback_method_impl{FNCPUMWRMSR} */
5023static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5024{
5025 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5026 /** @todo AMD combined unit config 3. */
5027 return VINF_SUCCESS;
5028}
5029
5030
5031/** @callback_method_impl{FNCPUMRDMSR} */
5032static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5033{
5034 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5035 /** @todo AMD execution unit config. */
5036 *puValue = 0;
5037 return VINF_SUCCESS;
5038}
5039
5040
5041/** @callback_method_impl{FNCPUMWRMSR} */
5042static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5043{
5044 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5045 /** @todo AMD execution unit config. */
5046 return VINF_SUCCESS;
5047}
5048
5049
5050/** @callback_method_impl{FNCPUMRDMSR} */
5051static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5052{
5053 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5054 /** @todo AMD load-store config 2. */
5055 *puValue = 0;
5056 return VINF_SUCCESS;
5057}
5058
5059
5060/** @callback_method_impl{FNCPUMWRMSR} */
5061static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5062{
5063 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5064 /** @todo AMD load-store config 2. */
5065 return VINF_SUCCESS;
5066}
5067
5068
5069/** @callback_method_impl{FNCPUMRDMSR} */
5070static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5071{
5072 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5073 /** @todo AMD IBS. */
5074 *puValue = 0;
5075 return VINF_SUCCESS;
5076}
5077
5078
5079/** @callback_method_impl{FNCPUMWRMSR} */
5080static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5081{
5082 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5083 /** @todo AMD IBS. */
5084 return VINF_SUCCESS;
5085}
5086
5087
5088/** @callback_method_impl{FNCPUMRDMSR} */
5089static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5090{
5091 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5092 /** @todo AMD IBS. */
5093 *puValue = 0;
5094 return VINF_SUCCESS;
5095}
5096
5097
5098/** @callback_method_impl{FNCPUMWRMSR} */
5099static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5100{
5101 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5102 /** @todo AMD IBS. */
5103 return VINF_SUCCESS;
5104}
5105
5106
5107/** @callback_method_impl{FNCPUMRDMSR} */
5108static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5109{
5110 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5111 /** @todo AMD IBS. */
5112 *puValue = 0;
5113 return VINF_SUCCESS;
5114}
5115
5116
5117/** @callback_method_impl{FNCPUMWRMSR} */
5118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5119{
5120 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5121 /** @todo AMD IBS. */
5122 return VINF_SUCCESS;
5123}
5124
5125
5126/** @callback_method_impl{FNCPUMRDMSR} */
5127static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5128{
5129 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5130 /** @todo AMD IBS. */
5131 *puValue = 0;
5132 return VINF_SUCCESS;
5133}
5134
5135
5136/** @callback_method_impl{FNCPUMWRMSR} */
5137static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5138{
5139 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5140 /** @todo AMD IBS. */
5141 return VINF_SUCCESS;
5142}
5143
5144
5145/** @callback_method_impl{FNCPUMRDMSR} */
5146static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5147{
5148 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5149 /** @todo AMD IBS. */
5150 *puValue = 0;
5151 return VINF_SUCCESS;
5152}
5153
5154
5155/** @callback_method_impl{FNCPUMWRMSR} */
5156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5157{
5158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5159 /** @todo AMD IBS. */
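    /* This MSR (IbsOpRip) holds a linear address, namely the rIP of the IBS-tagged op,
       hence the canonical-address check below (non-canonical values raise #GP(0)). */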
5160 if (!X86_IS_CANONICAL(uValue))
5161 {
5162 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5163 return VERR_CPUM_RAISE_GP_0;
5164 }
5165 return VINF_SUCCESS;
5166}
5167
5168
5169/** @callback_method_impl{FNCPUMRDMSR} */
5170static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5171{
5172 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5173 /** @todo AMD IBS. */
5174 *puValue = 0;
5175 return VINF_SUCCESS;
5176}
5177
5178
5179/** @callback_method_impl{FNCPUMWRMSR} */
5180static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5181{
5182 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5183 /** @todo AMD IBS. */
5184 return VINF_SUCCESS;
5185}
5186
5187
5188/** @callback_method_impl{FNCPUMRDMSR} */
5189static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5190{
5191 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5192 /** @todo AMD IBS. */
5193 *puValue = 0;
5194 return VINF_SUCCESS;
5195}
5196
5197
5198/** @callback_method_impl{FNCPUMWRMSR} */
5199static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5200{
5201 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5202 /** @todo AMD IBS. */
5203 return VINF_SUCCESS;
5204}
5205
5206
5207/** @callback_method_impl{FNCPUMRDMSR} */
5208static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5209{
5210 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5211 /** @todo AMD IBS. */
5212 *puValue = 0;
5213 return VINF_SUCCESS;
5214}
5215
5216
5217/** @callback_method_impl{FNCPUMWRMSR} */
5218static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5219{
5220 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5221 /** @todo AMD IBS. */
5222 return VINF_SUCCESS;
5223}
5224
5225
5226/** @callback_method_impl{FNCPUMRDMSR} */
5227static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5228{
5229 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5230 /** @todo AMD IBS. */
5231 *puValue = 0;
5232 return VINF_SUCCESS;
5233}
5234
5235
5236/** @callback_method_impl{FNCPUMWRMSR} */
5237static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5238{
5239 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5240 /** @todo AMD IBS. */
5241 if (!X86_IS_CANONICAL(uValue))
5242 {
5243 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5244 return VERR_CPUM_RAISE_GP_0;
5245 }
5246 return VINF_SUCCESS;
5247}
5248
5249
5250/** @callback_method_impl{FNCPUMRDMSR} */
5251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5252{
5253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5254 /** @todo AMD IBS. */
5255 *puValue = 0;
5256 return VINF_SUCCESS;
5257}
5258
5259
5260/** @callback_method_impl{FNCPUMWRMSR} */
5261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5262{
5263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5264 /** @todo AMD IBS. */
5265 return VINF_SUCCESS;
5266}
5267
5268
5269/** @callback_method_impl{FNCPUMRDMSR} */
5270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5271{
5272 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5273 /** @todo AMD IBS. */
5274 *puValue = 0;
5275 return VINF_SUCCESS;
5276}
5277
5278
5279/** @callback_method_impl{FNCPUMWRMSR} */
5280static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5281{
5282 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5283 /** @todo AMD IBS. */
5284 return VINF_SUCCESS;
5285}
5286
5287
5288/** @callback_method_impl{FNCPUMRDMSR} */
5289static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5290{
5291 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5292 /** @todo AMD IBS. */
5293 *puValue = 0;
5294 return VINF_SUCCESS;
5295}
5296
5297
5298/** @callback_method_impl{FNCPUMWRMSR} */
5299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5300{
5301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5302 /** @todo AMD IBS. */
5303 if (!X86_IS_CANONICAL(uValue))
5304 {
5305 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5306 return VERR_CPUM_RAISE_GP_0;
5307 }
5308 return VINF_SUCCESS;
5309}
5310
5311
5312
5313/*
5314 * GIM MSRs.
5315 * GIM MSRs.
5316 * GIM MSRs.
5317 */
5318
5319
5320/** @callback_method_impl{FNCPUMRDMSR} */
5321static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5322{
5323#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5324 /* Raise #GP(0) like a physical CPU would, since the nested hypervisor hasn't intercepted these MSRs. */
5325 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5326 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5327 return VERR_CPUM_RAISE_GP_0;
5328#endif
5329 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5330}
5331
5332
5333/** @callback_method_impl{FNCPUMWRMSR} */
5334static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5335{
5336#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5337 /* Raise #GP(0) like a physical CPU would, since the nested hypervisor hasn't intercepted these MSRs. */
5338 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5339 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5340 return VERR_CPUM_RAISE_GP_0;
5341#endif
5342 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5343}
5344
5345
5346/**
5347 * MSR read function table.
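 * Note: indexed by the kCpumMsrRdFn_* enum values, so the entry order below must
 *       match that enumeration exactly.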
5348 */
5349static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5350{
5351 NULL, /* Invalid */
5352 cpumMsrRd_FixedValue,
5353 NULL, /* Alias */
5354 cpumMsrRd_WriteOnly,
5355 cpumMsrRd_Ia32P5McAddr,
5356 cpumMsrRd_Ia32P5McType,
5357 cpumMsrRd_Ia32TimestampCounter,
5358 cpumMsrRd_Ia32PlatformId,
5359 cpumMsrRd_Ia32ApicBase,
5360 cpumMsrRd_Ia32FeatureControl,
5361 cpumMsrRd_Ia32BiosSignId,
5362 cpumMsrRd_Ia32SmmMonitorCtl,
5363 cpumMsrRd_Ia32PmcN,
5364 cpumMsrRd_Ia32MonitorFilterLineSize,
5365 cpumMsrRd_Ia32MPerf,
5366 cpumMsrRd_Ia32APerf,
5367 cpumMsrRd_Ia32MtrrCap,
5368 cpumMsrRd_Ia32MtrrPhysBaseN,
5369 cpumMsrRd_Ia32MtrrPhysMaskN,
5370 cpumMsrRd_Ia32MtrrFixed,
5371 cpumMsrRd_Ia32MtrrDefType,
5372 cpumMsrRd_Ia32Pat,
5373 cpumMsrRd_Ia32SysEnterCs,
5374 cpumMsrRd_Ia32SysEnterEsp,
5375 cpumMsrRd_Ia32SysEnterEip,
5376 cpumMsrRd_Ia32McgCap,
5377 cpumMsrRd_Ia32McgStatus,
5378 cpumMsrRd_Ia32McgCtl,
5379 cpumMsrRd_Ia32DebugCtl,
5380 cpumMsrRd_Ia32SmrrPhysBase,
5381 cpumMsrRd_Ia32SmrrPhysMask,
5382 cpumMsrRd_Ia32PlatformDcaCap,
5383 cpumMsrRd_Ia32CpuDcaCap,
5384 cpumMsrRd_Ia32Dca0Cap,
5385 cpumMsrRd_Ia32PerfEvtSelN,
5386 cpumMsrRd_Ia32PerfStatus,
5387 cpumMsrRd_Ia32PerfCtl,
5388 cpumMsrRd_Ia32FixedCtrN,
5389 cpumMsrRd_Ia32PerfCapabilities,
5390 cpumMsrRd_Ia32FixedCtrCtrl,
5391 cpumMsrRd_Ia32PerfGlobalStatus,
5392 cpumMsrRd_Ia32PerfGlobalCtrl,
5393 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5394 cpumMsrRd_Ia32PebsEnable,
5395 cpumMsrRd_Ia32ClockModulation,
5396 cpumMsrRd_Ia32ThermInterrupt,
5397 cpumMsrRd_Ia32ThermStatus,
5398 cpumMsrRd_Ia32Therm2Ctl,
5399 cpumMsrRd_Ia32MiscEnable,
5400 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5401 cpumMsrRd_Ia32McNCtl2,
5402 cpumMsrRd_Ia32DsArea,
5403 cpumMsrRd_Ia32TscDeadline,
5404 cpumMsrRd_Ia32X2ApicN,
5405 cpumMsrRd_Ia32DebugInterface,
5406 cpumMsrRd_Ia32VmxBasic,
5407 cpumMsrRd_Ia32VmxPinbasedCtls,
5408 cpumMsrRd_Ia32VmxProcbasedCtls,
5409 cpumMsrRd_Ia32VmxExitCtls,
5410 cpumMsrRd_Ia32VmxEntryCtls,
5411 cpumMsrRd_Ia32VmxMisc,
5412 cpumMsrRd_Ia32VmxCr0Fixed0,
5413 cpumMsrRd_Ia32VmxCr0Fixed1,
5414 cpumMsrRd_Ia32VmxCr4Fixed0,
5415 cpumMsrRd_Ia32VmxCr4Fixed1,
5416 cpumMsrRd_Ia32VmxVmcsEnum,
5417 cpumMsrRd_Ia32VmxProcBasedCtls2,
5418 cpumMsrRd_Ia32VmxEptVpidCap,
5419 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5420 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5421 cpumMsrRd_Ia32VmxTrueExitCtls,
5422 cpumMsrRd_Ia32VmxTrueEntryCtls,
5423 cpumMsrRd_Ia32VmxVmFunc,
5424 cpumMsrRd_Ia32SpecCtrl,
5425 cpumMsrRd_Ia32ArchCapabilities,
5426
5427 cpumMsrRd_Amd64Efer,
5428 cpumMsrRd_Amd64SyscallTarget,
5429 cpumMsrRd_Amd64LongSyscallTarget,
5430 cpumMsrRd_Amd64CompSyscallTarget,
5431 cpumMsrRd_Amd64SyscallFlagMask,
5432 cpumMsrRd_Amd64FsBase,
5433 cpumMsrRd_Amd64GsBase,
5434 cpumMsrRd_Amd64KernelGsBase,
5435 cpumMsrRd_Amd64TscAux,
5436
5437 cpumMsrRd_IntelEblCrPowerOn,
5438 cpumMsrRd_IntelI7CoreThreadCount,
5439 cpumMsrRd_IntelP4EbcHardPowerOn,
5440 cpumMsrRd_IntelP4EbcSoftPowerOn,
5441 cpumMsrRd_IntelP4EbcFrequencyId,
5442 cpumMsrRd_IntelP6FsbFrequency,
5443 cpumMsrRd_IntelPlatformInfo,
5444 cpumMsrRd_IntelFlexRatio,
5445 cpumMsrRd_IntelPkgCStConfigControl,
5446 cpumMsrRd_IntelPmgIoCaptureBase,
5447 cpumMsrRd_IntelLastBranchFromToN,
5448 cpumMsrRd_IntelLastBranchFromN,
5449 cpumMsrRd_IntelLastBranchToN,
5450 cpumMsrRd_IntelLastBranchTos,
5451 cpumMsrRd_IntelBblCrCtl,
5452 cpumMsrRd_IntelBblCrCtl3,
5453 cpumMsrRd_IntelI7TemperatureTarget,
5454 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5455 cpumMsrRd_IntelI7MiscPwrMgmt,
5456 cpumMsrRd_IntelP6CrN,
5457 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5458 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5459 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5460 cpumMsrRd_IntelI7SandyAesNiCtl,
5461 cpumMsrRd_IntelI7TurboRatioLimit,
5462 cpumMsrRd_IntelI7LbrSelect,
5463 cpumMsrRd_IntelI7SandyErrorControl,
5464 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5465 cpumMsrRd_IntelI7PowerCtl,
5466 cpumMsrRd_IntelI7SandyPebsNumAlt,
5467 cpumMsrRd_IntelI7PebsLdLat,
5468 cpumMsrRd_IntelI7PkgCnResidencyN,
5469 cpumMsrRd_IntelI7CoreCnResidencyN,
5470 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5471 cpumMsrRd_IntelI7SandyVrMiscConfig,
5472 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5473 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5474 cpumMsrRd_IntelI7SandyPkgC2Residency,
5475 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5476 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5477 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5478 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5479 cpumMsrRd_IntelI7RaplDramPowerLimit,
5480 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5481 cpumMsrRd_IntelI7RaplDramPerfStatus,
5482 cpumMsrRd_IntelI7RaplDramPowerInfo,
5483 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5484 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5485 cpumMsrRd_IntelI7RaplPp0Policy,
5486 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5487 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5488 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5489 cpumMsrRd_IntelI7RaplPp1Policy,
5490 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5491 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5492 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5493 cpumMsrRd_IntelI7IvyConfigTdpControl,
5494 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5495 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5496 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5497 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5498 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5499 cpumMsrRd_IntelI7UncPerfFixedCtr,
5500 cpumMsrRd_IntelI7UncCBoxConfig,
5501 cpumMsrRd_IntelI7UncArbPerfCtrN,
5502 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5503 cpumMsrRd_IntelI7SmiCount,
5504 cpumMsrRd_IntelCore2EmttmCrTablesN,
5505 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5506 cpumMsrRd_IntelCore1ExtConfig,
5507 cpumMsrRd_IntelCore1DtsCalControl,
5508 cpumMsrRd_IntelCore2PeciControl,
5509 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5510
5511 cpumMsrRd_P6LastBranchFromIp,
5512 cpumMsrRd_P6LastBranchToIp,
5513 cpumMsrRd_P6LastIntFromIp,
5514 cpumMsrRd_P6LastIntToIp,
5515
5516 cpumMsrRd_AmdFam15hTscRate,
5517 cpumMsrRd_AmdFam15hLwpCfg,
5518 cpumMsrRd_AmdFam15hLwpCbAddr,
5519 cpumMsrRd_AmdFam10hMc4MiscN,
5520 cpumMsrRd_AmdK8PerfCtlN,
5521 cpumMsrRd_AmdK8PerfCtrN,
5522 cpumMsrRd_AmdK8SysCfg,
5523 cpumMsrRd_AmdK8HwCr,
5524 cpumMsrRd_AmdK8IorrBaseN,
5525 cpumMsrRd_AmdK8IorrMaskN,
5526 cpumMsrRd_AmdK8TopOfMemN,
5527 cpumMsrRd_AmdK8NbCfg1,
5528 cpumMsrRd_AmdK8McXcptRedir,
5529 cpumMsrRd_AmdK8CpuNameN,
5530 cpumMsrRd_AmdK8HwThermalCtrl,
5531 cpumMsrRd_AmdK8SwThermalCtrl,
5532 cpumMsrRd_AmdK8FidVidControl,
5533 cpumMsrRd_AmdK8FidVidStatus,
5534 cpumMsrRd_AmdK8McCtlMaskN,
5535 cpumMsrRd_AmdK8SmiOnIoTrapN,
5536 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5537 cpumMsrRd_AmdK8IntPendingMessage,
5538 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5539 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5540 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5541 cpumMsrRd_AmdFam10hPStateCurLimit,
5542 cpumMsrRd_AmdFam10hPStateControl,
5543 cpumMsrRd_AmdFam10hPStateStatus,
5544 cpumMsrRd_AmdFam10hPStateN,
5545 cpumMsrRd_AmdFam10hCofVidControl,
5546 cpumMsrRd_AmdFam10hCofVidStatus,
5547 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5548 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5549 cpumMsrRd_AmdK8SmmBase,
5550 cpumMsrRd_AmdK8SmmAddr,
5551 cpumMsrRd_AmdK8SmmMask,
5552 cpumMsrRd_AmdK8VmCr,
5553 cpumMsrRd_AmdK8IgnNe,
5554 cpumMsrRd_AmdK8SmmCtl,
5555 cpumMsrRd_AmdK8VmHSavePa,
5556 cpumMsrRd_AmdFam10hVmLockKey,
5557 cpumMsrRd_AmdFam10hSmmLockKey,
5558 cpumMsrRd_AmdFam10hLocalSmiStatus,
5559 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5560 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5561 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5562 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5563 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5564 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5565 cpumMsrRd_AmdK7MicrocodeCtl,
5566 cpumMsrRd_AmdK7ClusterIdMaybe,
5567 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5568 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5569 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5570 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5571 cpumMsrRd_AmdK8PatchLevel,
5572 cpumMsrRd_AmdK7DebugStatusMaybe,
5573 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5574 cpumMsrRd_AmdK7BHTracePtrMaybe,
5575 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5576 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5577 cpumMsrRd_AmdK7FastFlushCountMaybe,
5578 cpumMsrRd_AmdK7NodeId,
5579 cpumMsrRd_AmdK7DrXAddrMaskN,
5580 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5581 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5582 cpumMsrRd_AmdK7LoadStoreCfg,
5583 cpumMsrRd_AmdK7InstrCacheCfg,
5584 cpumMsrRd_AmdK7DataCacheCfg,
5585 cpumMsrRd_AmdK7BusUnitCfg,
5586 cpumMsrRd_AmdK7DebugCtl2Maybe,
5587 cpumMsrRd_AmdFam15hFpuCfg,
5588 cpumMsrRd_AmdFam15hDecoderCfg,
5589 cpumMsrRd_AmdFam10hBusUnitCfg2,
5590 cpumMsrRd_AmdFam15hCombUnitCfg,
5591 cpumMsrRd_AmdFam15hCombUnitCfg2,
5592 cpumMsrRd_AmdFam15hCombUnitCfg3,
5593 cpumMsrRd_AmdFam15hExecUnitCfg,
5594 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5595 cpumMsrRd_AmdFam10hIbsFetchCtl,
5596 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5597 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5598 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5599 cpumMsrRd_AmdFam10hIbsOpRip,
5600 cpumMsrRd_AmdFam10hIbsOpData,
5601 cpumMsrRd_AmdFam10hIbsOpData2,
5602 cpumMsrRd_AmdFam10hIbsOpData3,
5603 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5604 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5605 cpumMsrRd_AmdFam10hIbsCtl,
5606 cpumMsrRd_AmdFam14hIbsBrTarget,
5607
5608 cpumMsrRd_Gim
5609};
5610
5611
5612/**
5613 * MSR write function table.
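 * Note: indexed by the kCpumMsrWrFn_* enum values, so the entry order below must
 *       match that enumeration exactly.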
5614 */
5615static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5616{
5617 NULL, /* Invalid */
5618 cpumMsrWr_IgnoreWrite,
5619 cpumMsrWr_ReadOnly,
5620 NULL, /* Alias */
5621 cpumMsrWr_Ia32P5McAddr,
5622 cpumMsrWr_Ia32P5McType,
5623 cpumMsrWr_Ia32TimestampCounter,
5624 cpumMsrWr_Ia32ApicBase,
5625 cpumMsrWr_Ia32FeatureControl,
5626 cpumMsrWr_Ia32BiosSignId,
5627 cpumMsrWr_Ia32BiosUpdateTrigger,
5628 cpumMsrWr_Ia32SmmMonitorCtl,
5629 cpumMsrWr_Ia32PmcN,
5630 cpumMsrWr_Ia32MonitorFilterLineSize,
5631 cpumMsrWr_Ia32MPerf,
5632 cpumMsrWr_Ia32APerf,
5633 cpumMsrWr_Ia32MtrrPhysBaseN,
5634 cpumMsrWr_Ia32MtrrPhysMaskN,
5635 cpumMsrWr_Ia32MtrrFixed,
5636 cpumMsrWr_Ia32MtrrDefType,
5637 cpumMsrWr_Ia32Pat,
5638 cpumMsrWr_Ia32SysEnterCs,
5639 cpumMsrWr_Ia32SysEnterEsp,
5640 cpumMsrWr_Ia32SysEnterEip,
5641 cpumMsrWr_Ia32McgStatus,
5642 cpumMsrWr_Ia32McgCtl,
5643 cpumMsrWr_Ia32DebugCtl,
5644 cpumMsrWr_Ia32SmrrPhysBase,
5645 cpumMsrWr_Ia32SmrrPhysMask,
5646 cpumMsrWr_Ia32PlatformDcaCap,
5647 cpumMsrWr_Ia32Dca0Cap,
5648 cpumMsrWr_Ia32PerfEvtSelN,
5649 cpumMsrWr_Ia32PerfStatus,
5650 cpumMsrWr_Ia32PerfCtl,
5651 cpumMsrWr_Ia32FixedCtrN,
5652 cpumMsrWr_Ia32PerfCapabilities,
5653 cpumMsrWr_Ia32FixedCtrCtrl,
5654 cpumMsrWr_Ia32PerfGlobalStatus,
5655 cpumMsrWr_Ia32PerfGlobalCtrl,
5656 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5657 cpumMsrWr_Ia32PebsEnable,
5658 cpumMsrWr_Ia32ClockModulation,
5659 cpumMsrWr_Ia32ThermInterrupt,
5660 cpumMsrWr_Ia32ThermStatus,
5661 cpumMsrWr_Ia32Therm2Ctl,
5662 cpumMsrWr_Ia32MiscEnable,
5663 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5664 cpumMsrWr_Ia32McNCtl2,
5665 cpumMsrWr_Ia32DsArea,
5666 cpumMsrWr_Ia32TscDeadline,
5667 cpumMsrWr_Ia32X2ApicN,
5668 cpumMsrWr_Ia32DebugInterface,
5669 cpumMsrWr_Ia32SpecCtrl,
5670 cpumMsrWr_Ia32PredCmd,
5671
5672 cpumMsrWr_Amd64Efer,
5673 cpumMsrWr_Amd64SyscallTarget,
5674 cpumMsrWr_Amd64LongSyscallTarget,
5675 cpumMsrWr_Amd64CompSyscallTarget,
5676 cpumMsrWr_Amd64SyscallFlagMask,
5677 cpumMsrWr_Amd64FsBase,
5678 cpumMsrWr_Amd64GsBase,
5679 cpumMsrWr_Amd64KernelGsBase,
5680 cpumMsrWr_Amd64TscAux,
5681
5682 cpumMsrWr_IntelEblCrPowerOn,
5683 cpumMsrWr_IntelP4EbcHardPowerOn,
5684 cpumMsrWr_IntelP4EbcSoftPowerOn,
5685 cpumMsrWr_IntelP4EbcFrequencyId,
5686 cpumMsrWr_IntelFlexRatio,
5687 cpumMsrWr_IntelPkgCStConfigControl,
5688 cpumMsrWr_IntelPmgIoCaptureBase,
5689 cpumMsrWr_IntelLastBranchFromToN,
5690 cpumMsrWr_IntelLastBranchFromN,
5691 cpumMsrWr_IntelLastBranchToN,
5692 cpumMsrWr_IntelLastBranchTos,
5693 cpumMsrWr_IntelBblCrCtl,
5694 cpumMsrWr_IntelBblCrCtl3,
5695 cpumMsrWr_IntelI7TemperatureTarget,
5696 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5697 cpumMsrWr_IntelI7MiscPwrMgmt,
5698 cpumMsrWr_IntelP6CrN,
5699 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5700 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5701 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5702 cpumMsrWr_IntelI7SandyAesNiCtl,
5703 cpumMsrWr_IntelI7TurboRatioLimit,
5704 cpumMsrWr_IntelI7LbrSelect,
5705 cpumMsrWr_IntelI7SandyErrorControl,
5706 cpumMsrWr_IntelI7PowerCtl,
5707 cpumMsrWr_IntelI7SandyPebsNumAlt,
5708 cpumMsrWr_IntelI7PebsLdLat,
5709 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5710 cpumMsrWr_IntelI7SandyVrMiscConfig,
5711 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5712 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5713 cpumMsrWr_IntelI7SandyPkgC2Residency,
5714 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5715 cpumMsrWr_IntelI7RaplDramPowerLimit,
5716 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5717 cpumMsrWr_IntelI7RaplPp0Policy,
5718 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5719 cpumMsrWr_IntelI7RaplPp1Policy,
5720 cpumMsrWr_IntelI7IvyConfigTdpControl,
5721 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5722 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5723 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5724 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5725 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5726 cpumMsrWr_IntelI7UncPerfFixedCtr,
5727 cpumMsrWr_IntelI7UncArbPerfCtrN,
5728 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5729 cpumMsrWr_IntelCore2EmttmCrTablesN,
5730 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5731 cpumMsrWr_IntelCore1ExtConfig,
5732 cpumMsrWr_IntelCore1DtsCalControl,
5733 cpumMsrWr_IntelCore2PeciControl,
5734
5735 cpumMsrWr_P6LastIntFromIp,
5736 cpumMsrWr_P6LastIntToIp,
5737
5738 cpumMsrWr_AmdFam15hTscRate,
5739 cpumMsrWr_AmdFam15hLwpCfg,
5740 cpumMsrWr_AmdFam15hLwpCbAddr,
5741 cpumMsrWr_AmdFam10hMc4MiscN,
5742 cpumMsrWr_AmdK8PerfCtlN,
5743 cpumMsrWr_AmdK8PerfCtrN,
5744 cpumMsrWr_AmdK8SysCfg,
5745 cpumMsrWr_AmdK8HwCr,
5746 cpumMsrWr_AmdK8IorrBaseN,
5747 cpumMsrWr_AmdK8IorrMaskN,
5748 cpumMsrWr_AmdK8TopOfMemN,
5749 cpumMsrWr_AmdK8NbCfg1,
5750 cpumMsrWr_AmdK8McXcptRedir,
5751 cpumMsrWr_AmdK8CpuNameN,
5752 cpumMsrWr_AmdK8HwThermalCtrl,
5753 cpumMsrWr_AmdK8SwThermalCtrl,
5754 cpumMsrWr_AmdK8FidVidControl,
5755 cpumMsrWr_AmdK8McCtlMaskN,
5756 cpumMsrWr_AmdK8SmiOnIoTrapN,
5757 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5758 cpumMsrWr_AmdK8IntPendingMessage,
5759 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5760 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5761 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5762 cpumMsrWr_AmdFam10hPStateControl,
5763 cpumMsrWr_AmdFam10hPStateStatus,
5764 cpumMsrWr_AmdFam10hPStateN,
5765 cpumMsrWr_AmdFam10hCofVidControl,
5766 cpumMsrWr_AmdFam10hCofVidStatus,
5767 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5768 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5769 cpumMsrWr_AmdK8SmmBase,
5770 cpumMsrWr_AmdK8SmmAddr,
5771 cpumMsrWr_AmdK8SmmMask,
5772 cpumMsrWr_AmdK8VmCr,
5773 cpumMsrWr_AmdK8IgnNe,
5774 cpumMsrWr_AmdK8SmmCtl,
5775 cpumMsrWr_AmdK8VmHSavePa,
5776 cpumMsrWr_AmdFam10hVmLockKey,
5777 cpumMsrWr_AmdFam10hSmmLockKey,
5778 cpumMsrWr_AmdFam10hLocalSmiStatus,
5779 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5780 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5781 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5782 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5783 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5784 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5785 cpumMsrWr_AmdK7MicrocodeCtl,
5786 cpumMsrWr_AmdK7ClusterIdMaybe,
5787 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5788 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5789 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5790 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5791 cpumMsrWr_AmdK8PatchLoader,
5792 cpumMsrWr_AmdK7DebugStatusMaybe,
5793 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5794 cpumMsrWr_AmdK7BHTracePtrMaybe,
5795 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5796 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5797 cpumMsrWr_AmdK7FastFlushCountMaybe,
5798 cpumMsrWr_AmdK7NodeId,
5799 cpumMsrWr_AmdK7DrXAddrMaskN,
5800 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5801 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5802 cpumMsrWr_AmdK7LoadStoreCfg,
5803 cpumMsrWr_AmdK7InstrCacheCfg,
5804 cpumMsrWr_AmdK7DataCacheCfg,
5805 cpumMsrWr_AmdK7BusUnitCfg,
5806 cpumMsrWr_AmdK7DebugCtl2Maybe,
5807 cpumMsrWr_AmdFam15hFpuCfg,
5808 cpumMsrWr_AmdFam15hDecoderCfg,
5809 cpumMsrWr_AmdFam10hBusUnitCfg2,
5810 cpumMsrWr_AmdFam15hCombUnitCfg,
5811 cpumMsrWr_AmdFam15hCombUnitCfg2,
5812 cpumMsrWr_AmdFam15hCombUnitCfg3,
5813 cpumMsrWr_AmdFam15hExecUnitCfg,
5814 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5815 cpumMsrWr_AmdFam10hIbsFetchCtl,
5816 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5817 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5818 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5819 cpumMsrWr_AmdFam10hIbsOpRip,
5820 cpumMsrWr_AmdFam10hIbsOpData,
5821 cpumMsrWr_AmdFam10hIbsOpData2,
5822 cpumMsrWr_AmdFam10hIbsOpData3,
5823 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5824 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5825 cpumMsrWr_AmdFam10hIbsCtl,
5826 cpumMsrWr_AmdFam14hIbsBrTarget,
5827
5828 cpumMsrWr_Gim
5829};
5830
5831
5832/**
5833 * Looks up the range for the given MSR.
5834 *
5835 * @returns Pointer to the range if found, NULL if not.
5836 * @param pVM The cross context VM structure.
5837 * @param idMsr The MSR to look up.
5838 */
5839# ifndef IN_RING3
5840static
5841# endif
5842PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5843{
5844 /*
5845 * Binary lookup.
5846 */
5847 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5848 if (!cRanges)
5849 return NULL;
5850 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5851 for (;;)
5852 {
5853 uint32_t i = cRanges / 2;
5854 if (idMsr < paRanges[i].uFirst)
5855 {
5856 if (i == 0)
5857 break;
5858 cRanges = i;
5859 }
5860 else if (idMsr > paRanges[i].uLast)
5861 {
5862 i++;
5863 if (i >= cRanges)
5864 break;
5865 cRanges -= i;
5866 paRanges = &paRanges[i];
5867 }
5868 else
5869 {
5870 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5871 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5872 return &paRanges[i];
5873 }
5874 }
5875
5876# ifdef VBOX_STRICT
5877 /*
5878 * Linear lookup to verify the above binary search.
5879 */
5880 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5881 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5882 while (cLeft-- > 0)
5883 {
5884 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5885 {
5886 AssertFailed();
5887 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5888 return cpumLookupMsrRange(pVM, pCur->uValue);
5889 return pCur;
5890 }
5891 pCur++;
5892 }
5893# endif
5894 return NULL;
5895}
5896
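/*
 * Illustrative note: a range whose read function is kCpumMsrRdFn_MsrAlias does
 * not describe an MSR of its own; its uValue field holds the MSR number it
 * aliases.  A hypothetical range
 * { uFirst=0x2004, uLast=0x2004, enmRdFn=kCpumMsrRdFn_MsrAlias, uValue=0x10 }
 * would make cpumLookupMsrRange(pVM, 0x2004) recurse and return the range
 * covering MSR 0x10 instead.  (The numbers here are made up for the example.)
 */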
5897
5898/**
5899 * Query a guest MSR.
5900 *
5901 * The caller is responsible for checking privilege if the call is the result of
5902 * a RDMSR instruction. We'll do the rest.
5903 *
5904 * @retval VINF_SUCCESS on success.
5905 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5906 * current context (raw-mode or ring-0).
5907 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5908 * expected to take the appropriate actions. @a *puValue is set to 0.
5909 * @param pVCpu The cross context virtual CPU structure.
5910 * @param idMsr The MSR.
5911 * @param puValue Where to return the value.
5912 *
5913 * @remarks This will always return the right values, even when we're in the
5914 * recompiler.
5915 */
5916VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5917{
5918 *puValue = 0;
5919
5920 VBOXSTRICTRC rcStrict;
5921 PVM pVM = pVCpu->CTX_SUFF(pVM);
5922 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5923 if (pRange)
5924 {
5925 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5926 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5927
5928 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5929 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5930
5931 STAM_COUNTER_INC(&pRange->cReads);
5932 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5933
5934 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5935 if (rcStrict == VINF_SUCCESS)
5936 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5937 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5938 {
5939 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5940 STAM_COUNTER_INC(&pRange->cGps);
5941 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5942 }
5943#ifndef IN_RING3
5944 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5945 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5946#endif
5947 else
5948 {
5949 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5950 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5951 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5952 Assert(rcStrict != VERR_EM_INTERPRETER);
5953 }
5954 }
5955 else
5956 {
5957 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5958 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5959 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5960 rcStrict = VERR_CPUM_RAISE_GP_0;
5961 }
5962 return rcStrict;
5963}
5964
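/*
 * Usage sketch (illustrative only): how an EMT-context caller might read a
 * guest MSR and react to the possible outcomes.  MSR 0x10
 * (IA32_TIME_STAMP_COUNTER) is just an example number.
 *
 *   uint64_t uValue = 0;
 *   VBOXSTRICTRC rcStrict2 = CPUMQueryGuestMsr(pVCpu, 0x10, &uValue);
 *   if (rcStrict2 == VERR_CPUM_RAISE_GP_0)
 *   {
 *       // raise #GP(0) in the guest
 *   }
 *   else if (rcStrict2 == VINF_CPUM_R3_MSR_READ)
 *   {
 *       // cannot be serviced in this context, defer to ring-3
 *   }
 */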
5965
5966/**
5967 * Writes to a guest MSR.
5968 *
5969 * The caller is responsible for checking privilege if the call is the result of
5970 * a WRMSR instruction. We'll do the rest.
5971 *
5972 * @retval VINF_SUCCESS on success.
5973 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
5974 * current context (raw-mode or ring-0).
5975 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
5976 * appropriate actions.
5977 *
5978 * @param pVCpu The cross context virtual CPU structure.
5979 * @param idMsr The MSR id.
5980 * @param uValue The value to set.
5981 *
5982 * @remarks Everyone changing MSR values, including the recompiler, shall do it
5983 * by calling this method. This makes sure we have current values and
5984 * that we trigger all the right actions when something changes.
5985 *
5986 * For performance reasons, this actually isn't entirely true for some
5987 * MSRs when in HM mode. The code here and in HM must be aware of
5988 * this.
5989 */
5990VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
5991{
5992 VBOXSTRICTRC rcStrict;
5993 PVM pVM = pVCpu->CTX_SUFF(pVM);
5994 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5995 if (pRange)
5996 {
5997 STAM_COUNTER_INC(&pRange->cWrites);
5998 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
5999
6000 if (!(uValue & pRange->fWrGpMask))
6001 {
6002 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6003 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6004
6005 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6006 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6007
6008 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6009 if (uValueAdjusted != uValue)
6010 {
6011 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6012 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6013 }
6014
6015 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6016 if (rcStrict == VINF_SUCCESS)
6017 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6018 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6019 {
6020 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6021 STAM_COUNTER_INC(&pRange->cGps);
6022 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6023 }
6024#ifndef IN_RING3
6025 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6026 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6027#endif
6028 else
6029 {
6030 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6031 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6032 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6033 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6034 Assert(rcStrict != VERR_EM_INTERPRETER);
6035 }
6036 }
6037 else
6038 {
6039 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6040 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6041 STAM_COUNTER_INC(&pRange->cGps);
6042 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6043 rcStrict = VERR_CPUM_RAISE_GP_0;
6044 }
6045 }
6046 else
6047 {
6048 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6049 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6050 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6051 rcStrict = VERR_CPUM_RAISE_GP_0;
6052 }
6053 return rcStrict;
6054}
6055
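/*
 * Usage sketch (illustrative only): the write-side counterpart, roughly as a
 * WRMSR exit handler might use it.  MSR 0xC0000103 (TSC_AUX) and uNewValue
 * are example placeholders.
 *
 *   VBOXSTRICTRC rcStrict2 = CPUMSetGuestMsr(pVCpu, 0xC0000103, uNewValue);
 *   if (rcStrict2 == VERR_CPUM_RAISE_GP_0)
 *   {
 *       // raise #GP(0) in the guest
 *   }
 *   else if (rcStrict2 == VINF_CPUM_R3_MSR_WRITE)
 *   {
 *       // cannot be serviced in this context, defer to ring-3
 *   }
 */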
6056
6057#if defined(VBOX_STRICT) && defined(IN_RING3)
6058/**
6059 * Performs some checks on the static data related to MSRs.
6060 *
6061 * @returns VINF_SUCCESS on success, error on failure.
6062 */
6063int cpumR3MsrStrictInitChecks(void)
6064{
6065#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6066 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6067#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6068 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6069
6070 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6071 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6072 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6073 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6074 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6075 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6076 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6077 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6078 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6079 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6080 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6081 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6082 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6083 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6084 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6085 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6086 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6087 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6088 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6089 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6090 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6091 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6092 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6093 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6094 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6095 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6096 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6097 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6098 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6099 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6100 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6101 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6102 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6103 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6104 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6105 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6106 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6107 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6108 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6109 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6110 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6111 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6112 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6113 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6114 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6115 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6116 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6117 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6141 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6142 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6143
6144 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6145 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6146 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6147 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6148 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6149 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6150 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6151 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6152 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6153
6154 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6155 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6156 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6157 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6158 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6159 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6160 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6161 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6162 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6163 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6164 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6165 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6166 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6167 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6168 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6169 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6170 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6171 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6172 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6173 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6174 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6175 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6176 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6177 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6178 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6179 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6180 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6181 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6182 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6183 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6184 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6185 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6186 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6187 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6188 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6189 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6190 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6191 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6192 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6193 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6194 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6195 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6196 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6197 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6198 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6199 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6200 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6201 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6202 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6203 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6204 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6205 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6206 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6207 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6208 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6209 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6210 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6211 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6212 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6213 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6214 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6218 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6219 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6220 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6221 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6222 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6223 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6224 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6225 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6226 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6227
6228 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6229 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6230 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6231 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6232
6233 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6234 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6235 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6236 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6237 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6238 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6239 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6240 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6241 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6242 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6243 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6244 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6245 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6246 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6247 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6248 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6249 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6250 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6251 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6252 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6253 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6254 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6255 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6256 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6257 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6258 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6259 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6260 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6261 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6262 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6263 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6264 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6265 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6266 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6267 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6268 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6269 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6270 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6271 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6272 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6273 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6274 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6275 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6276 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6277 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6278 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6279 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6280 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6281 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6282 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6283 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6284 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6285 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6292 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6293 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6294 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6295 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6296 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6297 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6298 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6299 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6300 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6301 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6302 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6303 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6304 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6305 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6306 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6307 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6308 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6311 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6312 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6313 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6314 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6315 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6316 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6317 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6318 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6319 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6320 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6321 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6322 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6323 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6324
6325 CPUM_ASSERT_RD_MSR_FN(Gim)
6326
6327 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6328 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6329 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6330 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6331 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6332 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6333 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6334 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6335 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6336 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6337 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6338 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6339 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6340 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6341 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6342 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6343 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6344 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6345 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6346 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6347 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6348 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6349 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6350 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6351 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6352 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6353 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6354 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6355 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6356 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6357 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6358 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6359 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6360 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6361 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6362 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6363 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6364 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6365 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6366 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6367 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6368 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6369 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6370 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6371 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6372 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6375 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6376 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6377
6378 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6379 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6380 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6381 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6382 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6383 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6384 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6385 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6386 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6387
6388 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6389 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6390 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6391 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6392 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6393 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6394 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6395 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6396 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6397 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6398 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6399 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6400 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6401 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6402 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6403 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6404 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6405 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6406 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6407 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6408 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6409 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6410 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6411 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6412 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6413 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6414 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6415 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6416 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6417 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6418 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6419 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6420 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6421 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6422 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6423 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6424 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6425 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6426 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6427 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6428 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6429 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6430 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6431 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6432 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6433 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6434 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6435 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6436 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6437 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6438 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6439
6440 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6441 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6442
6443 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6444 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6445 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6446 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6447 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6448 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6449 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6450 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6451 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6452 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6453 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6454 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6455 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6456 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6457 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6458 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6459 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6460 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6461 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6462 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6463 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6464 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6465 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6466 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6467 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6468 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6469 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6470 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6471 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6472 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6473 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6474 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6475 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6476 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6477 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6478 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6479 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6480 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6481 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6482 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6483 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6484 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6485 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6486 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6487 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6488 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6489 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6490 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6491 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6492 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6493 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6501 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6502 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6503 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6504 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6505 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6506 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6507 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6508 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6509 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6510 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6511 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6512 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6513 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6514 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6515 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6516 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6519 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6520 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6521 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6522 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6523 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6524 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6525 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6526 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6527 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6528 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6529 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6530 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6531 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6532
6533 CPUM_ASSERT_WR_MSR_FN(Gim);
6534
6535 return VINF_SUCCESS;
6536}
6537#endif /* VBOX_STRICT && IN_RING3 */
6538
6539
6540/**
6541 * Gets the scalable bus frequency.
6542 *
6543 * The bus frequency is used as a base in several MSRs that give the CPU and
6544 * other frequency ratios.
6545 *
6546 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6547 * @param pVM The cross context VM structure.
6548 */
6549VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6550{
6551 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6552 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6553 uFreq = CPUM_SBUSFREQ_100MHZ;
6554 return uFreq;
6555}
6556
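/*
 * Illustrative example: with the 100 MHz fallback returned here, a ratio of 20
 * read out of one of the frequency MSRs corresponds to 20 * 100 MHz = 2.0 GHz.
 */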
6557
6558/**
6559 * Sets the guest EFER MSR without performing any additional checks.
6560 *
6561 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6562 * @param uOldEfer The previous EFER MSR value.
6563 * @param uValidEfer The new, validated EFER MSR value.
6564 *
6565 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate the
6566 * new EFER value before calling this function to perform the EFER transition.
6567 */
6568VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6569{
6570 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6571
6572 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6573 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6574 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6575 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6576 {
6577 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6578 HMFlushTLB(pVCpu);
6579
6580 /* Notify PGM about NXE changes. */
6581 if ( (uOldEfer & MSR_K6_EFER_NXE)
6582 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6583 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6584 }
6585}
6586
6587
6588/**
6589 * Checks if a guest PAT MSR write is valid.
6590 *
6591 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6592 * @param uValue The PAT MSR value.
6593 */
6594VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6595{
6596 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6597 {
6598 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6599 uint8_t uType = (uint8_t)(uValue >> cShift);
6600 if ((uType >= 8) || (uType == 2) || (uType == 3))
6601 {
6602 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#llx)\n", cShift + 7, cShift, uValue, uType));
6603 return false;
6604 }
6605 }
6606 return true;
6607}
6608
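/*
 * Illustrative example: the architectural power-on default of IA32_PAT,
 * 0x0007040600070406 (WB, WT, UC-, UC repeated), uses only types 0, 4, 6 and 7
 * and passes the check above, while any byte holding a reserved type (2, 3 or
 * anything >= 8) is rejected.
 *
 *   Assert(CPUMIsPatMsrValid(UINT64_C(0x0007040600070406)));  // default value
 *   Assert(!CPUMIsPatMsrValid(UINT64_C(0x0000000000000002))); // PA0 = 2 (reserved)
 */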
6609
6610/**
6611 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6612 *
6613 * @returns VBox status code.
6614 * @param pVM The cross context VM structure.
6615 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6616 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6617 * @param uNewEfer The new EFER MSR value being written.
6618 * @param puValidEfer Where to store the validated EFER (only updated if
6619 * this function returns VINF_SUCCESS).
6620 */
6621VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6622{
6623 /* #GP(0) if anything outside the allowed bits is set. */
6624 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6625 if (uNewEfer & ~fMask)
6626 {
6627 Log(("CPUM: Setting disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6628 return VERR_CPUM_RAISE_GP_0;
6629 }
6630
6631 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6632 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6633 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6634 && (uCr0 & X86_CR0_PG))
6635 {
6636 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6637 return VERR_CPUM_RAISE_GP_0;
6638 }
6639
6640 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6641 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6642 | MSR_K6_EFER_LME
6643 | MSR_K6_EFER_LMA /* ignored anyway */
6644 | MSR_K6_EFER_SCE
6645 | MSR_K6_EFER_FFXSR
6646 | MSR_K6_EFER_SVME)),
6647 ("Unexpected value %#RX64\n", uNewEfer));
6648
6649 /* Ignore EFER.LMA, it's updated when setting CR0. */
6650 fMask &= ~MSR_K6_EFER_LMA;
6651
6652 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6653 return VINF_SUCCESS;
6654}
6655
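/*
 * Usage sketch (illustrative only): the validate-then-set pattern a WRMSR
 * handler for MSR_K6_EFER would typically follow.  uGuestCr0, uOldEfer and
 * uNewEfer are example placeholders.
 *
 *   uint64_t uValidEfer;
 *   int rc2 = CPUMIsGuestEferMsrWriteValid(pVM, uGuestCr0, uOldEfer, uNewEfer, &uValidEfer);
 *   if (RT_SUCCESS(rc2))
 *       CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
 *   else
 *   {
 *       // rc2 == VERR_CPUM_RAISE_GP_0: raise #GP(0) in the guest
 *   }
 */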
6656
6657/**
6658 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6659 *
6660 * @returns Mask of valid EFER bits.
6661 * @param pVM The cross context VM structure.
6662 *
6663 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6664 * rather a read-only bit.
6665 */
6666VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6667{
6668 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6669 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6670 : 0;
6671 uint64_t fMask = 0;
6672 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6673
6674 /* Collect the bits the guest is allowed to change; read-only bits (e.g. LMA) are covered by fIgnoreMask. */
6675 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6676 fMask |= MSR_K6_EFER_NXE;
6677 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6678 fMask |= MSR_K6_EFER_LME;
6679 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6680 fMask |= MSR_K6_EFER_SCE;
6681 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6682 fMask |= MSR_K6_EFER_FFXSR;
6683 if (pVM->cpum.s.GuestFeatures.fSvm)
6684 fMask |= MSR_K6_EFER_SVME;
6685
6686 return (fIgnoreMask | fMask);
6687}
6688
6689
6690/**
6691 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6692 *
6693 * @returns The register value.
6694 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6695 * @thread EMT(pVCpu)
6696 */
6697VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6698{
6699 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6700 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6701}
6702
6703
6704/**
6705 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6706 *
6707 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6708 * @param uValue The new value.
6709 * @thread EMT(pVCpu)
6710 */
6711VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6712{
6713 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6714 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6715}
6716
6717
6718/**
6719 * Fast way for HM to access the IA32_SPEC_CTRL register.
6720 *
6721 * @returns The register value.
6722 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6723 * @thread EMT(pVCpu)
6724 */
6725VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6726{
6727 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6728}
6729
6730
6731/**
6732 * Fast way for HM to access the IA32_SPEC_CTRL register.
6733 *
6734 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6735 * @param uValue The new value.
6736 * @thread EMT(pVCpu)
6737 */
6738VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6739{
6740 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6741}
6742