source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp@74101
1/* $Id: CPUMAllMsrs.cpp 74101 2018-09-06 04:30:45Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#include <VBox/vmm/tm.h>
28#include <VBox/vmm/gim.h>
29#include "CPUMInternal.h"
30#include <VBox/vmm/vm.h>
31#include <VBox/err.h>
32
33
34/*********************************************************************************************************************************
35* Defined Constants And Macros *
36*********************************************************************************************************************************/
37/**
38 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
39 * pointing to it.
40 *
41 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
42 * correctly.
43 */
44#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
45 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
46 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
47 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
48 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
49 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
50 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
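/**
 * Usage sketch for the macro above (hypothetical worker name; real users such
 * as cpumMsrRd_Ia32MtrrFixed further down follow the same pattern): the macro
 * validates pRange->offCpumCpu and declares a typed pointer into CPUMCPU which
 * the callback then simply dereferences.
 *
 * @code
 *  static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_ExampleCpumCpuBacked(PVMCPU pVCpu, uint32_t idMsr,
 *                                                                   PCCPUMMSRRANGE pRange, uint64_t *puValue)
 *  {
 *      RT_NOREF_PV(idMsr);
 *      CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puStorage); // validates offset, declares puStorage
 *      *puValue = *puStorage;
 *      return VINF_SUCCESS;
 *  }
 * @endcode
 */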
51
52
53/*********************************************************************************************************************************
54* Structures and Typedefs *
55*********************************************************************************************************************************/
56
57/**
58 * Implements reading one or more MSRs.
59 *
60 * @returns VBox status code.
61 * @retval VINF_SUCCESS on success.
62 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
63 * current context (raw-mode or ring-0).
64 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
65 *
66 * @param pVCpu The cross context virtual CPU structure.
67 * @param idMsr The MSR we're reading.
68 * @param pRange The MSR range descriptor.
69 * @param puValue Where to return the value.
70 */
71typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
72/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
73typedef FNCPUMRDMSR *PFNCPUMRDMSR;
74
75
76/**
77 * Implements writing one or more MSRs.
78 *
79 * @retval VINF_SUCCESS on success.
80 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
81 * current context (raw-mode or ring-0).
82 * @retval VERR_CPUM_RAISE_GP_0 on failure.
83 *
84 * @param pVCpu The cross context virtual CPU structure.
85 * @param idMsr The MSR we're writing.
86 * @param pRange The MSR range descriptor.
87 * @param uValue The value to set, ignored bits masked.
88 * @param uRawValue The raw value with the ignored bits not masked.
89 */
90typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
91/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
92typedef FNCPUMWRMSR *PFNCPUMWRMSR;
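/**
 * Dispatch sketch (simplified; the real dispatcher resolves the MSR range and
 * its workers from lookup tables first): invoking a worker is a plain indirect
 * call through the function pointer types above, and the strict status code
 * tells the caller whether to succeed, defer to ring-3 or inject \#GP(0).
 *
 * @code
 *  static VBOXSTRICTRC exampleInvokeRdWorker(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange,
 *                                            PFNCPUMRDMSR pfnRdMsr, uint64_t *puValue) // hypothetical helper
 *  {
 *      VBOXSTRICTRC rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
 *      // VINF_SUCCESS: done; VINF_CPUM_R3_MSR_READ: retry in ring-3; VERR_CPUM_RAISE_GP_0: raise #GP(0).
 *      return rcStrict;
 *  }
 * @endcode
 */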
93
94
95
96/*
97 * Generic functions.
98 * Generic functions.
99 * Generic functions.
100 */
101
102
103/** @callback_method_impl{FNCPUMRDMSR} */
104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
105{
106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
107 *puValue = pRange->uValue;
108 return VINF_SUCCESS;
109}
110
111
112/** @callback_method_impl{FNCPUMWRMSR} */
113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
114{
115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
116 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
117 return VINF_SUCCESS;
118}
119
120
121/** @callback_method_impl{FNCPUMRDMSR} */
122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
123{
124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
125 return VERR_CPUM_RAISE_GP_0;
126}
127
128
129/** @callback_method_impl{FNCPUMWRMSR} */
130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
131{
132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
133 Assert(pRange->fWrGpMask == UINT64_MAX);
134 return VERR_CPUM_RAISE_GP_0;
135}
136
137
138
139
140/*
141 * IA32
142 * IA32
143 * IA32
144 */
145
146/** @callback_method_impl{FNCPUMRDMSR} */
147static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
148{
149 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
150 *puValue = 0; /** @todo implement machine check injection. */
151 return VINF_SUCCESS;
152}
153
154
155/** @callback_method_impl{FNCPUMWRMSR} */
156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
157{
158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
159 /** @todo implement machine check injection. */
160 return VINF_SUCCESS;
161}
162
163
164/** @callback_method_impl{FNCPUMRDMSR} */
165static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
166{
167 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
168 *puValue = 0; /** @todo implement machine check injection. */
169 return VINF_SUCCESS;
170}
171
172
173/** @callback_method_impl{FNCPUMWRMSR} */
174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
175{
176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
177 /** @todo implement machine check injection. */
178 return VINF_SUCCESS;
179}
180
181
182/** @callback_method_impl{FNCPUMRDMSR} */
183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
184{
185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
186 *puValue = TMCpuTickGet(pVCpu);
187#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
188 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
189#endif
190 return VINF_SUCCESS;
191}
192
193
194/** @callback_method_impl{FNCPUMWRMSR} */
195static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
196{
197 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
198 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
199 return VINF_SUCCESS;
200}
201
202
203/** @callback_method_impl{FNCPUMRDMSR} */
204static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
205{
206 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
207 uint64_t uValue = pRange->uValue;
208 if (uValue & 0x1f00)
209 {
210 /* Max allowed bus ratio present. */
211 /** @todo Implement scaled BUS frequency. */
212 }
213
214 *puValue = uValue;
215 return VINF_SUCCESS;
216}
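/**
 * Field sketch for the 0x1f00 test above: bits 12:8 of IA32_PLATFORM_ID carry
 * the maximum allowed (qualified) bus ratio, so a non-zero field means the
 * ratio is reported.
 *
 * @code
 *  uint8_t const uMaxBusRatio = (uint8_t)((uValue & UINT64_C(0x1f00)) >> 8);  // 0 = not reported (illustrative decode)
 * @endcode
 */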
217
218
219/** @callback_method_impl{FNCPUMRDMSR} */
220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
221{
222 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
223 return APICGetBaseMsr(pVCpu, puValue);
224}
225
226
227/** @callback_method_impl{FNCPUMWRMSR} */
228static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
229{
230 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
231 return APICSetBaseMsr(pVCpu, uValue);
232}
233
234
235/**
236 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
237 *
238 * @returns Fixed IA32_FEATURE_CONTROL value.
239 * @param pVCpu The cross context per CPU structure.
240 */
241VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
242{
243 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
244 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
245
246 /* Report VMX features. */
247 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
248 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
249
250 return fFeatCtl;
251}
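/**
 * Value sketch for the helper above: with VMX exposed to the guest the MSR
 * reads as the lock bit plus the VMXON enable bit, otherwise just the lock
 * bit; either way the lock bit keeps the guest from modifying the register.
 *
 * @code
 *  uint64_t const fWithVmx    = MSR_IA32_FEATURE_CONTROL_LOCK | MSR_IA32_FEATURE_CONTROL_VMXON;  // illustrative values
 *  uint64_t const fWithoutVmx = MSR_IA32_FEATURE_CONTROL_LOCK;
 * @endcode
 */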
252
253/** @callback_method_impl{FNCPUMRDMSR} */
254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
255{
256 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
257 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
258 return VINF_SUCCESS;
259}
260
261
262/** @callback_method_impl{FNCPUMWRMSR} */
263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
264{
265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
266 return VERR_CPUM_RAISE_GP_0;
267}
268
269
270/** @callback_method_impl{FNCPUMRDMSR} */
271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
272{
273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
274 /** @todo fake microcode update. */
275 *puValue = pRange->uValue;
276 return VINF_SUCCESS;
277}
278
279
280/** @callback_method_impl{FNCPUMWRMSR} */
281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
282{
283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
284 /* Normally, zero is written to Ia32BiosSignId before reading it in order
285 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
286 of the database entry should take care of most illegal writes for now, so
287 just ignore all writes atm. */
288 return VINF_SUCCESS;
289}
290
291
292/** @callback_method_impl{FNCPUMWRMSR} */
293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
294{
295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
296 /** @todo Fake bios update trigger better. The value is the address to an
297 * update package, I think. We should probably GP if it's invalid. */
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
304 *
305 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
306 * @param pVCpu The cross context per CPU structure.
307 */
308VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
309{
310 /* We do not support dual-monitor treatment for SMI and SMM. */
311 /** @todo SMM. */
312 RT_NOREF(pVCpu);
313 return 0;
314}
315
316
317/** @callback_method_impl{FNCPUMRDMSR} */
318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
319{
320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
321 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
322 return VINF_SUCCESS;
323}
324
325
326/** @callback_method_impl{FNCPUMWRMSR} */
327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
328{
329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
330 /** @todo SMM. */
331 return VINF_SUCCESS;
332}
333
334
335/** @callback_method_impl{FNCPUMRDMSR} */
336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
337{
338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
339 /** @todo check CPUID leaf 0ah. */
340 *puValue = 0;
341 return VINF_SUCCESS;
342}
343
344
345/** @callback_method_impl{FNCPUMWRMSR} */
346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
347{
348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
349 /** @todo check CPUID leaf 0ah. */
350 return VINF_SUCCESS;
351}
352
353
354/** @callback_method_impl{FNCPUMRDMSR} */
355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
356{
357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
358 /** @todo return 0x1000 if we try emulate mwait 100% correctly. */
359 *puValue = 0x40; /** @todo Change to CPU cache line size. */
360 return VINF_SUCCESS;
361}
362
363
364/** @callback_method_impl{FNCPUMWRMSR} */
365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
366{
367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
368 /** @todo should remember writes, though it's supposedly something only a BIOS
369 * would write, so it's not extremely important. */
370 return VINF_SUCCESS;
371}
372
373/** @callback_method_impl{FNCPUMRDMSR} */
374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
375{
376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
377 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
378 * what we want? */
379 *puValue = TMCpuTickGet(pVCpu);
380#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
381 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
382#endif
383 return VINF_SUCCESS;
384}
385
386
387/** @callback_method_impl{FNCPUMWRMSR} */
388static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
389{
390 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
391 /** @todo Write MPERF: Calc adjustment. */
392 return VINF_SUCCESS;
393}
394
395
396/** @callback_method_impl{FNCPUMRDMSR} */
397static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
398{
399 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
400 /** @todo Read APERF: Adjust against previously written MPERF value. Is TSC
401 * what we want? */
402 *puValue = TMCpuTickGet(pVCpu);
403#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
404 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
405#endif
406 return VINF_SUCCESS;
407}
408
409
410/** @callback_method_impl{FNCPUMWRMSR} */
411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
412{
413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
414 /** @todo Write APERF: Calc adjustment. */
415 return VINF_SUCCESS;
416}
417
418
419/**
420 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
421 *
422 * @returns Fixed IA32_MTRR_CAP value.
423 * @param pVCpu The cross context per CPU structure.
424 */
425VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
426{
427 RT_NOREF_PV(pVCpu);
428
429 /* This is currently a bit weird. :-) */
430 uint8_t const cVariableRangeRegs = 0;
431 bool const fSystemManagementRangeRegisters = false;
432 bool const fFixedRangeRegisters = false;
433 bool const fWriteCombiningType = false;
434 return cVariableRangeRegs
435 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
436 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
437 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
438}
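/**
 * Layout sketch for the value composed above: bits 7:0 hold the variable range
 * register count (VCNT), bit 8 fixed range support, bit 10 write-combining
 * support and bit 11 SMRR support.
 *
 * @code
 *  uint64_t const uMtrrCap = CPUMGetGuestIa32MtrrCap(pVCpu);     // illustrative decode of the same layout
 *  uint8_t  const cVarRegs = (uint8_t)(uMtrrCap & 0xff);         // VCNT
 *  bool     const fFixed   = RT_BOOL(uMtrrCap & RT_BIT_64(8));   // fixed range MTRRs
 *  bool     const fWrComb  = RT_BOOL(uMtrrCap & RT_BIT_64(10));  // write-combining memory type
 *  bool     const fSmrr    = RT_BOOL(uMtrrCap & RT_BIT_64(11));  // SMRR support
 * @endcode
 */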
439
440/** @callback_method_impl{FNCPUMRDMSR} */
441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
442{
443 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
444 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
445 return VINF_SUCCESS;
446}
447
448
449/** @callback_method_impl{FNCPUMRDMSR} */
450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
451{
452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
453 /** @todo Implement variable MTRR storage. */
454 Assert(pRange->uValue == (idMsr - 0x200) / 2);
455 *puValue = 0;
456 return VINF_SUCCESS;
457}
458
459
460/** @callback_method_impl{FNCPUMWRMSR} */
461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
462{
463 /*
464 * Validate the value.
465 */
466 Assert(pRange->uValue == (idMsr - 0x200) / 2);
467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
468
469 uint8_t uType = uValue & 0xff;
470 if ((uType >= 7) || (uType == 2) || (uType == 3))
471 {
472 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
473 return VERR_CPUM_RAISE_GP_0;
474 }
475
476 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
477 if (fInvPhysMask & uValue)
478 {
479 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
480 idMsr, uValue, uValue & fInvPhysMask));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 /*
485 * Store it.
486 */
487 /** @todo Implement variable MTRR storage. */
488 return VINF_SUCCESS;
489}
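/**
 * Mask sketch for the physical address check above: with, say, a 36-bit guest
 * physical address width, fInvPhysMask is ~(2^36 - 1), so any base address bit
 * at or above bit 36 takes the \#GP(0) path.
 *
 * @code
 *  uint8_t  const cExampleWidth = 36;                                 // illustrative width; the real one comes from GuestFeatures
 *  uint64_t const fInvMaskEx    = ~(RT_BIT_64(cExampleWidth) - 1U);
 *  Assert(fInvMaskEx == UINT64_C(0xfffffff000000000));
 * @endcode
 */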
490
491
492/** @callback_method_impl{FNCPUMRDMSR} */
493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
494{
495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
496 /** @todo Implement variable MTRR storage. */
497 Assert(pRange->uValue == (idMsr - 0x200) / 2);
498 *puValue = 0;
499 return VINF_SUCCESS;
500}
501
502
503/** @callback_method_impl{FNCPUMWRMSR} */
504static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
505{
506 /*
507 * Validate the value.
508 */
509 Assert(pRange->uValue == (idMsr - 0x200) / 2);
510 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
511
512 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
513 if (fInvPhysMask & uValue)
514 {
515 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
516 idMsr, uValue, uValue & fInvPhysMask));
517 return VERR_CPUM_RAISE_GP_0;
518 }
519
520 /*
521 * Store it.
522 */
523 /** @todo Implement variable MTRR storage. */
524 return VINF_SUCCESS;
525}
526
527
528/** @callback_method_impl{FNCPUMRDMSR} */
529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
530{
531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
532 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
533 *puValue = *puFixedMtrr;
534 return VINF_SUCCESS;
535}
536
537
538/** @callback_method_impl{FNCPUMWRMSR} */
539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
540{
541 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
542 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
543
544 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
545 {
546 uint8_t uType = (uint8_t)(uValue >> cShift);
547 if ((uType >= 7) || (uType == 2) || (uType == 3))
548 {
549 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
550 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
551 return VERR_CPUM_RAISE_GP_0;
552 }
553 }
554 *puFixedMtrr = uValue;
555 return VINF_SUCCESS;
556}
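/**
 * Layout sketch for the loop above: a fixed range MTRR packs eight memory
 * types, one per byte, and only types 0 (UC), 1 (WC), 4 (WT), 5 (WP) and
 * 6 (WB) are valid; 2, 3 and anything >= 7 must raise \#GP(0).
 *
 * @code
 *  uint64_t const uExampleFixedMtrr = UINT64_C(0x0606060606060606);  // illustrative: all eight sub-ranges write-back
 *  for (uint32_t cShift = 0; cShift < 63; cShift += 8)
 *      Assert((uint8_t)(uExampleFixedMtrr >> cShift) == 6);          // 6 = WB
 * @endcode
 */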
557
558
559/** @callback_method_impl{FNCPUMRDMSR} */
560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
561{
562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
563 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
564 return VINF_SUCCESS;
565}
566
567
568/** @callback_method_impl{FNCPUMWRMSR} */
569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
570{
571 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
572
573 uint8_t uType = uValue & 0xff;
574 if ((uType >= 7) || (uType == 2) || (uType == 3))
575 {
576 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
577 return VERR_CPUM_RAISE_GP_0;
578 }
579
580 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
581 return VINF_SUCCESS;
582}
583
584
585/** @callback_method_impl{FNCPUMRDMSR} */
586static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
587{
588 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
589 *puValue = pVCpu->cpum.s.Guest.msrPAT;
590 return VINF_SUCCESS;
591}
592
593
594/** @callback_method_impl{FNCPUMWRMSR} */
595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
596{
597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
598 if (CPUMIsPatMsrValid(uValue))
599 {
600 pVCpu->cpum.s.Guest.msrPAT = uValue;
601 return VINF_SUCCESS;
602 }
603 return VERR_CPUM_RAISE_GP_0;
604}
605
606
607/** @callback_method_impl{FNCPUMRDMSR} */
608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
609{
610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
611 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
612 return VINF_SUCCESS;
613}
614
615
616/** @callback_method_impl{FNCPUMWRMSR} */
617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
618{
619 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
620
621 /* Note! We used to mask this by 0xffff, but turns out real HW doesn't and
622 there are generally 32-bit working bits backing this register. */
623 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
624 return VINF_SUCCESS;
625}
626
627
628/** @callback_method_impl{FNCPUMRDMSR} */
629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
630{
631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
632 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
633 return VINF_SUCCESS;
634}
635
636
637/** @callback_method_impl{FNCPUMWRMSR} */
638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
639{
640 if (X86_IS_CANONICAL(uValue))
641 {
642 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
643 return VINF_SUCCESS;
644 }
645 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
646 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
647 return VERR_CPUM_RAISE_GP_0;
648}
649
650
651/** @callback_method_impl{FNCPUMRDMSR} */
652static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
653{
654 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
655 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
656 return VINF_SUCCESS;
657}
658
659
660/** @callback_method_impl{FNCPUMWRMSR} */
661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
662{
663 if (X86_IS_CANONICAL(uValue))
664 {
665 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
666 return VINF_SUCCESS;
667 }
668 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
669 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
670 return VERR_CPUM_RAISE_GP_0;
671}
672
673
674/** @callback_method_impl{FNCPUMRDMSR} */
675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
676{
677#if 0 /** @todo implement machine checks. */
678 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
679#else
680 *puValue = 0;
681#endif
682 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
683 return VINF_SUCCESS;
684}
685
686
687/** @callback_method_impl{FNCPUMRDMSR} */
688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
689{
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 /** @todo implement machine checks. */
692 *puValue = 0;
693 return VINF_SUCCESS;
694}
695
696
697/** @callback_method_impl{FNCPUMWRMSR} */
698static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
699{
700 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
701 /** @todo implement machine checks. */
702 return VINF_SUCCESS;
703}
704
705
706/** @callback_method_impl{FNCPUMRDMSR} */
707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
708{
709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
710 /** @todo implement machine checks. */
711 *puValue = 0;
712 return VINF_SUCCESS;
713}
714
715
716/** @callback_method_impl{FNCPUMWRMSR} */
717static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
718{
719 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
720 /** @todo implement machine checks. */
721 return VINF_SUCCESS;
722}
723
724
725/** @callback_method_impl{FNCPUMRDMSR} */
726static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
727{
728 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
729 /** @todo implement IA32_DEBUGCTL. */
730 *puValue = 0;
731 return VINF_SUCCESS;
732}
733
734
735/** @callback_method_impl{FNCPUMWRMSR} */
736static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
737{
738 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
739 /** @todo implement IA32_DEBUGCTL. */
740 return VINF_SUCCESS;
741}
742
743
744/** @callback_method_impl{FNCPUMRDMSR} */
745static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
746{
747 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
748 /** @todo implement intel SMM. */
749 *puValue = 0;
750 return VINF_SUCCESS;
751}
752
753
754/** @callback_method_impl{FNCPUMWRMSR} */
755static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
756{
757 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
758 /** @todo implement intel SMM. */
759 return VERR_CPUM_RAISE_GP_0;
760}
761
762
763/** @callback_method_impl{FNCPUMRDMSR} */
764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
765{
766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
767 /** @todo implement intel SMM. */
768 *puValue = 0;
769 return VINF_SUCCESS;
770}
771
772
773/** @callback_method_impl{FNCPUMWRMSR} */
774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
775{
776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
777 /** @todo implement intel SMM. */
778 return VERR_CPUM_RAISE_GP_0;
779}
780
781
782/** @callback_method_impl{FNCPUMRDMSR} */
783static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
784{
785 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
786 /** @todo implement intel direct cache access (DCA)?? */
787 *puValue = 0;
788 return VINF_SUCCESS;
789}
790
791
792/** @callback_method_impl{FNCPUMWRMSR} */
793static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
794{
795 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
796 /** @todo implement intel direct cache access (DCA)?? */
797 return VINF_SUCCESS;
798}
799
800
801/** @callback_method_impl{FNCPUMRDMSR} */
802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
803{
804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
805 /** @todo implement intel direct cache access (DCA)?? */
806 *puValue = 0;
807 return VINF_SUCCESS;
808}
809
810
811/** @callback_method_impl{FNCPUMRDMSR} */
812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
813{
814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
815 /** @todo implement intel direct cache access (DCA)?? */
816 *puValue = 0;
817 return VINF_SUCCESS;
818}
819
820
821/** @callback_method_impl{FNCPUMWRMSR} */
822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
823{
824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
825 /** @todo implement intel direct cache access (DCA)?? */
826 return VINF_SUCCESS;
827}
828
829
830/** @callback_method_impl{FNCPUMRDMSR} */
831static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
832{
833 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
834 /** @todo implement IA32_PERFEVTSEL0+. */
835 *puValue = 0;
836 return VINF_SUCCESS;
837}
838
839
840/** @callback_method_impl{FNCPUMWRMSR} */
841static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
842{
843 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
844 /** @todo implement IA32_PERFEVTSEL0+. */
845 return VINF_SUCCESS;
846}
847
848
849/** @callback_method_impl{FNCPUMRDMSR} */
850static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
851{
852 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
853 uint64_t uValue = pRange->uValue;
854
855 /* Always provide the max bus ratio for now. XNU expects it. */
856 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
857
858 PVM pVM = pVCpu->CTX_SUFF(pVM);
859 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
860 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
861 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
862 if (uTscRatio > 0x1f)
863 uTscRatio = 0x1f;
864 uValue |= (uint64_t)uTscRatio << 40;
865
866 *puValue = uValue;
867 return VINF_SUCCESS;
868}
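/**
 * Worked example for the ratio math above (illustrative numbers): with a
 * 2.4 GHz guest TSC and a 100 MHz scalable bus, the rounded ratio is
 * (2400000000 + 50000000) / 100000000 = 24, which then lands in bits 44:40.
 *
 * @code
 *  uint64_t const uTscHzEx = UINT64_C(2400000000);   // illustrative frequencies
 *  uint64_t const uBusHzEx = UINT64_C(100000000);
 *  uint8_t        uRatioEx = (uint8_t)((uTscHzEx + uBusHzEx / 2) / uBusHzEx);
 *  if (uRatioEx > 0x1f)
 *      uRatioEx = 0x1f;
 *  Assert(uRatioEx == 24);
 * @endcode
 */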
869
870
871/** @callback_method_impl{FNCPUMWRMSR} */
872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
873{
874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
875 /* Pentium4 allows writing, but all bits are ignored. */
876 return VINF_SUCCESS;
877}
878
879
880/** @callback_method_impl{FNCPUMRDMSR} */
881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
882{
883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
884 /** @todo implement IA32_PERFCTL. */
885 *puValue = 0;
886 return VINF_SUCCESS;
887}
888
889
890/** @callback_method_impl{FNCPUMWRMSR} */
891static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
892{
893 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
894 /** @todo implement IA32_PERFCTL. */
895 return VINF_SUCCESS;
896}
897
898
899/** @callback_method_impl{FNCPUMRDMSR} */
900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
901{
902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
903 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
904 *puValue = 0;
905 return VINF_SUCCESS;
906}
907
908
909/** @callback_method_impl{FNCPUMWRMSR} */
910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
911{
912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
913 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
914 return VINF_SUCCESS;
915}
916
917
918/** @callback_method_impl{FNCPUMRDMSR} */
919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
920{
921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
922 /** @todo implement performance counters. */
923 *puValue = 0;
924 return VINF_SUCCESS;
925}
926
927
928/** @callback_method_impl{FNCPUMWRMSR} */
929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
930{
931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
932 /** @todo implement performance counters. */
933 return VINF_SUCCESS;
934}
935
936
937/** @callback_method_impl{FNCPUMRDMSR} */
938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
939{
940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
941 /** @todo implement performance counters. */
942 *puValue = 0;
943 return VINF_SUCCESS;
944}
945
946
947/** @callback_method_impl{FNCPUMWRMSR} */
948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
949{
950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
951 /** @todo implement performance counters. */
952 return VINF_SUCCESS;
953}
954
955
956/** @callback_method_impl{FNCPUMRDMSR} */
957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
958{
959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
960 /** @todo implement performance counters. */
961 *puValue = 0;
962 return VINF_SUCCESS;
963}
964
965
966/** @callback_method_impl{FNCPUMWRMSR} */
967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
968{
969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
970 /** @todo implement performance counters. */
971 return VINF_SUCCESS;
972}
973
974
975/** @callback_method_impl{FNCPUMRDMSR} */
976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
977{
978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
979 /** @todo implement performance counters. */
980 *puValue = 0;
981 return VINF_SUCCESS;
982}
983
984
985/** @callback_method_impl{FNCPUMWRMSR} */
986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
987{
988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
989 /** @todo implement performance counters. */
990 return VINF_SUCCESS;
991}
992
993
994/** @callback_method_impl{FNCPUMRDMSR} */
995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
996{
997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
998 /** @todo implement performance counters. */
999 *puValue = 0;
1000 return VINF_SUCCESS;
1001}
1002
1003
1004/** @callback_method_impl{FNCPUMWRMSR} */
1005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1006{
1007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1008 /** @todo implement performance counters. */
1009 return VINF_SUCCESS;
1010}
1011
1012
1013/** @callback_method_impl{FNCPUMRDMSR} */
1014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1015{
1016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1017 /** @todo implement performance counters. */
1018 *puValue = 0;
1019 return VINF_SUCCESS;
1020}
1021
1022
1023/** @callback_method_impl{FNCPUMWRMSR} */
1024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1025{
1026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1027 /** @todo implement performance counters. */
1028 return VINF_SUCCESS;
1029}
1030
1031
1032/** @callback_method_impl{FNCPUMRDMSR} */
1033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1034{
1035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1036 /** @todo implement IA32_CLOCK_MODULATION. */
1037 *puValue = 0;
1038 return VINF_SUCCESS;
1039}
1040
1041
1042/** @callback_method_impl{FNCPUMWRMSR} */
1043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1044{
1045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1046 /** @todo implement IA32_CLOCK_MODULATION. */
1047 return VINF_SUCCESS;
1048}
1049
1050
1051/** @callback_method_impl{FNCPUMRDMSR} */
1052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1053{
1054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1055 /** @todo implement IA32_THERM_INTERRUPT. */
1056 *puValue = 0;
1057 return VINF_SUCCESS;
1058}
1059
1060
1061/** @callback_method_impl{FNCPUMWRMSR} */
1062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1063{
1064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1065 /** @todo implement IA32_THERM_INTERRUPT. */
1066 return VINF_SUCCESS;
1067}
1068
1069
1070/** @callback_method_impl{FNCPUMRDMSR} */
1071static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1072{
1073 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1074 /** @todo implement IA32_THERM_STATUS. */
1075 *puValue = 0;
1076 return VINF_SUCCESS;
1077}
1078
1079
1080/** @callback_method_impl{FNCPUMWRMSR} */
1081static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1082{
1083 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1084 /** @todo implement IA32_THERM_STATUS. */
1085 return VINF_SUCCESS;
1086}
1087
1088
1089/** @callback_method_impl{FNCPUMRDMSR} */
1090static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1091{
1092 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1093 /** @todo implement IA32_THERM2_CTL. */
1094 *puValue = 0;
1095 return VINF_SUCCESS;
1096}
1097
1098
1099/** @callback_method_impl{FNCPUMWRMSR} */
1100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1101{
1102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1103 /** @todo implement IA32_THERM2_CTL. */
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** @callback_method_impl{FNCPUMRDMSR} */
1109static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1110{
1111 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1112 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1113 return VINF_SUCCESS;
1114}
1115
1116
1117/** @callback_method_impl{FNCPUMWRMSR} */
1118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1119{
1120 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1121#ifdef LOG_ENABLED
1122 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1123#endif
1124
1125 /* Unsupported bits are generally ignored and stripped by the MSR range
1126 entry that got us here. So, we just need to preserve fixed bits. */
1127 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1128 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1129 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1130
1131 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1132 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1133
1134 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1135 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1136 return VINF_SUCCESS;
1137}
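/**
 * Behaviour sketch for the write above: whatever the guest writes, the PEBS
 * and BTS unavailable bits are forced back on, so a guest probing for PEBS via
 * IA32_MISC_ENABLE always sees it reported as unavailable.
 *
 * @code
 *  uint64_t const uGuestWriteEx = 0;   // illustrative: guest tries to clear everything
 *  uint64_t const uStoredEx     = uGuestWriteEx
 *                               | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
 *                               | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
 *  Assert(uStoredEx & MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL);
 * @endcode
 */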
1138
1139
1140/** @callback_method_impl{FNCPUMRDMSR} */
1141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1142{
1143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1144
1145 /** @todo Implement machine check exception injection. */
1146 switch (idMsr & 3)
1147 {
1148 case 0:
1149 case 1:
1150 *puValue = 0;
1151 break;
1152
1153 /* The ADDR and MISC registers aren't accessible since the
1154 corresponding STATUS bits are zero. */
1155 case 2:
1156 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1157 return VERR_CPUM_RAISE_GP_0;
1158 case 3:
1159 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1160 return VERR_CPUM_RAISE_GP_0;
1161 }
1162 return VINF_SUCCESS;
1163}
1164
1165
1166/** @callback_method_impl{FNCPUMWRMSR} */
1167static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1168{
1169 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1170 switch (idMsr & 3)
1171 {
1172 case 0:
1173 /* Ignore writes to the CTL register. */
1174 break;
1175
1176 case 1:
1177 /* According to specs, the STATUS register can only be written to
1178 with the value 0. VBoxCpuReport thinks differently for a
1179 Pentium M Dothan, but implementing according to specs now. */
1180 if (uValue != 0)
1181 {
1182 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1183 return VERR_CPUM_RAISE_GP_0;
1184 }
1185 break;
1186
1187 /* The specs state that ADDR and MISC can be cleared by writing zeros.
1188 Writing 1s will GP. Need to figure out how this relates to the
1189 ADDRV and MISCV status flags. If writing is independent of those
1190 bits, we need to know whether the CPU really implements them since
1191 that is exposed by writing 0 to them.
1192 Implementing the solution with the fewest GPs for now. */
1193 case 2:
1194 if (uValue != 0)
1195 {
1196 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1197 return VERR_CPUM_RAISE_GP_0;
1198 }
1199 break;
1200 case 3:
1201 if (uValue != 0)
1202 {
1203 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1204 return VERR_CPUM_RAISE_GP_0;
1205 }
1206 break;
1207 }
1208 return VINF_SUCCESS;
1209}
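/**
 * Index sketch for the two handlers above: the machine check bank registers
 * come in groups of four starting at IA32_MC0_CTL (0x400), so the low two
 * bits of idMsr select CTL, STATUS, ADDR or MISC while the remaining bits
 * identify the bank.
 *
 * @code
 *  uint32_t const idMsrEx = 0x401;                 // illustrative: IA32_MC0_STATUS
 *  uint32_t const iBankEx = (idMsrEx - 0x400) / 4;
 *  uint32_t const iRegEx  = idMsrEx & 3;           // 0=CTL, 1=STATUS, 2=ADDR, 3=MISC
 *  Assert(iBankEx == 0 && iRegEx == 1);
 * @endcode
 */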
1210
1211
1212/** @callback_method_impl{FNCPUMRDMSR} */
1213static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1214{
1215 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1216 /** @todo Implement machine check exception injection. */
1217 *puValue = 0;
1218 return VINF_SUCCESS;
1219}
1220
1221
1222/** @callback_method_impl{FNCPUMWRMSR} */
1223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1224{
1225 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1226 /** @todo Implement machine check exception injection. */
1227 return VINF_SUCCESS;
1228}
1229
1230
1231/** @callback_method_impl{FNCPUMRDMSR} */
1232static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1233{
1234 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1235 /** @todo implement IA32_DS_AREA. */
1236 *puValue = 0;
1237 return VINF_SUCCESS;
1238}
1239
1240
1241/** @callback_method_impl{FNCPUMWRMSR} */
1242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1243{
1244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1245 /** @todo implement IA32_DS_AREA. */
1246 return VINF_SUCCESS;
1247}
1248
1249
1250/** @callback_method_impl{FNCPUMRDMSR} */
1251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1252{
1253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1254 /** @todo implement TSC deadline timer. */
1255 *puValue = 0;
1256 return VINF_SUCCESS;
1257}
1258
1259
1260/** @callback_method_impl{FNCPUMWRMSR} */
1261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1262{
1263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1264 /** @todo implement TSC deadline timer. */
1265 return VINF_SUCCESS;
1266}
1267
1268
1269/** @callback_method_impl{FNCPUMRDMSR} */
1270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1271{
1272 RT_NOREF_PV(pRange);
1273 return APICReadMsr(pVCpu, idMsr, puValue);
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMWRMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1279{
1280 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1281 return APICWriteMsr(pVCpu, idMsr, uValue);
1282}
1283
1284
1285/** @callback_method_impl{FNCPUMRDMSR} */
1286static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1287{
1288 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1289 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1290 *puValue = 0;
1291 return VINF_SUCCESS;
1292}
1293
1294
1295/** @callback_method_impl{FNCPUMWRMSR} */
1296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1297{
1298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1299 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1300 return VINF_SUCCESS;
1301}
1302
1303
1304/**
1305 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1306 *
1307 * @returns IA32_VMX_BASIC value.
1308 * @param pVCpu The cross context per CPU structure.
1309 */
1310VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1311{
1312 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1313 uint64_t uVmxMsr;
1314 if (pGuestFeatures->fVmx)
1315 {
1316 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1317 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1318 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, VMX_V_VMCS_PHYSADDR_4G_LIMIT )
1319 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1320 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1321 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1322 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1323 }
1324 else
1325 uVmxMsr = 0;
1326 return uVmxMsr;
1327}
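/**
 * Decoding sketch for the value built above: the same VMX_BF_BASIC_* field
 * definitions work with RT_BF_GET to pull individual fields back out, e.g.
 * the VMCS revision identifier a guest hypervisor must place at the start of
 * its VMCS region.
 *
 * @code
 *  uint64_t const uVmxBasicEx = CPUMGetGuestIa32VmxBasic(pVCpu);              // illustrative decode
 *  uint32_t const uVmcsIdEx   = RT_BF_GET(uVmxBasicEx, VMX_BF_BASIC_VMCS_ID);
 *  uint32_t const cbVmcsEx    = RT_BF_GET(uVmxBasicEx, VMX_BF_BASIC_VMCS_SIZE);
 * @endcode
 */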
1328
1329
1330/** @callback_method_impl{FNCPUMRDMSR} */
1331static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1332{
1333 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1334 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1335 return VINF_SUCCESS;
1336}
1337
1338
1339/**
1340 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1341 *
1342 * @returns IA32_VMX_PINBASED_CTLS value.
1343 * @param pVCpu The cross context per CPU structure.
1344 */
1345VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1346{
1347 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1348 uint64_t uVmxMsr;
1349 if (pGuestFeatures->fVmx)
1350 {
1351 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1352 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1353 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1354 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1355 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1356 uint32_t const fVal = VMX_PIN_CTLS_DEFAULT1;
1357 uint32_t const fZap = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1358 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1359 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1360 }
1361 else
1362 uVmxMsr = 0;
1363 return uVmxMsr;
1364}
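/**
 * Semantics sketch for the fVal/fZap pair above (the sibling control MSRs
 * below follow the same scheme): the low half of the MSR holds the allowed-0
 * settings (bits that must be 1) and the high half the allowed-1 settings
 * (bits that may be 1), so a control value is acceptable when it keeps every
 * mandatory bit set and stays within the permitted mask.
 *
 * @code
 *  uint64_t const uPinCtlsEx  = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);  // illustrative check
 *  uint32_t const fAllowed0Ex = RT_LO_U32(uPinCtlsEx);                   // must-be-one bits
 *  uint32_t const fAllowed1Ex = RT_HI_U32(uPinCtlsEx);                   // may-be-one bits
 *  uint32_t const fRequestEx  = fAllowed0Ex;                             // minimal valid value
 *  bool     const fOkEx       = (fRequestEx & fAllowed0Ex) == fAllowed0Ex
 *                            && (fRequestEx & ~fAllowed1Ex) == 0;
 * @endcode
 */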
1365
1366
1367/** @callback_method_impl{FNCPUMRDMSR} */
1368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1369{
1370 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1371 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1372 return VINF_SUCCESS;
1373}
1374
1375
1376/**
1377 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1378 *
1379 * @returns IA32_VMX_PROCBASED_CTLS value.
1380 * @param pVCpu The cross context per CPU structure.
1381 */
1382VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1383{
1384 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1385 uint64_t uVmxMsr;
1386 if (pGuestFeatures->fVmx)
1387 {
1388 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1389 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1390 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1391 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1392 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1393 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1394 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1395 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1396 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1397 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1398 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1399 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1400 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1401 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1402 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1403 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1404 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1405 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1406 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1407 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1408 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1409 uint32_t const fVal = VMX_PROC_CTLS_DEFAULT1;
1410 uint32_t const fZap = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1411 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1412 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1413 }
1414 else
1415 uVmxMsr = 0;
1416 return uVmxMsr;
1417}
1418
1419
1420/** @callback_method_impl{FNCPUMRDMSR} */
1421static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1422{
1423 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1424 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1425 return VINF_SUCCESS;
1426}
1427
1428
1429/**
1430 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxExitCtls.
1431 *
1432 * @returns IA32_VMX_EXIT_CTLS value.
1433 * @param pVCpu The cross context per CPU structure.
1434 */
1435VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1436{
1437 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1438 uint64_t uVmxMsr;
1439 if (pGuestFeatures->fVmx)
1440 {
1441 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1442 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1443 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1444 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1445 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1446 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1447 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1448 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1449 uint32_t const fVal = VMX_EXIT_CTLS_DEFAULT1;
1450 uint32_t const fZap = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1451 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1452 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1453 }
1454 else
1455 uVmxMsr = 0;
1456 return uVmxMsr;
1457}
1458
1459
1460/** @callback_method_impl{FNCPUMRDMSR} */
1461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1462{
1463 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1464 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1465 return VINF_SUCCESS;
1466}
1467
1468
1469/**
1470 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1471 *
1472 * @returns IA32_VMX_ENTRY_CTLS value.
1473 * @param pVCpu The cross context per CPU structure.
1474 */
1475VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1476{
1477 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1478 uint64_t uVmxMsr;
1479 if (pGuestFeatures->fVmx)
1480 {
1481 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1482 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1483 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1484 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1485 uint32_t const fDefault1 = VMX_ENTRY_CTLS_DEFAULT1;
1486 uint32_t const fVal = fDefault1;
1487 uint32_t const fZap = fFeatures | fDefault1;
1488 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1489 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1490 }
1491 else
1492 uVmxMsr = 0;
1493 return uVmxMsr;
1494}
1495
1496
1497/** @callback_method_impl{FNCPUMRDMSR} */
1498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1499{
1500 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1501 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1502 return VINF_SUCCESS;
1503}
1504
1505
1506/**
1507 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1508 *
1509 * @returns IA32_VMX_MISC MSR.
1510 * @param pVCpu The cross context per CPU structure.
1511 */
1512VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1513{
1514 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1515 uint64_t uVmxMsr;
1516 if (pGuestFeatures->fVmx)
1517 {
1518 uint64_t uHostMsr;
1519 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1520 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
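    /* Clamp the host's advertised MSR-area capacity field to what the virtual VMX
       implementation supports. */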
1521 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_MAX_MSRS);
1522 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1523 | RT_BF_MAKE(VMX_BF_MISC_EXIT_STORE_EFER_LMA, pGuestFeatures->fVmxExitStoreEferLma )
1524 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, VMX_V_GUEST_ACTIVITY_STATE_MASK )
1525 | RT_BF_MAKE(VMX_BF_MISC_PT, 0 )
1526 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1527 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1528 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1529 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1530 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1531 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1532 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1533 }
1534 else
1535 uVmxMsr = 0;
1536 return uVmxMsr;
1537}
1538
1539
1540/** @callback_method_impl{FNCPUMRDMSR} */
1541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1542{
1543 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1544 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1545 return VINF_SUCCESS;
1546}
1547
1548
1549/**
1550 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1551 *
1552 * @returns IA32_VMX_CR0_FIXED0 value.
1553 * @param pVCpu The cross context per CPU structure.
1554 */
1555VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1556{
1557 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1558 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR0_FIXED0 : 0;
1559 return uVmxMsr;
1560}
1561
1562
1563/** @callback_method_impl{FNCPUMRDMSR} */
1564static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1565{
1566 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1567 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1568 return VINF_SUCCESS;
1569}
1570
1571
1572/**
1573 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1574 *
1575 * @returns IA32_VMX_CR0_FIXED1 MSR.
1576 * @param pVCpu The cross context per CPU structure.
1577 */
1578VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1579{
1580 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1581 uint64_t uVmxMsr;
1582 if (pGuestFeatures->fVmx)
1583 {
1584 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1585 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1586 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1587 }
1588 else
1589 uVmxMsr = 0;
1590 return uVmxMsr;
1591}
1592
1593
1594/** @callback_method_impl{FNCPUMRDMSR} */
1595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1596{
1597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1598 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1599 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1600 return VINF_SUCCESS;
1601}
1602
1603
1604/**
1605 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1606 *
1607 * @returns IA32_VMX_CR4_FIXED0 value.
1608 * @param pVCpu The cross context per CPU structure.
1609 */
1610VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1611{
1612 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1613 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1614 return uVmxMsr;
1615}
1616
1617
1618/** @callback_method_impl{FNCPUMRDMSR} */
1619static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1620{
1621 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1622 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1623 return VINF_SUCCESS;
1624}
1625
1626
1627/**
1628 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1629 *
1630 * @returns IA32_VMX_CR4_FIXED1 MSR.
1631 * @param pVCpu The cross context per CPU structure.
1632 */
1633VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1634{
1635 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1636 uint64_t uVmxMsr;
1637 if (pGuestFeatures->fVmx)
1638 {
1639 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1640 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1641 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1642 }
1643 else
1644 uVmxMsr = 0;
1645 return uVmxMsr;
1646}
1647
1648
1649/** @callback_method_impl{FNCPUMRDMSR} */
1650static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1651{
1652 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1653 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1654 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1655 return VINF_SUCCESS;
1656}
1657
1658
1659/**
1660 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1661 *
1662 * @returns IA32_VMX_VMCS_ENUM value.
1663 * @param pVCpu The cross context per CPU structure.
1664 */
1665VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1666{
1667 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1668 uint64_t uVmxMsr;
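    /* Only bits 9:1 are meaningful: they report the highest index value used for any
       VMCS encoding; all other bits read as zero. */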
1669 if (pGuestFeatures->fVmx)
1670 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1671 else
1672 uVmxMsr = 0;
1673 return uVmxMsr;
1674}
1675
1676
1677/** @callback_method_impl{FNCPUMRDMSR} */
1678static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1679{
1680 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1681 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1682 return VINF_SUCCESS;
1683}
1684
1685
1686/**
1687 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1688 *
1689 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1690 * @param pVCpu The cross context per CPU structure.
1691 */
1692VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1693{
1694 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1695 uint64_t uVmxMsr;
1696 if (pGuestFeatures->fVmx)
1697 {
1698 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1699 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1700 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1701 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1702 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1703 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1704 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1705 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1706 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1707 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1708 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1709 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1710 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1711 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1712 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1713 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1714 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1715 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1716 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1717 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
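    /* The secondary controls have no default-1 class, so no bit is fixed to 1 and the
       allowed-0 settings (low dword) stay zero. */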
1718 uint32_t const fVal = 0;
1719 uint32_t const fZap = fFeatures;
1720 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1721 }
1722 else
1723 uVmxMsr = 0;
1724 return uVmxMsr;
1725}
1726
1727
1728/** @callback_method_impl{FNCPUMRDMSR} */
1729static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1730{
1731 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1732 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1733 return VINF_SUCCESS;
1734}
1735
1736
1737/** @callback_method_impl{FNCPUMRDMSR} */
1738static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1739{
1740 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1741 *puValue = 0;
1742 return VINF_SUCCESS;
1743}
1744
1745
1746/** @callback_method_impl{FNCPUMRDMSR} */
1747static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1748{
1749 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1750 *puValue = 0;
1751 return VINF_SUCCESS;
1752}
1753
1754
1755/** @callback_method_impl{FNCPUMRDMSR} */
1756static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1757{
1758 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1759 *puValue = 0;
1760 return VINF_SUCCESS;
1761}
1762
1763
1764/** @callback_method_impl{FNCPUMRDMSR} */
1765static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1766{
1767 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1768 *puValue = 0;
1769 return VINF_SUCCESS;
1770}
1771
1772
1773/** @callback_method_impl{FNCPUMRDMSR} */
1774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1775{
1776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1777 *puValue = 0;
1778 return VINF_SUCCESS;
1779}
1780
1781
1782/**
1783 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1784 *
1785 * @returns IA32_VMX_VMFUNC value.
1786 * @param pVCpu The cross context per CPU structure.
1787 */
1788VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1789{
1790 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1791 uint64_t uVmxMsr;
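    /* Bit 0 advertises the EPTP-switching VM function; it is only exposed when the
       VMFUNC secondary control is available to the guest. */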
1792 if ( pGuestFeatures->fVmx
1793 && pGuestFeatures->fVmxVmFunc)
1794 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1795 else
1796 uVmxMsr = 0;
1797 return uVmxMsr;
1798}
1799
1800
1801/** @callback_method_impl{FNCPUMRDMSR} */
1802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1803{
1804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1805 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1806 return VINF_SUCCESS;
1807}
1808
1809
1810/** @callback_method_impl{FNCPUMRDMSR} */
1811static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1812{
1813 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1814 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1815 return VINF_SUCCESS;
1816}
1817
1818
1819/** @callback_method_impl{FNCPUMWRMSR} */
1820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1821{
1822 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1823
1824 /* NB: The STIBP bit can be set even when only IBRS is advertised, regardless of whether STIBP is actually implemented. */
1825 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1826 {
1827 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1828 return VERR_CPUM_RAISE_GP_0;
1829 }
1830
1831 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1832 return VINF_SUCCESS;
1833}
1834
1835
1836/** @callback_method_impl{FNCPUMWRMSR} */
1837static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1838{
1839 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1840 return VINF_SUCCESS;
1841}
1842
1843
1844/** @callback_method_impl{FNCPUMRDMSR} */
1845static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1846{
1847 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1848 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1849 return VINF_SUCCESS;
1850}
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863/*
1864 * AMD64
1865 * AMD64
1866 * AMD64
1867 */
1868
1869
1870/** @callback_method_impl{FNCPUMRDMSR} */
1871static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1872{
1873 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1874 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1875 return VINF_SUCCESS;
1876}
1877
1878
1879/** @callback_method_impl{FNCPUMWRMSR} */
1880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1881{
1882 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1883 uint64_t uValidatedEfer;
1884 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1885 int rc = CPUMQueryValidatedGuestEfer(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1886 if (RT_FAILURE(rc))
1887 return VERR_CPUM_RAISE_GP_0;
1888
1889 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1890 return VINF_SUCCESS;
1891}
1892
1893
1894/** @callback_method_impl{FNCPUMRDMSR} */
1895static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1896{
1897 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1898 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1899 return VINF_SUCCESS;
1900}
1901
1902
1903/** @callback_method_impl{FNCPUMWRMSR} */
1904static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1905{
1906 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1907 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1908 return VINF_SUCCESS;
1909}
1910
1911
1912/** @callback_method_impl{FNCPUMRDMSR} */
1913static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1914{
1915 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1916 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1917 return VINF_SUCCESS;
1918}
1919
1920
1921/** @callback_method_impl{FNCPUMWRMSR} */
1922static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1923{
1924 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1925 if (!X86_IS_CANONICAL(uValue))
1926 {
1927 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1928 return VERR_CPUM_RAISE_GP_0;
1929 }
1930 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1931 return VINF_SUCCESS;
1932}
1933
1934
1935/** @callback_method_impl{FNCPUMRDMSR} */
1936static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1937{
1938 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1939 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1940 return VINF_SUCCESS;
1941}
1942
1943
1944/** @callback_method_impl{FNCPUMWRMSR} */
1945static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1946{
1947 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1948 if (!X86_IS_CANONICAL(uValue))
1949 {
1950 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1951 return VERR_CPUM_RAISE_GP_0;
1952 }
1953 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1954 return VINF_SUCCESS;
1955}
1956
1957
1958/** @callback_method_impl{FNCPUMRDMSR} */
1959static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1960{
1961 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1962 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
1963 return VINF_SUCCESS;
1964}
1965
1966
1967/** @callback_method_impl{FNCPUMWRMSR} */
1968static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1969{
1970 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1971 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
1972 return VINF_SUCCESS;
1973}
1974
1975
1976/** @callback_method_impl{FNCPUMRDMSR} */
1977static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1978{
1979 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1980 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
1981 return VINF_SUCCESS;
1982}
1983
1984
1985/** @callback_method_impl{FNCPUMWRMSR} */
1986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1987{
1988 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1989 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
1990 return VINF_SUCCESS;
1991}
1992
1993
1994/** @callback_method_impl{FNCPUMRDMSR} */
1995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1996{
1997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1998 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
1999 return VINF_SUCCESS;
2000}
2001
2002/** @callback_method_impl{FNCPUMWRMSR} */
2003static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2004{
2005 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2006 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2007 return VINF_SUCCESS;
2008}
2009
2010
2011
2012/** @callback_method_impl{FNCPUMRDMSR} */
2013static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2014{
2015 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2016 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2017 return VINF_SUCCESS;
2018}
2019
2020/** @callback_method_impl{FNCPUMWRMSR} */
2021static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2022{
2023 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2024 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2025 return VINF_SUCCESS;
2026}
2027
2028
2029/** @callback_method_impl{FNCPUMRDMSR} */
2030static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2031{
2032 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2033 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2034 return VINF_SUCCESS;
2035}
2036
2037/** @callback_method_impl{FNCPUMWRMSR} */
2038static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2039{
2040 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2041 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2042 return VINF_SUCCESS;
2043}
2044
2045
2046/*
2047 * Intel specific
2048 * Intel specific
2049 * Intel specific
2050 */
2051
2052/** @callback_method_impl{FNCPUMRDMSR} */
2053static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2054{
2055 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2056 /** @todo recalc clock frequency ratio? */
2057 *puValue = pRange->uValue;
2058 return VINF_SUCCESS;
2059}
2060
2061
2062/** @callback_method_impl{FNCPUMWRMSR} */
2063static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2064{
2065 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2066 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2067 return VINF_SUCCESS;
2068}
2069
2070
2071/** @callback_method_impl{FNCPUMRDMSR} */
2072static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2073{
2074 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2075
2076 /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2077 have a 4-bit core count. */
2078 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2079 uint16_t cThreads = cCores; /** @todo hyper-threading. */
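    /* Layout: bits 15:0 hold the thread count, bits 31:16 the core count. */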
2080 *puValue = RT_MAKE_U32(cThreads, cCores);
2081 return VINF_SUCCESS;
2082}
2083
2084
2085/** @callback_method_impl{FNCPUMRDMSR} */
2086static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2087{
2088 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2089 /** @todo P4 hard power on config */
2090 *puValue = pRange->uValue;
2091 return VINF_SUCCESS;
2092}
2093
2094
2095/** @callback_method_impl{FNCPUMWRMSR} */
2096static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2097{
2098 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2099 /** @todo P4 hard power on config */
2100 return VINF_SUCCESS;
2101}
2102
2103
2104/** @callback_method_impl{FNCPUMRDMSR} */
2105static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2106{
2107 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2108 /** @todo P4 soft power on config */
2109 *puValue = pRange->uValue;
2110 return VINF_SUCCESS;
2111}
2112
2113
2114/** @callback_method_impl{FNCPUMWRMSR} */
2115static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2116{
2117 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2118 /** @todo P4 soft power on config */
2119 return VINF_SUCCESS;
2120}
2121
2122
2123/** @callback_method_impl{FNCPUMRDMSR} */
2124static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2125{
2126 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2127
2128 uint64_t uValue;
2129 PVM pVM = pVCpu->CTX_SUFF(pVM);
2130 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2131 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2132 {
2133 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2134 {
2135 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2136 uValue = 0;
2137 }
2138 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2139 {
2140 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2141 uValue = 1;
2142 }
2143 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2144 {
2145 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2146 uValue = 3;
2147 }
2148 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2149 {
2150 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2151 uValue = 2;
2152 }
2153 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2154 {
2155 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2156 uValue = 0;
2157 }
2158 else
2159 {
2160 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2161 uValue = 6;
2162 }
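    /* The bus-speed encoding goes into bits 19:16 and the core-to-bus ratio into
       bits 31:24; everything outside those fields comes from the configured default. */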
2163 uValue <<= 16;
2164
2165 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
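    /* Ratio = TSC frequency / scalable bus frequency, rounded to the nearest integer. */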
2166 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2167 uValue |= (uint32_t)uTscRatio << 24;
2168
2169 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2170 }
2171 else
2172 {
2173 /* Probably more stuff here, but intel doesn't want to tell us. */
2174 uValue = pRange->uValue;
2175 uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is the only documented value */
2176 }
2177
2178 *puValue = uValue;
2179 return VINF_SUCCESS;
2180}
2181
2182
2183/** @callback_method_impl{FNCPUMWRMSR} */
2184static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2185{
2186 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2187 /** @todo P4 bus frequency config */
2188 return VINF_SUCCESS;
2189}
2190
2191
2192/** @callback_method_impl{FNCPUMRDMSR} */
2193static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2194{
2195 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2196
2197 /* Convert the scalable bus frequency to the encoding in the Intel manual (Core and later). */
2198 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2199 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2200 *puValue = 5;
2201 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2202 *puValue = 1;
2203 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2204 *puValue = 3;
2205 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2206 *puValue = 2;
2207 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2208 *puValue = 0;
2209 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2210 *puValue = 4;
2211 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2212 *puValue = 6;
2213
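    /* Bits 2:0 hold the bus-speed encoding; the rest comes from the configured default. */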
2214 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2215
2216 return VINF_SUCCESS;
2217}
2218
2219
2220/** @callback_method_impl{FNCPUMRDMSR} */
2221static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2222{
2223 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2224
2225 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2226 PVM pVM = pVCpu->CTX_SUFF(pVM);
2227 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2228 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2229 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2230 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2231 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2232
2233 /* Ivy bridge has a minimum operating ratio as well. */
2234 if (true) /** @todo detect sandy bridge. */
2235 uValue |= (uint64_t)uTscRatio << 48;
2236
2237 *puValue = uValue;
2238 return VINF_SUCCESS;
2239}
2240
2241
2242/** @callback_method_impl{FNCPUMRDMSR} */
2243static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2244{
2245 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2246
2247 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2248
2249 PVM pVM = pVCpu->CTX_SUFF(pVM);
2250 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2251 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2252 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2253 uValue |= (uint32_t)uTscRatio << 8;
2254
2255 *puValue = uValue;
2256 return VINF_SUCCESS;
2257}
2258
2259
2260/** @callback_method_impl{FNCPUMWRMSR} */
2261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2262{
2263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2264 /** @todo implement writing MSR_FLEX_RATIO. */
2265 return VINF_SUCCESS;
2266}
2267
2268
2269/** @callback_method_impl{FNCPUMRDMSR} */
2270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2271{
2272 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2273 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2274 return VINF_SUCCESS;
2275}
2276
2277
2278/** @callback_method_impl{FNCPUMWRMSR} */
2279static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2280{
2281 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2282
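    /* Bit 15 is the configuration lock; once the guest has set it, further writes must
       raise #GP(0). */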
2283 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2284 {
2285 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2286 return VERR_CPUM_RAISE_GP_0;
2287 }
2288#if 0 /** @todo check what real (old) hardware does. */
2289 if ((uValue & 7) >= 5)
2290 {
2291 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2292 return VERR_CPUM_RAISE_GP_0;
2293 }
2294#endif
2295 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2296 return VINF_SUCCESS;
2297}
2298
2299
2300/** @callback_method_impl{FNCPUMRDMSR} */
2301static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2302{
2303 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2304 /** @todo implement I/O mwait wakeup. */
2305 *puValue = 0;
2306 return VINF_SUCCESS;
2307}
2308
2309
2310/** @callback_method_impl{FNCPUMWRMSR} */
2311static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2312{
2313 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2314 /** @todo implement I/O mwait wakeup. */
2315 return VINF_SUCCESS;
2316}
2317
2318
2319/** @callback_method_impl{FNCPUMRDMSR} */
2320static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2321{
2322 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2323 /** @todo implement last branch records. */
2324 *puValue = 0;
2325 return VINF_SUCCESS;
2326}
2327
2328
2329/** @callback_method_impl{FNCPUMWRMSR} */
2330static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2331{
2332 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2333 /** @todo implement last branch records. */
2334 return VINF_SUCCESS;
2335}
2336
2337
2338/** @callback_method_impl{FNCPUMRDMSR} */
2339static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2340{
2341 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2342 /** @todo implement last branch records. */
2343 *puValue = 0;
2344 return VINF_SUCCESS;
2345}
2346
2347
2348/** @callback_method_impl{FNCPUMWRMSR} */
2349static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2350{
2351 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2352 /** @todo implement last branch records. */
2353 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2354 * if the rest of the bits are zero. Automatic sign extending?
2355 * Investigate! */
2356 if (!X86_IS_CANONICAL(uValue))
2357 {
2358 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2359 return VERR_CPUM_RAISE_GP_0;
2360 }
2361 return VINF_SUCCESS;
2362}
2363
2364
2365/** @callback_method_impl{FNCPUMRDMSR} */
2366static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2367{
2368 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2369 /** @todo implement last branch records. */
2370 *puValue = 0;
2371 return VINF_SUCCESS;
2372}
2373
2374
2375/** @callback_method_impl{FNCPUMWRMSR} */
2376static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2377{
2378 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2379 /** @todo implement last branch records. */
2380 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2381 * if the rest of the bits are zero. Automatic sign extending?
2382 * Investigate! */
2383 if (!X86_IS_CANONICAL(uValue))
2384 {
2385 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2386 return VERR_CPUM_RAISE_GP_0;
2387 }
2388 return VINF_SUCCESS;
2389}
2390
2391
2392/** @callback_method_impl{FNCPUMRDMSR} */
2393static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2394{
2395 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2396 /** @todo implement last branch records. */
2397 *puValue = 0;
2398 return VINF_SUCCESS;
2399}
2400
2401
2402/** @callback_method_impl{FNCPUMWRMSR} */
2403static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2404{
2405 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2406 /** @todo implement last branch records. */
2407 return VINF_SUCCESS;
2408}
2409
2410
2411/** @callback_method_impl{FNCPUMRDMSR} */
2412static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2413{
2414 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2415 *puValue = pRange->uValue;
2416 return VINF_SUCCESS;
2417}
2418
2419
2420/** @callback_method_impl{FNCPUMWRMSR} */
2421static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2422{
2423 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2424 return VINF_SUCCESS;
2425}
2426
2427
2428/** @callback_method_impl{FNCPUMRDMSR} */
2429static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2430{
2431 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2432 *puValue = pRange->uValue;
2433 return VINF_SUCCESS;
2434}
2435
2436
2437/** @callback_method_impl{FNCPUMWRMSR} */
2438static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2439{
2440 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2441 return VINF_SUCCESS;
2442}
2443
2444
2445/** @callback_method_impl{FNCPUMRDMSR} */
2446static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2447{
2448 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2449 *puValue = pRange->uValue;
2450 return VINF_SUCCESS;
2451}
2452
2453
2454/** @callback_method_impl{FNCPUMWRMSR} */
2455static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2456{
2457 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2458 return VINF_SUCCESS;
2459}
2460
2461
2462/** @callback_method_impl{FNCPUMRDMSR} */
2463static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2464{
2465 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2466 /** @todo machine check. */
2467 *puValue = pRange->uValue;
2468 return VINF_SUCCESS;
2469}
2470
2471
2472/** @callback_method_impl{FNCPUMWRMSR} */
2473static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2474{
2475 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2476 /** @todo machine check. */
2477 return VINF_SUCCESS;
2478}
2479
2480
2481/** @callback_method_impl{FNCPUMRDMSR} */
2482static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2483{
2484 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2485 *puValue = 0;
2486 return VINF_SUCCESS;
2487}
2488
2489
2490/** @callback_method_impl{FNCPUMWRMSR} */
2491static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2492{
2493 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2494 return VINF_SUCCESS;
2495}
2496
2497
2498/** @callback_method_impl{FNCPUMRDMSR} */
2499static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2500{
2501 RT_NOREF_PV(idMsr);
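    /* The range's uValue field selects which control register (CRn) to read. */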
2502 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2503 AssertRC(rc);
2504 return VINF_SUCCESS;
2505}
2506
2507
2508/** @callback_method_impl{FNCPUMWRMSR} */
2509static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2510{
2511 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2512 /* This CRx interface differs from the MOV CRx, GReg interface in that
2513 #GP(0) isn't raised if unsupported bits are written to. Instead they
2514 are simply ignored and masked off. (Pentium M Dothan) */
2515 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2516 * any, gain. */
2517 return VINF_SUCCESS;
2518}
2519
2520
2521/** @callback_method_impl{FNCPUMRDMSR} */
2522static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2523{
2524 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2525 /** @todo implement CPUID masking. */
2526 *puValue = UINT64_MAX;
2527 return VINF_SUCCESS;
2528}
2529
2530
2531/** @callback_method_impl{FNCPUMWRMSR} */
2532static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2533{
2534 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2535 /** @todo implement CPUID masking. */
2536 return VINF_SUCCESS;
2537}
2538
2539
2540/** @callback_method_impl{FNCPUMRDMSR} */
2541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2542{
2543 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2544 /** @todo implement CPUID masking. */
2545 *puValue = 0;
2546 return VINF_SUCCESS;
2547}
2548
2549
2550/** @callback_method_impl{FNCPUMWRMSR} */
2551static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2552{
2553 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2554 /** @todo implement CPUID masking. */
2555 return VINF_SUCCESS;
2556}
2557
2558
2559
2560/** @callback_method_impl{FNCPUMRDMSR} */
2561static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2562{
2563 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2564 /** @todo implement CPUID masking. */
2565 *puValue = UINT64_MAX;
2566 return VINF_SUCCESS;
2567}
2568
2569
2570/** @callback_method_impl{FNCPUMWRMSR} */
2571static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2572{
2573 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2574 /** @todo implement CPUID masking. */
2575 return VINF_SUCCESS;
2576}
2577
2578
2579
2580/** @callback_method_impl{FNCPUMRDMSR} */
2581static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2582{
2583 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2584 /** @todo implement AES-NI. */
2585 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2586 return VINF_SUCCESS;
2587}
2588
2589
2590/** @callback_method_impl{FNCPUMWRMSR} */
2591static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2592{
2593 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2594 /** @todo implement AES-NI. */
2595 return VERR_CPUM_RAISE_GP_0;
2596}
2597
2598
2599/** @callback_method_impl{FNCPUMRDMSR} */
2600static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2601{
2602 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2603 /** @todo implement intel C states. */
2604 *puValue = pRange->uValue;
2605 return VINF_SUCCESS;
2606}
2607
2608
2609/** @callback_method_impl{FNCPUMWRMSR} */
2610static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2611{
2612 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2613 /** @todo implement intel C states. */
2614 return VINF_SUCCESS;
2615}
2616
2617
2618/** @callback_method_impl{FNCPUMRDMSR} */
2619static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2620{
2621 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2622 /** @todo implement last-branch-records. */
2623 *puValue = 0;
2624 return VINF_SUCCESS;
2625}
2626
2627
2628/** @callback_method_impl{FNCPUMWRMSR} */
2629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2630{
2631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2632 /** @todo implement last-branch-records. */
2633 return VINF_SUCCESS;
2634}
2635
2636
2637/** @callback_method_impl{FNCPUMRDMSR} */
2638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2639{
2640 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2641 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2642 *puValue = 0;
2643 return VINF_SUCCESS;
2644}
2645
2646
2647/** @callback_method_impl{FNCPUMWRMSR} */
2648static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2649{
2650 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2651 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2652 return VINF_SUCCESS;
2653}
2654
2655
2656/** @callback_method_impl{FNCPUMRDMSR} */
2657static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2658{
2659 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2660 /** @todo implement memory VLW? */
2661 *puValue = pRange->uValue;
2662 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2663 AAJ49/AAK51/????, which documents the inversion of this bit. The
2664 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2665 that corrects things. Some guesses at the other bits:
2666 bit 2 = INTR
2667 bit 4 = SMI
2668 bit 5 = INIT
2669 bit 6 = NMI */
2670 return VINF_SUCCESS;
2671}
2672
2673
2674/** @callback_method_impl{FNCPUMRDMSR} */
2675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2676{
2677 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2678 /** @todo intel power management */
2679 *puValue = 0;
2680 return VINF_SUCCESS;
2681}
2682
2683
2684/** @callback_method_impl{FNCPUMWRMSR} */
2685static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2686{
2687 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2688 /** @todo intel power management */
2689 return VINF_SUCCESS;
2690}
2691
2692
2693/** @callback_method_impl{FNCPUMRDMSR} */
2694static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2695{
2696 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2697 /** @todo intel performance counters. */
2698 *puValue = 0;
2699 return VINF_SUCCESS;
2700}
2701
2702
2703/** @callback_method_impl{FNCPUMWRMSR} */
2704static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2705{
2706 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2707 /** @todo intel performance counters. */
2708 return VINF_SUCCESS;
2709}
2710
2711
2712/** @callback_method_impl{FNCPUMRDMSR} */
2713static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2714{
2715 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2716 /** @todo intel performance counters. */
2717 *puValue = 0;
2718 return VINF_SUCCESS;
2719}
2720
2721
2722/** @callback_method_impl{FNCPUMWRMSR} */
2723static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2724{
2725 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2726 /** @todo intel performance counters. */
2727 return VINF_SUCCESS;
2728}
2729
2730
2731/** @callback_method_impl{FNCPUMRDMSR} */
2732static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2733{
2734 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2735 /** @todo intel power management. */
2736 *puValue = 0;
2737 return VINF_SUCCESS;
2738}
2739
2740
2741/** @callback_method_impl{FNCPUMRDMSR} */
2742static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2743{
2744 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2745 /** @todo intel power management. */
2746 *puValue = 0;
2747 return VINF_SUCCESS;
2748}
2749
2750
2751/** @callback_method_impl{FNCPUMRDMSR} */
2752static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2753{
2754 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2755 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2756 *puValue = 0;
2757 return VINF_SUCCESS;
2758}
2759
2760
2761/** @callback_method_impl{FNCPUMWRMSR} */
2762static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2763{
2764 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2765 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2766 return VINF_SUCCESS;
2767}
2768
2769
2770/** @callback_method_impl{FNCPUMRDMSR} */
2771static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2772{
2773 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2774 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2775 *puValue = 0;
2776 return VINF_SUCCESS;
2777}
2778
2779
2780/** @callback_method_impl{FNCPUMWRMSR} */
2781static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2782{
2783 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2784 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2785 return VINF_SUCCESS;
2786}
2787
2788
2789/** @callback_method_impl{FNCPUMRDMSR} */
2790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2791{
2792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2793 /** @todo intel RAPL. */
2794 *puValue = pRange->uValue;
2795 return VINF_SUCCESS;
2796}
2797
2798
2799/** @callback_method_impl{FNCPUMWRMSR} */
2800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2801{
2802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2803 /* Note! This is documented as read only and except for a Silvermont sample has
2804 always been classified as read only. This is just here to make it compile. */
2805 return VINF_SUCCESS;
2806}
2807
2808
2809/** @callback_method_impl{FNCPUMRDMSR} */
2810static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2811{
2812 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2813 /** @todo intel power management. */
2814 *puValue = 0;
2815 return VINF_SUCCESS;
2816}
2817
2818
2819/** @callback_method_impl{FNCPUMWRMSR} */
2820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2821{
2822 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2823 /** @todo intel power management. */
2824 return VINF_SUCCESS;
2825}
2826
2827
2828/** @callback_method_impl{FNCPUMRDMSR} */
2829static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2830{
2831 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2832 /** @todo intel power management. */
2833 *puValue = 0;
2834 return VINF_SUCCESS;
2835}
2836
2837
2838/** @callback_method_impl{FNCPUMWRMSR} */
2839static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2840{
2841 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2842 /* Note! This is documented as read only and except for a Silvermont sample has
2843 always been classified as read only. This is just here to make it compile. */
2844 return VINF_SUCCESS;
2845}
2846
2847
2848/** @callback_method_impl{FNCPUMRDMSR} */
2849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2850{
2851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2852 /** @todo intel RAPL. */
2853 *puValue = 0;
2854 return VINF_SUCCESS;
2855}
2856
2857
2858/** @callback_method_impl{FNCPUMWRMSR} */
2859static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2860{
2861 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2862 /** @todo intel RAPL. */
2863 return VINF_SUCCESS;
2864}
2865
2866
2867/** @callback_method_impl{FNCPUMRDMSR} */
2868static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2869{
2870 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2871 /** @todo intel power management. */
2872 *puValue = 0;
2873 return VINF_SUCCESS;
2874}
2875
2876
2877/** @callback_method_impl{FNCPUMRDMSR} */
2878static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2879{
2880 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2881 /** @todo intel power management. */
2882 *puValue = 0;
2883 return VINF_SUCCESS;
2884}
2885
2886
2887/** @callback_method_impl{FNCPUMRDMSR} */
2888static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2889{
2890 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2891 /** @todo intel power management. */
2892 *puValue = 0;
2893 return VINF_SUCCESS;
2894}
2895
2896
2897/** @callback_method_impl{FNCPUMRDMSR} */
2898static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2899{
2900 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2901 /** @todo intel RAPL. */
2902 *puValue = 0;
2903 return VINF_SUCCESS;
2904}
2905
2906
2907/** @callback_method_impl{FNCPUMWRMSR} */
2908static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2909{
2910 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2911 /** @todo intel RAPL. */
2912 return VINF_SUCCESS;
2913}
2914
2915
2916/** @callback_method_impl{FNCPUMRDMSR} */
2917static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2918{
2919 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2920 /** @todo intel power management. */
2921 *puValue = 0;
2922 return VINF_SUCCESS;
2923}
2924
2925
2926/** @callback_method_impl{FNCPUMRDMSR} */
2927static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2928{
2929 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2930 /** @todo intel power management. */
2931 *puValue = 0;
2932 return VINF_SUCCESS;
2933}
2934
2935
2936/** @callback_method_impl{FNCPUMRDMSR} */
2937static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2938{
2939 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2940 /** @todo intel power management. */
2941 *puValue = 0;
2942 return VINF_SUCCESS;
2943}
2944
2945
2946/** @callback_method_impl{FNCPUMRDMSR} */
2947static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2948{
2949 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2950 /** @todo intel RAPL. */
2951 *puValue = 0;
2952 return VINF_SUCCESS;
2953}
2954
2955
2956/** @callback_method_impl{FNCPUMWRMSR} */
2957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2958{
2959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2960 /** @todo intel RAPL. */
2961 return VINF_SUCCESS;
2962}
2963
2964
2965/** @callback_method_impl{FNCPUMRDMSR} */
2966static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2967{
2968 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2969 /** @todo intel power management. */
2970 *puValue = 0;
2971 return VINF_SUCCESS;
2972}
2973
2974
2975/** @callback_method_impl{FNCPUMRDMSR} */
2976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2977{
2978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2979 /** @todo intel RAPL. */
2980 *puValue = 0;
2981 return VINF_SUCCESS;
2982}
2983
2984
2985/** @callback_method_impl{FNCPUMWRMSR} */
2986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2987{
2988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2989 /** @todo intel RAPL. */
2990 return VINF_SUCCESS;
2991}
2992
2993
2994/** @callback_method_impl{FNCPUMRDMSR} */
2995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2996{
2997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2998 /** @todo intel power management. */
2999 *puValue = 0;
3000 return VINF_SUCCESS;
3001}
3002
3003
3004/** @callback_method_impl{FNCPUMRDMSR} */
3005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3006{
3007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3008 /** @todo intel RAPL. */
3009 *puValue = 0;
3010 return VINF_SUCCESS;
3011}
3012
3013
3014/** @callback_method_impl{FNCPUMWRMSR} */
3015static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3016{
3017 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3018 /** @todo intel RAPL. */
3019 return VINF_SUCCESS;
3020}
3021
3022
3023/** @callback_method_impl{FNCPUMRDMSR} */
3024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3025{
3026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3027 /** @todo intel power management. */
3028 *puValue = 0;
3029 return VINF_SUCCESS;
3030}
3031
3032
3033/** @callback_method_impl{FNCPUMRDMSR} */
3034static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3035{
3036 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3037 /** @todo intel RAPL. */
3038 *puValue = 0;
3039 return VINF_SUCCESS;
3040}
3041
3042
3043/** @callback_method_impl{FNCPUMWRMSR} */
3044static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3045{
3046 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3047 /** @todo intel RAPL. */
3048 return VINF_SUCCESS;
3049}
3050
3051
3052/** @callback_method_impl{FNCPUMRDMSR} */
3053static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3054{
3055 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3056 /** @todo intel power management. */
3057 *puValue = pRange->uValue;
3058 return VINF_SUCCESS;
3059}
3060
3061
3062/** @callback_method_impl{FNCPUMRDMSR} */
3063static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3064{
3065 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3066 /** @todo intel power management. */
3067 *puValue = pRange->uValue;
3068 return VINF_SUCCESS;
3069}
3070
3071
3072/** @callback_method_impl{FNCPUMRDMSR} */
3073static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3074{
3075 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3076 /** @todo intel power management. */
3077 *puValue = pRange->uValue;
3078 return VINF_SUCCESS;
3079}
3080
3081
3082/** @callback_method_impl{FNCPUMRDMSR} */
3083static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3084{
3085 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3086 /** @todo intel power management. */
3087 *puValue = 0;
3088 return VINF_SUCCESS;
3089}
3090
3091
3092/** @callback_method_impl{FNCPUMWRMSR} */
3093static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3094{
3095 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3096 /** @todo intel power management. */
3097 return VINF_SUCCESS;
3098}
3099
3100
3101/** @callback_method_impl{FNCPUMRDMSR} */
3102static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3103{
3104 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3105 /** @todo intel power management. */
3106 *puValue = 0;
3107 return VINF_SUCCESS;
3108}
3109
3110
3111/** @callback_method_impl{FNCPUMWRMSR} */
3112static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3113{
3114 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3115 /** @todo intel power management. */
3116 return VINF_SUCCESS;
3117}
3118
3119
3120/** @callback_method_impl{FNCPUMRDMSR} */
3121static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3122{
3123 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3124 /** @todo uncore msrs. */
3125 *puValue = 0;
3126 return VINF_SUCCESS;
3127}
3128
3129
3130/** @callback_method_impl{FNCPUMWRMSR} */
3131static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3132{
3133 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3134 /** @todo uncore msrs. */
3135 return VINF_SUCCESS;
3136}
3137
3138
3139/** @callback_method_impl{FNCPUMRDMSR} */
3140static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3141{
3142 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3143 /** @todo uncore msrs. */
3144 *puValue = 0;
3145 return VINF_SUCCESS;
3146}
3147
3148
3149/** @callback_method_impl{FNCPUMWRMSR} */
3150static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3151{
3152 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3153 /** @todo uncore msrs. */
3154 return VINF_SUCCESS;
3155}
3156
3157
3158/** @callback_method_impl{FNCPUMRDMSR} */
3159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3160{
3161 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3162 /** @todo uncore msrs. */
3163 *puValue = 0;
3164 return VINF_SUCCESS;
3165}
3166
3167
3168/** @callback_method_impl{FNCPUMWRMSR} */
3169static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3170{
3171 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3172 /** @todo uncore msrs. */
3173 return VINF_SUCCESS;
3174}
3175
3176
3177/** @callback_method_impl{FNCPUMRDMSR} */
3178static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3179{
3180 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3181 /** @todo uncore msrs. */
3182 *puValue = 0;
3183 return VINF_SUCCESS;
3184}
3185
3186
3187/** @callback_method_impl{FNCPUMWRMSR} */
3188static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3189{
3190 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3191 /** @todo uncore msrs. */
3192 return VINF_SUCCESS;
3193}
3194
3195
3196/** @callback_method_impl{FNCPUMRDMSR} */
3197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3198{
3199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3200 /** @todo uncore msrs. */
3201 *puValue = 0;
3202 return VINF_SUCCESS;
3203}
3204
3205
3206/** @callback_method_impl{FNCPUMWRMSR} */
3207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3208{
3209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3210 /** @todo uncore msrs. */
3211 return VINF_SUCCESS;
3212}
3213
3214
3215/** @callback_method_impl{FNCPUMRDMSR} */
3216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3217{
3218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3219 /** @todo uncore msrs. */
3220 *puValue = 0;
3221 return VINF_SUCCESS;
3222}
3223
3224
3225/** @callback_method_impl{FNCPUMRDMSR} */
3226static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3227{
3228 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3229 /** @todo uncore msrs. */
3230 *puValue = 0;
3231 return VINF_SUCCESS;
3232}
3233
3234
3235/** @callback_method_impl{FNCPUMWRMSR} */
3236static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3237{
3238 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3239 /** @todo uncore msrs. */
3240 return VINF_SUCCESS;
3241}
3242
3243
3244/** @callback_method_impl{FNCPUMRDMSR} */
3245static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3246{
3247 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3248 /** @todo uncore msrs. */
3249 *puValue = 0;
3250 return VINF_SUCCESS;
3251}
3252
3253
3254/** @callback_method_impl{FNCPUMWRMSR} */
3255static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3256{
3257 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3258 /** @todo uncore msrs. */
3259 return VINF_SUCCESS;
3260}
3261
3262
3263/** @callback_method_impl{FNCPUMRDMSR} */
3264static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3265{
3266 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3267
3268 /*
3269 * 31:0 is SMI count (read only), 63:32 reserved.
3270 * Since we don't do SMI, the count is always zero.
3271 */
3272 *puValue = 0;
3273 return VINF_SUCCESS;
3274}
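/* A minimal sketch of what the read above could return if CPUM ever kept an SMI
   count (cSmiCount below is a hypothetical per-VCPU field, not an existing member):
   the count is confined to bits 31:0 and bits 63:32 stay zero, e.g.
       *puValue = (uint64_t)(uint32_t)pVCpu->cpum.s.cSmiCount;
   Since no SMIs are ever delivered to the guest, zero is returned instead. */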
3275
3276
3277/** @callback_method_impl{FNCPUMRDMSR} */
3278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3279{
3280 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3281 /** @todo implement enhanced multi-threaded thermal monitoring? */
3282 *puValue = pRange->uValue;
3283 return VINF_SUCCESS;
3284}
3285
3286
3287/** @callback_method_impl{FNCPUMWRMSR} */
3288static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3289{
3290 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3291 /** @todo implement enhanced multi-threaded thermal monitoring? */
3292 return VINF_SUCCESS;
3293}
3294
3295
3296/** @callback_method_impl{FNCPUMRDMSR} */
3297static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3298{
3299 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3300 /** @todo SMM & C-states? */
3301 *puValue = 0;
3302 return VINF_SUCCESS;
3303}
3304
3305
3306/** @callback_method_impl{FNCPUMWRMSR} */
3307static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3308{
3309 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3310 /** @todo SMM & C-states? */
3311 return VINF_SUCCESS;
3312}
3313
3314
3315/** @callback_method_impl{FNCPUMRDMSR} */
3316static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3317{
3318 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3319 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3320 *puValue = 0;
3321 return VINF_SUCCESS;
3322}
3323
3324
3325/** @callback_method_impl{FNCPUMWRMSR} */
3326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3327{
3328 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3329 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3330 return VINF_SUCCESS;
3331}
3332
3333
3334/** @callback_method_impl{FNCPUMRDMSR} */
3335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3336{
3337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3338 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3339 *puValue = 0;
3340 return VINF_SUCCESS;
3341}
3342
3343
3344/** @callback_method_impl{FNCPUMWRMSR} */
3345static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3346{
3347 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3348 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3349 return VINF_SUCCESS;
3350}
3351
3352
3353/** @callback_method_impl{FNCPUMRDMSR} */
3354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3355{
3356 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3357 /** @todo Core2+ platform environment control interface control register? */
3358 *puValue = 0;
3359 return VINF_SUCCESS;
3360}
3361
3362
3363/** @callback_method_impl{FNCPUMWRMSR} */
3364static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3365{
3366 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3367 /** @todo Core2+ platform environment control interface control register? */
3368 return VINF_SUCCESS;
3369}
3370
3371
3372/** @callback_method_impl{FNCPUMRDMSR} */
3373static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3374{
3375 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3376 *puValue = 0;
3377 return VINF_SUCCESS;
3378}
3379
3380
3381/*
3382 * Multiple vendor P6 MSRs.
3383 * Multiple vendor P6 MSRs.
3384 * Multiple vendor P6 MSRs.
3385 *
3386 * These MSRs were introduced with the P6 but not elevated to architectural
3387 * MSRs, despite other vendors implementing them.
3388 */
3389
3390
3391/** @callback_method_impl{FNCPUMRDMSR} */
3392static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3393{
3394 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3395 /* AMD seems to record just RIP, while Intel, if the docs are read correctly, records
3396 RIP+CS.BASE; hence the need for separate functions. */
3397 /** @todo implement last branch records. */
3398 *puValue = 0;
3399 return VINF_SUCCESS;
3400}
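/* A rough sketch of the difference once last branch records are implemented
   (uLbrFromIp below is a hypothetical field holding the recorded branch source):
       *puValue = pVCpu->cpum.s.Guest.cs.u64Base + uLbrFromIp;   // Intel: linear address
       *puValue = uLbrFromIp;                                    // AMD: RIP only
   which is why the Intel and AMD flavours cannot share one read handler. */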
3401
3402
3403/** @callback_method_impl{FNCPUMRDMSR} */
3404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3405{
3406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3407 /** @todo implement last branch records. */
3408 *puValue = 0;
3409 return VINF_SUCCESS;
3410}
3411
3412
3413/** @callback_method_impl{FNCPUMRDMSR} */
3414static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3415{
3416 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3417 /** @todo implement last exception records. */
3418 *puValue = 0;
3419 return VINF_SUCCESS;
3420}
3421
3422
3423/** @callback_method_impl{FNCPUMWRMSR} */
3424static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3425{
3426 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3427 /** @todo implement last exception records. */
3428 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3429 a non-canonical address. */
3430 return VINF_SUCCESS;
3431}
3432
3433
3434/** @callback_method_impl{FNCPUMRDMSR} */
3435static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3436{
3437 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3438 /** @todo implement last exception records. */
3439 *puValue = 0;
3440 return VINF_SUCCESS;
3441}
3442
3443
3444/** @callback_method_impl{FNCPUMWRMSR} */
3445static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3446{
3447 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3448 /** @todo implement last exception records. */
3449 return VINF_SUCCESS;
3450}
3451
3452
3453
3454/*
3455 * AMD specific
3456 * AMD specific
3457 * AMD specific
3458 */
3459
3460
3461/** @callback_method_impl{FNCPUMRDMSR} */
3462static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3463{
3464 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3465 /** @todo Implement TscRateMsr */
3466 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3467 return VINF_SUCCESS;
3468}
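/* TscRateMsr holds an 8.32 fixed-point ratio: bits 39:32 carry the integer part and
   bits 31:0 the fraction, so the reset value returned above, RT_MAKE_U64(0, 1)
   == UINT64_C(0x0000000100000000), encodes exactly 1.0.  A ratio of 1.5 would,
   for instance, be RT_MAKE_U64(UINT32_C(0x80000000), 1). */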
3469
3470
3471/** @callback_method_impl{FNCPUMWRMSR} */
3472static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3473{
3474 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3475 /** @todo Implement TscRateMsr */
3476 return VINF_SUCCESS;
3477}
3478
3479
3480/** @callback_method_impl{FNCPUMRDMSR} */
3481static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3482{
3483 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3484 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3485 /* Note: Only listed in the BKDG for Family 15h. */
3486 *puValue = 0;
3487 return VINF_SUCCESS;
3488}
3489
3490
3491/** @callback_method_impl{FNCPUMWRMSR} */
3492static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3493{
3494 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3495 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3496 return VINF_SUCCESS;
3497}
3498
3499
3500/** @callback_method_impl{FNCPUMRDMSR} */
3501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3502{
3503 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3504 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3505 /* Note: Only listed in the BKDG for Family 15h. */
3506 *puValue = 0;
3507 return VINF_SUCCESS;
3508}
3509
3510
3511/** @callback_method_impl{FNCPUMWRMSR} */
3512static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3513{
3514 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3515 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3516 return VINF_SUCCESS;
3517}
3518
3519
3520/** @callback_method_impl{FNCPUMRDMSR} */
3521static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3522{
3523 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3524 /** @todo machine check. */
3525 *puValue = 0;
3526 return VINF_SUCCESS;
3527}
3528
3529
3530/** @callback_method_impl{FNCPUMWRMSR} */
3531static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3532{
3533 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3534 /** @todo machine check. */
3535 return VINF_SUCCESS;
3536}
3537
3538
3539/** @callback_method_impl{FNCPUMRDMSR} */
3540static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3541{
3542 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3543 /** @todo AMD performance events. */
3544 *puValue = 0;
3545 return VINF_SUCCESS;
3546}
3547
3548
3549/** @callback_method_impl{FNCPUMWRMSR} */
3550static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3551{
3552 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3553 /** @todo AMD performance events. */
3554 return VINF_SUCCESS;
3555}
3556
3557
3558/** @callback_method_impl{FNCPUMRDMSR} */
3559static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3560{
3561 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3562 /** @todo AMD performance events. */
3563 *puValue = 0;
3564 return VINF_SUCCESS;
3565}
3566
3567
3568/** @callback_method_impl{FNCPUMWRMSR} */
3569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3570{
3571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3572 /** @todo AMD performance events. */
3573 return VINF_SUCCESS;
3574}
3575
3576
3577/** @callback_method_impl{FNCPUMRDMSR} */
3578static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3579{
3580 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3581 /** @todo AMD SYS_CFG */
3582 *puValue = pRange->uValue;
3583 return VINF_SUCCESS;
3584}
3585
3586
3587/** @callback_method_impl{FNCPUMWRMSR} */
3588static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3589{
3590 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3591 /** @todo AMD SYS_CFG */
3592 return VINF_SUCCESS;
3593}
3594
3595
3596/** @callback_method_impl{FNCPUMRDMSR} */
3597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3598{
3599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3600 /** @todo AMD HW_CFG */
3601 *puValue = 0;
3602 return VINF_SUCCESS;
3603}
3604
3605
3606/** @callback_method_impl{FNCPUMWRMSR} */
3607static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3608{
3609 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3610 /** @todo AMD HW_CFG */
3611 return VINF_SUCCESS;
3612}
3613
3614
3615/** @callback_method_impl{FNCPUMRDMSR} */
3616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3617{
3618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3619 /** @todo AMD IorrMask/IorrBase */
3620 *puValue = 0;
3621 return VINF_SUCCESS;
3622}
3623
3624
3625/** @callback_method_impl{FNCPUMWRMSR} */
3626static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3627{
3628 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3629 /** @todo AMD IorrMask/IorrBase */
3630 return VINF_SUCCESS;
3631}
3632
3633
3634/** @callback_method_impl{FNCPUMRDMSR} */
3635static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3636{
3637 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3638 /** @todo AMD IorrMask/IorrBase */
3639 *puValue = 0;
3640 return VINF_SUCCESS;
3641}
3642
3643
3644/** @callback_method_impl{FNCPUMWRMSR} */
3645static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3646{
3647 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3648 /** @todo AMD IorrMask/IorrBase */
3649 return VINF_SUCCESS;
3650}
3651
3652
3653/** @callback_method_impl{FNCPUMRDMSR} */
3654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3655{
3656 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3657 *puValue = 0;
3658 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3659 * for TOPMEM2. */
3660 //if (pRange->uValue == 0)
3661 // *puValue = _4G - RamHoleSize;
3662 return VINF_SUCCESS;
3663}
3664
3665
3666/** @callback_method_impl{FNCPUMWRMSR} */
3667static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3668{
3669 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3670 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3671 return VINF_SUCCESS;
3672}
3673
3674
3675/** @callback_method_impl{FNCPUMRDMSR} */
3676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3677{
3678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3679 /** @todo AMD NB_CFG1 */
3680 *puValue = 0;
3681 return VINF_SUCCESS;
3682}
3683
3684
3685/** @callback_method_impl{FNCPUMWRMSR} */
3686static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3687{
3688 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3689 /** @todo AMD NB_CFG1 */
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** @callback_method_impl{FNCPUMRDMSR} */
3695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3696{
3697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3698 /** @todo machine check. */
3699 *puValue = 0;
3700 return VINF_SUCCESS;
3701}
3702
3703
3704/** @callback_method_impl{FNCPUMWRMSR} */
3705static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3706{
3707 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3708 /** @todo machine check. */
3709 return VINF_SUCCESS;
3710}
3711
3712
3713/** @callback_method_impl{FNCPUMRDMSR} */
3714static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3715{
3716 RT_NOREF_PV(idMsr);
3717 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3718 if (pLeaf)
3719 {
3720 if (!(pRange->uValue & 1))
3721 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3722 else
3723 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3724 }
3725 else
3726 *puValue = 0;
3727 return VINF_SUCCESS;
3728}
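/* Worked example of the mapping above: a range entry with uValue == 3 reads
   CPUID leaf 3 / 2 + 0x80000001 = 0x80000002 and, the index being odd, returns
   RT_MAKE_U64(uEcx, uEdx), i.e. ECX in the low dword and EDX in the high dword;
   an even index returns EAX (low) and EBX (high) instead. */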
3729
3730
3731/** @callback_method_impl{FNCPUMWRMSR} */
3732static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3733{
3734 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3735 /** @todo Remember guest programmed CPU name. */
3736 return VINF_SUCCESS;
3737}
3738
3739
3740/** @callback_method_impl{FNCPUMRDMSR} */
3741static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3742{
3743 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3744 /** @todo AMD HTC. */
3745 *puValue = pRange->uValue;
3746 return VINF_SUCCESS;
3747}
3748
3749
3750/** @callback_method_impl{FNCPUMWRMSR} */
3751static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3752{
3753 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3754 /** @todo AMD HTC. */
3755 return VINF_SUCCESS;
3756}
3757
3758
3759/** @callback_method_impl{FNCPUMRDMSR} */
3760static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3761{
3762 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3763 /** @todo AMD STC. */
3764 *puValue = 0;
3765 return VINF_SUCCESS;
3766}
3767
3768
3769/** @callback_method_impl{FNCPUMWRMSR} */
3770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3771{
3772 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3773 /** @todo AMD STC. */
3774 return VINF_SUCCESS;
3775}
3776
3777
3778/** @callback_method_impl{FNCPUMRDMSR} */
3779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3780{
3781 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3782 /** @todo AMD FIDVID_CTL. */
3783 *puValue = pRange->uValue;
3784 return VINF_SUCCESS;
3785}
3786
3787
3788/** @callback_method_impl{FNCPUMWRMSR} */
3789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3790{
3791 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3792 /** @todo AMD FIDVID_CTL. */
3793 return VINF_SUCCESS;
3794}
3795
3796
3797/** @callback_method_impl{FNCPUMRDMSR} */
3798static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3799{
3800 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3801 /** @todo AMD FIDVID_STATUS. */
3802 *puValue = pRange->uValue;
3803 return VINF_SUCCESS;
3804}
3805
3806
3807/** @callback_method_impl{FNCPUMRDMSR} */
3808static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3809{
3810 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3811 /** @todo AMD MC. */
3812 *puValue = 0;
3813 return VINF_SUCCESS;
3814}
3815
3816
3817/** @callback_method_impl{FNCPUMWRMSR} */
3818static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3819{
3820 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3821 /** @todo AMD MC. */
3822 return VINF_SUCCESS;
3823}
3824
3825
3826/** @callback_method_impl{FNCPUMRDMSR} */
3827static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3828{
3829 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3830 /** @todo AMD SMM/SMI and I/O trap. */
3831 *puValue = 0;
3832 return VINF_SUCCESS;
3833}
3834
3835
3836/** @callback_method_impl{FNCPUMWRMSR} */
3837static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3838{
3839 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3840 /** @todo AMD SMM/SMI and I/O trap. */
3841 return VINF_SUCCESS;
3842}
3843
3844
3845/** @callback_method_impl{FNCPUMRDMSR} */
3846static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3847{
3848 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3849 /** @todo AMD SMM/SMI and I/O trap. */
3850 *puValue = 0;
3851 return VINF_SUCCESS;
3852}
3853
3854
3855/** @callback_method_impl{FNCPUMWRMSR} */
3856static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3857{
3858 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3859 /** @todo AMD SMM/SMI and I/O trap. */
3860 return VINF_SUCCESS;
3861}
3862
3863
3864/** @callback_method_impl{FNCPUMRDMSR} */
3865static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3866{
3867 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3868 /** @todo Interrupt pending message. */
3869 *puValue = 0;
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** @callback_method_impl{FNCPUMWRMSR} */
3875static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3876{
3877 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3878 /** @todo Interrupt pending message. */
3879 return VINF_SUCCESS;
3880}
3881
3882
3883/** @callback_method_impl{FNCPUMRDMSR} */
3884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3885{
3886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3887 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3888 *puValue = 0;
3889 return VINF_SUCCESS;
3890}
3891
3892
3893/** @callback_method_impl{FNCPUMWRMSR} */
3894static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3895{
3896 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3897 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3898 return VINF_SUCCESS;
3899}
3900
3901
3902/** @callback_method_impl{FNCPUMRDMSR} */
3903static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3904{
3905 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3906 /** @todo AMD MMIO Configuration base address. */
3907 *puValue = 0;
3908 return VINF_SUCCESS;
3909}
3910
3911
3912/** @callback_method_impl{FNCPUMWRMSR} */
3913static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3914{
3915 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3916 /** @todo AMD MMIO Configuration base address. */
3917 return VINF_SUCCESS;
3918}
3919
3920
3921/** @callback_method_impl{FNCPUMRDMSR} */
3922static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3923{
3924 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3925 /** @todo AMD 0xc0010059. */
3926 *puValue = 0;
3927 return VINF_SUCCESS;
3928}
3929
3930
3931/** @callback_method_impl{FNCPUMWRMSR} */
3932static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3933{
3934 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3935 /** @todo AMD 0xc0010059. */
3936 return VINF_SUCCESS;
3937}
3938
3939
3940/** @callback_method_impl{FNCPUMRDMSR} */
3941static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3942{
3943 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3944 /** @todo AMD P-states. */
3945 *puValue = pRange->uValue;
3946 return VINF_SUCCESS;
3947}
3948
3949
3950/** @callback_method_impl{FNCPUMRDMSR} */
3951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3952{
3953 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3954 /** @todo AMD P-states. */
3955 *puValue = pRange->uValue;
3956 return VINF_SUCCESS;
3957}
3958
3959
3960/** @callback_method_impl{FNCPUMWRMSR} */
3961static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3962{
3963 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3964 /** @todo AMD P-states. */
3965 return VINF_SUCCESS;
3966}
3967
3968
3969/** @callback_method_impl{FNCPUMRDMSR} */
3970static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3971{
3972 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3973 /** @todo AMD P-states. */
3974 *puValue = pRange->uValue;
3975 return VINF_SUCCESS;
3976}
3977
3978
3979/** @callback_method_impl{FNCPUMWRMSR} */
3980static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3981{
3982 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3983 /** @todo AMD P-states. */
3984 return VINF_SUCCESS;
3985}
3986
3987
3988/** @callback_method_impl{FNCPUMRDMSR} */
3989static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3990{
3991 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3992 /** @todo AMD P-states. */
3993 *puValue = pRange->uValue;
3994 return VINF_SUCCESS;
3995}
3996
3997
3998/** @callback_method_impl{FNCPUMWRMSR} */
3999static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4000{
4001 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4002 /** @todo AMD P-states. */
4003 return VINF_SUCCESS;
4004}
4005
4006
4007/** @callback_method_impl{FNCPUMRDMSR} */
4008static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4009{
4010 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4011 /** @todo AMD P-states. */
4012 *puValue = pRange->uValue;
4013 return VINF_SUCCESS;
4014}
4015
4016
4017/** @callback_method_impl{FNCPUMWRMSR} */
4018static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4019{
4020 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4021 /** @todo AMD P-states. */
4022 return VINF_SUCCESS;
4023}
4024
4025
4026/** @callback_method_impl{FNCPUMRDMSR} */
4027static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4028{
4029 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4030 /** @todo AMD P-states. */
4031 *puValue = pRange->uValue;
4032 return VINF_SUCCESS;
4033}
4034
4035
4036/** @callback_method_impl{FNCPUMWRMSR} */
4037static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4038{
4039 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4040 /* Note! Writing 0 does not seem to GP; not sure if it does anything to the value... */
4041 /** @todo AMD P-states. */
4042 return VINF_SUCCESS;
4043}
4044
4045
4046/** @callback_method_impl{FNCPUMRDMSR} */
4047static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4048{
4049 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4050 /** @todo AMD C-states. */
4051 *puValue = 0;
4052 return VINF_SUCCESS;
4053}
4054
4055
4056/** @callback_method_impl{FNCPUMWRMSR} */
4057static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4058{
4059 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4060 /** @todo AMD C-states. */
4061 return VINF_SUCCESS;
4062}
4063
4064
4065/** @callback_method_impl{FNCPUMRDMSR} */
4066static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4067{
4068 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4069 /** @todo AMD machine checks. */
4070 *puValue = 0;
4071 return VINF_SUCCESS;
4072}
4073
4074
4075/** @callback_method_impl{FNCPUMWRMSR} */
4076static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4077{
4078 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4079 /** @todo AMD machine checks. */
4080 return VINF_SUCCESS;
4081}
4082
4083
4084/** @callback_method_impl{FNCPUMRDMSR} */
4085static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4086{
4087 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4088 /** @todo AMD SMM. */
4089 *puValue = 0;
4090 return VINF_SUCCESS;
4091}
4092
4093
4094/** @callback_method_impl{FNCPUMWRMSR} */
4095static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4096{
4097 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4098 /** @todo AMD SMM. */
4099 return VINF_SUCCESS;
4100}
4101
4102
4103/** @callback_method_impl{FNCPUMRDMSR} */
4104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4105{
4106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4107 /** @todo AMD SMM. */
4108 *puValue = 0;
4109 return VINF_SUCCESS;
4110}
4111
4112
4113/** @callback_method_impl{FNCPUMWRMSR} */
4114static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4115{
4116 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4117 /** @todo AMD SMM. */
4118 return VINF_SUCCESS;
4119}
4120
4121
4122
4123/** @callback_method_impl{FNCPUMRDMSR} */
4124static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4125{
4126 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4127 /** @todo AMD SMM. */
4128 *puValue = 0;
4129 return VINF_SUCCESS;
4130}
4131
4132
4133/** @callback_method_impl{FNCPUMWRMSR} */
4134static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4135{
4136 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4137 /** @todo AMD SMM. */
4138 return VINF_SUCCESS;
4139}
4140
4141
4142/** @callback_method_impl{FNCPUMRDMSR} */
4143static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4144{
4145 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4146 PVM pVM = pVCpu->CTX_SUFF(pVM);
4147 if (pVM->cpum.s.GuestFeatures.fSvm)
4148 *puValue = MSR_K8_VM_CR_LOCK;
4149 else
4150 *puValue = 0;
4151 return VINF_SUCCESS;
4152}
4153
4154
4155/** @callback_method_impl{FNCPUMWRMSR} */
4156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4157{
4158 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4159 PVM pVM = pVCpu->CTX_SUFF(pVM);
4160 if (pVM->cpum.s.GuestFeatures.fSvm)
4161 {
4162 /* Silently ignore writes to the LOCK and SVM_DISABLE bits when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
4163 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4164 return VERR_CPUM_RAISE_GP_0;
4165 return VINF_SUCCESS;
4166 }
4167 return VERR_CPUM_RAISE_GP_0;
4168}
4169
4170
4171/** @callback_method_impl{FNCPUMRDMSR} */
4172static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4173{
4174 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4175 /** @todo AMD IGNNE\# control. */
4176 *puValue = 0;
4177 return VINF_SUCCESS;
4178}
4179
4180
4181/** @callback_method_impl{FNCPUMWRMSR} */
4182static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4183{
4184 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4185 /** @todo AMD IGNNE\# control. */
4186 return VINF_SUCCESS;
4187}
4188
4189
4190/** @callback_method_impl{FNCPUMRDMSR} */
4191static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4192{
4193 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4194 /** @todo AMD SMM. */
4195 *puValue = 0;
4196 return VINF_SUCCESS;
4197}
4198
4199
4200/** @callback_method_impl{FNCPUMWRMSR} */
4201static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4202{
4203 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4204 /** @todo AMD SMM. */
4205 return VINF_SUCCESS;
4206}
4207
4208
4209/** @callback_method_impl{FNCPUMRDMSR} */
4210static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4211{
4212 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4213 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4214 return VINF_SUCCESS;
4215}
4216
4217
4218/** @callback_method_impl{FNCPUMWRMSR} */
4219static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4220{
4221 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4222 if (uValue & UINT64_C(0xfff))
4223 {
4224 Log(("CPUM: Invalid low 12 bits set writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4225 return VERR_CPUM_RAISE_GP_0;
4226 }
4227
4228 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4229 if (fInvPhysMask & uValue)
4230 {
4231 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4232 idMsr, uValue, uValue & fInvPhysMask));
4233 return VERR_CPUM_RAISE_GP_0;
4234 }
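    /* Worked example: with a 48-bit guest physical address width the mask above is
       ~(RT_BIT_64(48) - 1) = UINT64_C(0xffff000000000000), so e.g. a write of
       0x0001000000000000 sets unsupported address bits and raises #GP(0), while any
       4KB-aligned address below 2^48 passes both checks and is stored below. */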
4235
4236 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4237 return VINF_SUCCESS;
4238}
4239
4240
4241/** @callback_method_impl{FNCPUMRDMSR} */
4242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4243{
4244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4245 /** @todo AMD SVM. */
4246 *puValue = 0; /* RAZ */
4247 return VINF_SUCCESS;
4248}
4249
4250
4251/** @callback_method_impl{FNCPUMWRMSR} */
4252static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4253{
4254 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4255 /** @todo AMD SVM. */
4256 return VINF_SUCCESS;
4257}
4258
4259
4260/** @callback_method_impl{FNCPUMRDMSR} */
4261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4262{
4263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4264 /** @todo AMD SMM. */
4265 *puValue = 0; /* RAZ */
4266 return VINF_SUCCESS;
4267}
4268
4269
4270/** @callback_method_impl{FNCPUMWRMSR} */
4271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4272{
4273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4274 /** @todo AMD SMM. */
4275 return VINF_SUCCESS;
4276}
4277
4278
4279/** @callback_method_impl{FNCPUMRDMSR} */
4280static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4281{
4282 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4283 /** @todo AMD SMM/SMI. */
4284 *puValue = 0;
4285 return VINF_SUCCESS;
4286}
4287
4288
4289/** @callback_method_impl{FNCPUMWRMSR} */
4290static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4291{
4292 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4293 /** @todo AMD SMM/SMI. */
4294 return VINF_SUCCESS;
4295}
4296
4297
4298/** @callback_method_impl{FNCPUMRDMSR} */
4299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4300{
4301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4302 /** @todo AMD OS visible workaround. */
4303 *puValue = pRange->uValue;
4304 return VINF_SUCCESS;
4305}
4306
4307
4308/** @callback_method_impl{FNCPUMWRMSR} */
4309static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4310{
4311 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4312 /** @todo AMD OS visible workaround. */
4313 return VINF_SUCCESS;
4314}
4315
4316
4317/** @callback_method_impl{FNCPUMRDMSR} */
4318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4319{
4320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4321 /** @todo AMD OS visible workaround. */
4322 *puValue = 0;
4323 return VINF_SUCCESS;
4324}
4325
4326
4327/** @callback_method_impl{FNCPUMWRMSR} */
4328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4329{
4330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4331 /** @todo AMD OS visible workaround. */
4332 return VINF_SUCCESS;
4333}
4334
4335
4336/** @callback_method_impl{FNCPUMRDMSR} */
4337static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4338{
4339 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4340 /** @todo AMD L2I performance counters. */
4341 *puValue = 0;
4342 return VINF_SUCCESS;
4343}
4344
4345
4346/** @callback_method_impl{FNCPUMWRMSR} */
4347static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4348{
4349 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4350 /** @todo AMD L2I performance counters. */
4351 return VINF_SUCCESS;
4352}
4353
4354
4355/** @callback_method_impl{FNCPUMRDMSR} */
4356static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4357{
4358 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4359 /** @todo AMD L2I performance counters. */
4360 *puValue = 0;
4361 return VINF_SUCCESS;
4362}
4363
4364
4365/** @callback_method_impl{FNCPUMWRMSR} */
4366static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4367{
4368 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4369 /** @todo AMD L2I performance counters. */
4370 return VINF_SUCCESS;
4371}
4372
4373
4374/** @callback_method_impl{FNCPUMRDMSR} */
4375static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4376{
4377 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4378 /** @todo AMD Northbridge performance counters. */
4379 *puValue = 0;
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** @callback_method_impl{FNCPUMWRMSR} */
4385static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4386{
4387 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4388 /** @todo AMD Northbridge performance counters. */
4389 return VINF_SUCCESS;
4390}
4391
4392
4393/** @callback_method_impl{FNCPUMRDMSR} */
4394static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4395{
4396 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4397 /** @todo AMD Northbridge performance counters. */
4398 *puValue = 0;
4399 return VINF_SUCCESS;
4400}
4401
4402
4403/** @callback_method_impl{FNCPUMWRMSR} */
4404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4405{
4406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4407 /** @todo AMD Northbridge performance counters. */
4408 return VINF_SUCCESS;
4409}
4410
4411
4412/** @callback_method_impl{FNCPUMRDMSR} */
4413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4414{
4415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4416 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4417 * cpus. Needs to be explored and K7 presence verified. */
4418 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4419 *puValue = pRange->uValue;
4420 return VINF_SUCCESS;
4421}
4422
4423
4424/** @callback_method_impl{FNCPUMWRMSR} */
4425static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4426{
4427 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4428 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4429 * cpus. Needs to be explored and K7 presence verified. */
4430 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4431 return VINF_SUCCESS;
4432}
4433
4434
4435/** @callback_method_impl{FNCPUMRDMSR} */
4436static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4437{
4438 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4439 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4440 * cpus. Needs to be explored and K7 presence verified. */
4441 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4442 * describing EBL_CR_POWERON. */
4443 *puValue = pRange->uValue;
4444 return VINF_SUCCESS;
4445}
4446
4447
4448/** @callback_method_impl{FNCPUMWRMSR} */
4449static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4450{
4451 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4452 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4453 * cpus. Needs to be explored and K7 presence verified. */
4454 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4455 * describing EBL_CR_POWERON. */
4456 return VINF_SUCCESS;
4457}
4458
4459
4460/** @callback_method_impl{FNCPUMRDMSR} */
4461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4462{
4463 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4464 bool fIgnored;
4465 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4466 if (pLeaf)
4467 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4468 else
4469 *puValue = 0;
4470 return VINF_SUCCESS;
4471}
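
/* Packing note for these CPUID override readers: assuming the usual IPRT
 * RT_MAKE_U64(Lo, Hi) semantics, RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax) above returns
 * CPUID.07h EBX in bits 31:0 and EAX in bits 63:32, which is what the "Ebax"/"Edcx"
 * register-pair naming of these handlers refers to. */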
4472
4473
4474/** @callback_method_impl{FNCPUMWRMSR} */
4475static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4476{
4477 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4478 /** @todo Changing CPUID leaf 7/0. */
4479 return VINF_SUCCESS;
4480}
4481
4482
4483/** @callback_method_impl{FNCPUMRDMSR} */
4484static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4485{
4486 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4487 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4488 if (pLeaf)
4489 *puValue = pLeaf->uEcx;
4490 else
4491 *puValue = 0;
4492 return VINF_SUCCESS;
4493}
4494
4495
4496/** @callback_method_impl{FNCPUMWRMSR} */
4497static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4498{
4499 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4500 /** @todo Changing CPUID leaf 6. */
4501 return VINF_SUCCESS;
4502}
4503
4504
4505/** @callback_method_impl{FNCPUMRDMSR} */
4506static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4507{
4508 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4509 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4510 if (pLeaf)
4511 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4512 else
4513 *puValue = 0;
4514 return VINF_SUCCESS;
4515}
4516
4517
4518/** @callback_method_impl{FNCPUMWRMSR} */
4519static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4520{
4521 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4522 /** @todo Changing CPUID leaf 0x00000001. */
4523 return VINF_SUCCESS;
4524}
4525
4526
4527/** @callback_method_impl{FNCPUMRDMSR} */
4528static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4529{
4530 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4531 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4532 if (pLeaf)
4533 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4534 else
4535 *puValue = 0;
4536 return VINF_SUCCESS;
4537}
4538
4539
4540/** @callback_method_impl{FNCPUMWRMSR} */
4541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4542{
4543 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4544 /** @todo Changing CPUID leaf 0x80000001. */
4545 return VINF_SUCCESS;
4546}
4547
4548
4549/** @callback_method_impl{FNCPUMRDMSR} */
4550static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4551{
4552 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4553 /** @todo Fake AMD microcode patching. */
4554 *puValue = pRange->uValue;
4555 return VINF_SUCCESS;
4556}
4557
4558
4559/** @callback_method_impl{FNCPUMWRMSR} */
4560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4561{
4562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4563 /** @todo Fake AMD microcode patching. */
4564 return VINF_SUCCESS;
4565}
4566
4567
4568/** @callback_method_impl{FNCPUMRDMSR} */
4569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4570{
4571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4572 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4573 * cpus. Needs to be explored and K7 presence verified. */
4574 /** @todo undocumented */
4575 *puValue = 0;
4576 return VINF_SUCCESS;
4577}
4578
4579
4580/** @callback_method_impl{FNCPUMWRMSR} */
4581static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4582{
4583 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4584 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4585 * cpus. Needs to be explored and K7 presence verified. */
4586 /** @todo undocumented */
4587 return VINF_SUCCESS;
4588}
4589
4590
4591/** @callback_method_impl{FNCPUMRDMSR} */
4592static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4593{
4594 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4595 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4596 * cpus. Needs to be explored and K7 presence verified. */
4597 /** @todo undocumented */
4598 *puValue = 0;
4599 return VINF_SUCCESS;
4600}
4601
4602
4603/** @callback_method_impl{FNCPUMWRMSR} */
4604static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4605{
4606 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4607 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4608 * cpus. Needs to be explored and K7 presence verified. */
4609 /** @todo undocumented */
4610 return VINF_SUCCESS;
4611}
4612
4613
4614/** @callback_method_impl{FNCPUMRDMSR} */
4615static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4616{
4617 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4618 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4619 * cpus. Needs to be explored and K7 presence verified. */
4620 /** @todo undocumented */
4621 *puValue = 0;
4622 return VINF_SUCCESS;
4623}
4624
4625
4626/** @callback_method_impl{FNCPUMWRMSR} */
4627static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4628{
4629 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4630 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4631 * cpus. Needs to be explored and K7 presence verified. */
4632 /** @todo undocumented */
4633 return VINF_SUCCESS;
4634}
4635
4636
4637/** @callback_method_impl{FNCPUMRDMSR} */
4638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4639{
4640 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4641 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4642 * cpus. Needs to be explored and K7 presence verified. */
4643 /** @todo undocumented */
4644 *puValue = 0;
4645 return VINF_SUCCESS;
4646}
4647
4648
4649/** @callback_method_impl{FNCPUMWRMSR} */
4650static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4651{
4652 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4653 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4654 * cpus. Needs to be explored and K7 presence verified. */
4655 /** @todo undocumented */
4656 return VINF_SUCCESS;
4657}
4658
4659
4660/** @callback_method_impl{FNCPUMRDMSR} */
4661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4662{
4663 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4664 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4665 * cpus. Needs to be explored and K7 presence verified. */
4666 /** @todo undocumented */
4667 *puValue = 0;
4668 return VINF_SUCCESS;
4669}
4670
4671
4672/** @callback_method_impl{FNCPUMWRMSR} */
4673static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4674{
4675 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4676 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4677 * cpus. Needs to be explored and K7 presence verified. */
4678 /** @todo undocumented */
4679 return VINF_SUCCESS;
4680}
4681
4682
4683/** @callback_method_impl{FNCPUMRDMSR} */
4684static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4685{
4686 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4687 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4688 * cpus. Needs to be explored and K7 presence verified. */
4689 /** @todo undocumented */
4690 *puValue = 0;
4691 return VINF_SUCCESS;
4692}
4693
4694
4695/** @callback_method_impl{FNCPUMWRMSR} */
4696static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4697{
4698 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4699 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4700 * cpus. Needs to be explored and K7 presence verified. */
4701 /** @todo undocumented */
4702 return VINF_SUCCESS;
4703}
4704
4705
4706/** @callback_method_impl{FNCPUMRDMSR} */
4707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4708{
4709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4710 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4711 * cpus. Needs to be explored and K7 presence verified. */
4712 /** @todo AMD node ID and BIOS scratch. */
4713 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4714 return VINF_SUCCESS;
4715}
4716
4717
4718/** @callback_method_impl{FNCPUMWRMSR} */
4719static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4720{
4721 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4722 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4723 * cpus. Needs to be explored and K7 presence verified. */
4724 /** @todo AMD node ID and BIOS scratch. */
4725 return VINF_SUCCESS;
4726}
4727
4728
4729/** @callback_method_impl{FNCPUMRDMSR} */
4730static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4731{
4732 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4733 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4734 * cpus. Needs to be explored and K7 presence verified. */
4735 /** @todo AMD DRx address masking (range breakpoints). */
4736 *puValue = 0;
4737 return VINF_SUCCESS;
4738}
4739
4740
4741/** @callback_method_impl{FNCPUMWRMSR} */
4742static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4743{
4744 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4745 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4746 * cpus. Needs to be explored and K7 presence verified. */
4747 /** @todo AMD DRx address masking (range breakpoints). */
4748 return VINF_SUCCESS;
4749}
4750
4751
4752/** @callback_method_impl{FNCPUMRDMSR} */
4753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4754{
4755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4756 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4757 * cpus. Needs to be explored and K7 presence verified. */
4758 /** @todo AMD undocumented debugging features. */
4759 *puValue = 0;
4760 return VINF_SUCCESS;
4761}
4762
4763
4764/** @callback_method_impl{FNCPUMWRMSR} */
4765static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4766{
4767 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4768 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4769 * cpus. Needs to be explored and K7 presence verified. */
4770 /** @todo AMD undocumented debugging features. */
4771 return VINF_SUCCESS;
4772}
4773
4774
4775/** @callback_method_impl{FNCPUMRDMSR} */
4776static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4777{
4778 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4779 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4780 * cpus. Needs to be explored and K7 presence verified. */
4781 /** @todo AMD undocumented debugging features. */
4782 *puValue = 0;
4783 return VINF_SUCCESS;
4784}
4785
4786
4787/** @callback_method_impl{FNCPUMWRMSR} */
4788static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4789{
4790 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4791 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4792 * cpus. Needs to be explored and K7 presence verified. */
4793 /** @todo AMD undocumented debugging features. */
4794 return VINF_SUCCESS;
4795}
4796
4797
4798/** @callback_method_impl{FNCPUMRDMSR} */
4799static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4800{
4801 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4802 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4803 * cpus. Needs to be explored and K7 presence verified. */
4804 /** @todo AMD load-store config. */
4805 *puValue = 0;
4806 return VINF_SUCCESS;
4807}
4808
4809
4810/** @callback_method_impl{FNCPUMWRMSR} */
4811static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4812{
4813 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4814 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4815 * cpus. Needs to be explored and K7 presence verified. */
4816 /** @todo AMD load-store config. */
4817 return VINF_SUCCESS;
4818}
4819
4820
4821/** @callback_method_impl{FNCPUMRDMSR} */
4822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4823{
4824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4825 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4826 * cpus. Needs to be explored and K7 presence verified. */
4827 /** @todo AMD instruction cache config. */
4828 *puValue = 0;
4829 return VINF_SUCCESS;
4830}
4831
4832
4833/** @callback_method_impl{FNCPUMWRMSR} */
4834static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4835{
4836 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4837 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4838 * cpus. Needs to be explored and K7 presence verified. */
4839 /** @todo AMD instruction cache config. */
4840 return VINF_SUCCESS;
4841}
4842
4843
4844/** @callback_method_impl{FNCPUMRDMSR} */
4845static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4846{
4847 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4848 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4849 * cpus. Needs to be explored and K7 presence verified. */
4850 /** @todo AMD data cache config. */
4851 *puValue = 0;
4852 return VINF_SUCCESS;
4853}
4854
4855
4856/** @callback_method_impl{FNCPUMWRMSR} */
4857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4858{
4859 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4860 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4861 * cpus. Needs to be explored and K7 presence verified. */
4862 /** @todo AMD data cache config. */
4863 return VINF_SUCCESS;
4864}
4865
4866
4867/** @callback_method_impl{FNCPUMRDMSR} */
4868static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4869{
4870 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4871 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4872 * cpus. Needs to be explored and K7 presence verified. */
4873 /** @todo AMD bus unit config. */
4874 *puValue = 0;
4875 return VINF_SUCCESS;
4876}
4877
4878
4879/** @callback_method_impl{FNCPUMWRMSR} */
4880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4881{
4882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4883 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4884 * cpus. Needs to be explored and K7 presence verified. */
4885 /** @todo AMD bus unit config. */
4886 return VINF_SUCCESS;
4887}
4888
4889
4890/** @callback_method_impl{FNCPUMRDMSR} */
4891static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4892{
4893 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4894 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4895 * cpus. Needs to be explored and K7 presence verified. */
4896 /** @todo Undocumented AMD debug control register \#2. */
4897 *puValue = 0;
4898 return VINF_SUCCESS;
4899}
4900
4901
4902/** @callback_method_impl{FNCPUMWRMSR} */
4903static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4904{
4905 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4906 /** @todo Allegedly requiring edi=0x9c5a203a when executing rdmsr/wrmsr on older
4907 * cpus. Needs to be explored and K7 presence verified. */
4908 /** @todo Undocumented AMD debug control register \#2. */
4909 return VINF_SUCCESS;
4910}
4911
4912
4913/** @callback_method_impl{FNCPUMRDMSR} */
4914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4915{
4916 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4917 /** @todo AMD FPU config. */
4918 *puValue = 0;
4919 return VINF_SUCCESS;
4920}
4921
4922
4923/** @callback_method_impl{FNCPUMWRMSR} */
4924static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4925{
4926 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4927 /** @todo AMD FPU config. */
4928 return VINF_SUCCESS;
4929}
4930
4931
4932/** @callback_method_impl{FNCPUMRDMSR} */
4933static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4934{
4935 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4936 /** @todo AMD decoder config. */
4937 *puValue = 0;
4938 return VINF_SUCCESS;
4939}
4940
4941
4942/** @callback_method_impl{FNCPUMWRMSR} */
4943static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4944{
4945 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4946 /** @todo AMD decoder config. */
4947 return VINF_SUCCESS;
4948}
4949
4950
4951/** @callback_method_impl{FNCPUMRDMSR} */
4952static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4953{
4954 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4955 /* Note! 10h and 16h */
4956 /** @todo AMD bus unit config. */
4957 *puValue = 0;
4958 return VINF_SUCCESS;
4959}
4960
4961
4962/** @callback_method_impl{FNCPUMWRMSR} */
4963static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4964{
4965 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4966 /* Note! 10h and 16h */
4967 /** @todo AMD bus unit config. */
4968 return VINF_SUCCESS;
4969}
4970
4971
4972/** @callback_method_impl{FNCPUMRDMSR} */
4973static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4974{
4975 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4976 /** @todo AMD unit config. */
4977 *puValue = 0;
4978 return VINF_SUCCESS;
4979}
4980
4981
4982/** @callback_method_impl{FNCPUMWRMSR} */
4983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4984{
4985 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4986 /** @todo AMD unit config. */
4987 return VINF_SUCCESS;
4988}
4989
4990
4991/** @callback_method_impl{FNCPUMRDMSR} */
4992static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4993{
4994 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4995 /** @todo AMD unit config 2. */
4996 *puValue = 0;
4997 return VINF_SUCCESS;
4998}
4999
5000
5001/** @callback_method_impl{FNCPUMWRMSR} */
5002static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5003{
5004 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5005 /** @todo AMD unit config 2. */
5006 return VINF_SUCCESS;
5007}
5008
5009
5010/** @callback_method_impl{FNCPUMRDMSR} */
5011static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5012{
5013 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5014 /** @todo AMD combined unit config 3. */
5015 *puValue = 0;
5016 return VINF_SUCCESS;
5017}
5018
5019
5020/** @callback_method_impl{FNCPUMWRMSR} */
5021static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5022{
5023 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5024 /** @todo AMD combined unit config 3. */
5025 return VINF_SUCCESS;
5026}
5027
5028
5029/** @callback_method_impl{FNCPUMRDMSR} */
5030static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5031{
5032 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5033 /** @todo AMD execution unit config. */
5034 *puValue = 0;
5035 return VINF_SUCCESS;
5036}
5037
5038
5039/** @callback_method_impl{FNCPUMWRMSR} */
5040static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5041{
5042 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5043 /** @todo AMD execution unit config. */
5044 return VINF_SUCCESS;
5045}
5046
5047
5048/** @callback_method_impl{FNCPUMRDMSR} */
5049static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5050{
5051 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5052 /** @todo AMD load-store config 2. */
5053 *puValue = 0;
5054 return VINF_SUCCESS;
5055}
5056
5057
5058/** @callback_method_impl{FNCPUMWRMSR} */
5059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5060{
5061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5062 /** @todo AMD load-store config 2. */
5063 return VINF_SUCCESS;
5064}
5065
5066
5067/** @callback_method_impl{FNCPUMRDMSR} */
5068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5069{
5070 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5071 /** @todo AMD IBS. */
5072 *puValue = 0;
5073 return VINF_SUCCESS;
5074}
5075
5076
5077/** @callback_method_impl{FNCPUMWRMSR} */
5078static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5079{
5080 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5081 /** @todo AMD IBS. */
5082 return VINF_SUCCESS;
5083}
5084
5085
5086/** @callback_method_impl{FNCPUMRDMSR} */
5087static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5088{
5089 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5090 /** @todo AMD IBS. */
5091 *puValue = 0;
5092 return VINF_SUCCESS;
5093}
5094
5095
5096/** @callback_method_impl{FNCPUMWRMSR} */
5097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5098{
5099 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5100 /** @todo AMD IBS. */
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** @callback_method_impl{FNCPUMRDMSR} */
5106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5107{
5108 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5109 /** @todo AMD IBS. */
5110 *puValue = 0;
5111 return VINF_SUCCESS;
5112}
5113
5114
5115/** @callback_method_impl{FNCPUMWRMSR} */
5116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5117{
5118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5119 /** @todo AMD IBS. */
5120 return VINF_SUCCESS;
5121}
5122
5123
5124/** @callback_method_impl{FNCPUMRDMSR} */
5125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5126{
5127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5128 /** @todo AMD IBS. */
5129 *puValue = 0;
5130 return VINF_SUCCESS;
5131}
5132
5133
5134/** @callback_method_impl{FNCPUMWRMSR} */
5135static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5136{
5137 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5138 /** @todo AMD IBS. */
5139 return VINF_SUCCESS;
5140}
5141
5142
5143/** @callback_method_impl{FNCPUMRDMSR} */
5144static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5145{
5146 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5147 /** @todo AMD IBS. */
5148 *puValue = 0;
5149 return VINF_SUCCESS;
5150}
5151
5152
5153/** @callback_method_impl{FNCPUMWRMSR} */
5154static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5155{
5156 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5157 /** @todo AMD IBS. */
5158 if (!X86_IS_CANONICAL(uValue))
5159 {
5160 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5161 return VERR_CPUM_RAISE_GP_0;
5162 }
5163 return VINF_SUCCESS;
5164}
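
/* The canonical-address guard above (also used by the IbsDcLinAddr and IbsBrTarget
 * writers below) relies on X86_IS_CANONICAL from iprt/x86.h; with 48-bit linear
 * addressing that amounts to roughly
 *      (uValue + UINT64_C(0x800000000000)) < UINT64_C(0x1000000000000)
 * i.e. bits 63:48 must all be copies of bit 47, so non-canonical IBS addresses
 * raise #GP(0) just like on real hardware. */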
5165
5166
5167/** @callback_method_impl{FNCPUMRDMSR} */
5168static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5169{
5170 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5171 /** @todo AMD IBS. */
5172 *puValue = 0;
5173 return VINF_SUCCESS;
5174}
5175
5176
5177/** @callback_method_impl{FNCPUMWRMSR} */
5178static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5179{
5180 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5181 /** @todo AMD IBS. */
5182 return VINF_SUCCESS;
5183}
5184
5185
5186/** @callback_method_impl{FNCPUMRDMSR} */
5187static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5188{
5189 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5190 /** @todo AMD IBS. */
5191 *puValue = 0;
5192 return VINF_SUCCESS;
5193}
5194
5195
5196/** @callback_method_impl{FNCPUMWRMSR} */
5197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5198{
5199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5200 /** @todo AMD IBS. */
5201 return VINF_SUCCESS;
5202}
5203
5204
5205/** @callback_method_impl{FNCPUMRDMSR} */
5206static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5207{
5208 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5209 /** @todo AMD IBS. */
5210 *puValue = 0;
5211 return VINF_SUCCESS;
5212}
5213
5214
5215/** @callback_method_impl{FNCPUMWRMSR} */
5216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5217{
5218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5219 /** @todo AMD IBS. */
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** @callback_method_impl{FNCPUMRDMSR} */
5225static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5226{
5227 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5228 /** @todo AMD IBS. */
5229 *puValue = 0;
5230 return VINF_SUCCESS;
5231}
5232
5233
5234/** @callback_method_impl{FNCPUMWRMSR} */
5235static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5236{
5237 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5238 /** @todo AMD IBS. */
5239 if (!X86_IS_CANONICAL(uValue))
5240 {
5241 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5242 return VERR_CPUM_RAISE_GP_0;
5243 }
5244 return VINF_SUCCESS;
5245}
5246
5247
5248/** @callback_method_impl{FNCPUMRDMSR} */
5249static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5250{
5251 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5252 /** @todo AMD IBS. */
5253 *puValue = 0;
5254 return VINF_SUCCESS;
5255}
5256
5257
5258/** @callback_method_impl{FNCPUMWRMSR} */
5259static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5260{
5261 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5262 /** @todo AMD IBS. */
5263 return VINF_SUCCESS;
5264}
5265
5266
5267/** @callback_method_impl{FNCPUMRDMSR} */
5268static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5269{
5270 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5271 /** @todo AMD IBS. */
5272 *puValue = 0;
5273 return VINF_SUCCESS;
5274}
5275
5276
5277/** @callback_method_impl{FNCPUMWRMSR} */
5278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5279{
5280 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5281 /** @todo AMD IBS. */
5282 return VINF_SUCCESS;
5283}
5284
5285
5286/** @callback_method_impl{FNCPUMRDMSR} */
5287static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5288{
5289 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5290 /** @todo AMD IBS. */
5291 *puValue = 0;
5292 return VINF_SUCCESS;
5293}
5294
5295
5296/** @callback_method_impl{FNCPUMWRMSR} */
5297static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5298{
5299 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5300 /** @todo AMD IBS. */
5301 if (!X86_IS_CANONICAL(uValue))
5302 {
5303 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5304 return VERR_CPUM_RAISE_GP_0;
5305 }
5306 return VINF_SUCCESS;
5307}
5308
5309
5310
5311/*
5312 * GIM MSRs.
5313 * GIM MSRs.
5314 * GIM MSRs.
5315 */
5316
5317
5318/** @callback_method_impl{FNCPUMRDMSR} */
5319static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5320{
5321#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5322 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5323 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5324 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5325 return VERR_CPUM_RAISE_GP_0;
5326#endif
5327 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5328}
5329
5330
5331/** @callback_method_impl{FNCPUMWRMSR} */
5332static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5333{
5334#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5335 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5336 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5337 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5338 return VERR_CPUM_RAISE_GP_0;
5339#endif
5340 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5341}
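
/* GIMReadMsr and GIMWriteMsr forward the access to whichever guest interface (GIM)
 * provider is configured for the VM (e.g. Hyper-V or KVM paravirtualized MSRs), so
 * the two wrappers above only add the nested hardware-virtualization #GP(0) case. */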
5342
5343
5344/**
5345 * MSR read function table.
5346 */
5347static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5348{
5349 NULL, /* Invalid */
5350 cpumMsrRd_FixedValue,
5351 NULL, /* Alias */
5352 cpumMsrRd_WriteOnly,
5353 cpumMsrRd_Ia32P5McAddr,
5354 cpumMsrRd_Ia32P5McType,
5355 cpumMsrRd_Ia32TimestampCounter,
5356 cpumMsrRd_Ia32PlatformId,
5357 cpumMsrRd_Ia32ApicBase,
5358 cpumMsrRd_Ia32FeatureControl,
5359 cpumMsrRd_Ia32BiosSignId,
5360 cpumMsrRd_Ia32SmmMonitorCtl,
5361 cpumMsrRd_Ia32PmcN,
5362 cpumMsrRd_Ia32MonitorFilterLineSize,
5363 cpumMsrRd_Ia32MPerf,
5364 cpumMsrRd_Ia32APerf,
5365 cpumMsrRd_Ia32MtrrCap,
5366 cpumMsrRd_Ia32MtrrPhysBaseN,
5367 cpumMsrRd_Ia32MtrrPhysMaskN,
5368 cpumMsrRd_Ia32MtrrFixed,
5369 cpumMsrRd_Ia32MtrrDefType,
5370 cpumMsrRd_Ia32Pat,
5371 cpumMsrRd_Ia32SysEnterCs,
5372 cpumMsrRd_Ia32SysEnterEsp,
5373 cpumMsrRd_Ia32SysEnterEip,
5374 cpumMsrRd_Ia32McgCap,
5375 cpumMsrRd_Ia32McgStatus,
5376 cpumMsrRd_Ia32McgCtl,
5377 cpumMsrRd_Ia32DebugCtl,
5378 cpumMsrRd_Ia32SmrrPhysBase,
5379 cpumMsrRd_Ia32SmrrPhysMask,
5380 cpumMsrRd_Ia32PlatformDcaCap,
5381 cpumMsrRd_Ia32CpuDcaCap,
5382 cpumMsrRd_Ia32Dca0Cap,
5383 cpumMsrRd_Ia32PerfEvtSelN,
5384 cpumMsrRd_Ia32PerfStatus,
5385 cpumMsrRd_Ia32PerfCtl,
5386 cpumMsrRd_Ia32FixedCtrN,
5387 cpumMsrRd_Ia32PerfCapabilities,
5388 cpumMsrRd_Ia32FixedCtrCtrl,
5389 cpumMsrRd_Ia32PerfGlobalStatus,
5390 cpumMsrRd_Ia32PerfGlobalCtrl,
5391 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5392 cpumMsrRd_Ia32PebsEnable,
5393 cpumMsrRd_Ia32ClockModulation,
5394 cpumMsrRd_Ia32ThermInterrupt,
5395 cpumMsrRd_Ia32ThermStatus,
5396 cpumMsrRd_Ia32Therm2Ctl,
5397 cpumMsrRd_Ia32MiscEnable,
5398 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5399 cpumMsrRd_Ia32McNCtl2,
5400 cpumMsrRd_Ia32DsArea,
5401 cpumMsrRd_Ia32TscDeadline,
5402 cpumMsrRd_Ia32X2ApicN,
5403 cpumMsrRd_Ia32DebugInterface,
5404 cpumMsrRd_Ia32VmxBasic,
5405 cpumMsrRd_Ia32VmxPinbasedCtls,
5406 cpumMsrRd_Ia32VmxProcbasedCtls,
5407 cpumMsrRd_Ia32VmxExitCtls,
5408 cpumMsrRd_Ia32VmxEntryCtls,
5409 cpumMsrRd_Ia32VmxMisc,
5410 cpumMsrRd_Ia32VmxCr0Fixed0,
5411 cpumMsrRd_Ia32VmxCr0Fixed1,
5412 cpumMsrRd_Ia32VmxCr4Fixed0,
5413 cpumMsrRd_Ia32VmxCr4Fixed1,
5414 cpumMsrRd_Ia32VmxVmcsEnum,
5415 cpumMsrRd_Ia32VmxProcBasedCtls2,
5416 cpumMsrRd_Ia32VmxEptVpidCap,
5417 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5418 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5419 cpumMsrRd_Ia32VmxTrueExitCtls,
5420 cpumMsrRd_Ia32VmxTrueEntryCtls,
5421 cpumMsrRd_Ia32VmxVmFunc,
5422 cpumMsrRd_Ia32SpecCtrl,
5423 cpumMsrRd_Ia32ArchCapabilities,
5424
5425 cpumMsrRd_Amd64Efer,
5426 cpumMsrRd_Amd64SyscallTarget,
5427 cpumMsrRd_Amd64LongSyscallTarget,
5428 cpumMsrRd_Amd64CompSyscallTarget,
5429 cpumMsrRd_Amd64SyscallFlagMask,
5430 cpumMsrRd_Amd64FsBase,
5431 cpumMsrRd_Amd64GsBase,
5432 cpumMsrRd_Amd64KernelGsBase,
5433 cpumMsrRd_Amd64TscAux,
5434
5435 cpumMsrRd_IntelEblCrPowerOn,
5436 cpumMsrRd_IntelI7CoreThreadCount,
5437 cpumMsrRd_IntelP4EbcHardPowerOn,
5438 cpumMsrRd_IntelP4EbcSoftPowerOn,
5439 cpumMsrRd_IntelP4EbcFrequencyId,
5440 cpumMsrRd_IntelP6FsbFrequency,
5441 cpumMsrRd_IntelPlatformInfo,
5442 cpumMsrRd_IntelFlexRatio,
5443 cpumMsrRd_IntelPkgCStConfigControl,
5444 cpumMsrRd_IntelPmgIoCaptureBase,
5445 cpumMsrRd_IntelLastBranchFromToN,
5446 cpumMsrRd_IntelLastBranchFromN,
5447 cpumMsrRd_IntelLastBranchToN,
5448 cpumMsrRd_IntelLastBranchTos,
5449 cpumMsrRd_IntelBblCrCtl,
5450 cpumMsrRd_IntelBblCrCtl3,
5451 cpumMsrRd_IntelI7TemperatureTarget,
5452 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5453 cpumMsrRd_IntelI7MiscPwrMgmt,
5454 cpumMsrRd_IntelP6CrN,
5455 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5456 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5457 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5458 cpumMsrRd_IntelI7SandyAesNiCtl,
5459 cpumMsrRd_IntelI7TurboRatioLimit,
5460 cpumMsrRd_IntelI7LbrSelect,
5461 cpumMsrRd_IntelI7SandyErrorControl,
5462 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5463 cpumMsrRd_IntelI7PowerCtl,
5464 cpumMsrRd_IntelI7SandyPebsNumAlt,
5465 cpumMsrRd_IntelI7PebsLdLat,
5466 cpumMsrRd_IntelI7PkgCnResidencyN,
5467 cpumMsrRd_IntelI7CoreCnResidencyN,
5468 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5469 cpumMsrRd_IntelI7SandyVrMiscConfig,
5470 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5471 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5472 cpumMsrRd_IntelI7SandyPkgC2Residency,
5473 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5474 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5475 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5476 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5477 cpumMsrRd_IntelI7RaplDramPowerLimit,
5478 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5479 cpumMsrRd_IntelI7RaplDramPerfStatus,
5480 cpumMsrRd_IntelI7RaplDramPowerInfo,
5481 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5482 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5483 cpumMsrRd_IntelI7RaplPp0Policy,
5484 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5485 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5486 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5487 cpumMsrRd_IntelI7RaplPp1Policy,
5488 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5489 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5490 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5491 cpumMsrRd_IntelI7IvyConfigTdpControl,
5492 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5493 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5494 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5495 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5496 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5497 cpumMsrRd_IntelI7UncPerfFixedCtr,
5498 cpumMsrRd_IntelI7UncCBoxConfig,
5499 cpumMsrRd_IntelI7UncArbPerfCtrN,
5500 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5501 cpumMsrRd_IntelI7SmiCount,
5502 cpumMsrRd_IntelCore2EmttmCrTablesN,
5503 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5504 cpumMsrRd_IntelCore1ExtConfig,
5505 cpumMsrRd_IntelCore1DtsCalControl,
5506 cpumMsrRd_IntelCore2PeciControl,
5507 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5508
5509 cpumMsrRd_P6LastBranchFromIp,
5510 cpumMsrRd_P6LastBranchToIp,
5511 cpumMsrRd_P6LastIntFromIp,
5512 cpumMsrRd_P6LastIntToIp,
5513
5514 cpumMsrRd_AmdFam15hTscRate,
5515 cpumMsrRd_AmdFam15hLwpCfg,
5516 cpumMsrRd_AmdFam15hLwpCbAddr,
5517 cpumMsrRd_AmdFam10hMc4MiscN,
5518 cpumMsrRd_AmdK8PerfCtlN,
5519 cpumMsrRd_AmdK8PerfCtrN,
5520 cpumMsrRd_AmdK8SysCfg,
5521 cpumMsrRd_AmdK8HwCr,
5522 cpumMsrRd_AmdK8IorrBaseN,
5523 cpumMsrRd_AmdK8IorrMaskN,
5524 cpumMsrRd_AmdK8TopOfMemN,
5525 cpumMsrRd_AmdK8NbCfg1,
5526 cpumMsrRd_AmdK8McXcptRedir,
5527 cpumMsrRd_AmdK8CpuNameN,
5528 cpumMsrRd_AmdK8HwThermalCtrl,
5529 cpumMsrRd_AmdK8SwThermalCtrl,
5530 cpumMsrRd_AmdK8FidVidControl,
5531 cpumMsrRd_AmdK8FidVidStatus,
5532 cpumMsrRd_AmdK8McCtlMaskN,
5533 cpumMsrRd_AmdK8SmiOnIoTrapN,
5534 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5535 cpumMsrRd_AmdK8IntPendingMessage,
5536 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5537 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5538 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5539 cpumMsrRd_AmdFam10hPStateCurLimit,
5540 cpumMsrRd_AmdFam10hPStateControl,
5541 cpumMsrRd_AmdFam10hPStateStatus,
5542 cpumMsrRd_AmdFam10hPStateN,
5543 cpumMsrRd_AmdFam10hCofVidControl,
5544 cpumMsrRd_AmdFam10hCofVidStatus,
5545 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5546 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5547 cpumMsrRd_AmdK8SmmBase,
5548 cpumMsrRd_AmdK8SmmAddr,
5549 cpumMsrRd_AmdK8SmmMask,
5550 cpumMsrRd_AmdK8VmCr,
5551 cpumMsrRd_AmdK8IgnNe,
5552 cpumMsrRd_AmdK8SmmCtl,
5553 cpumMsrRd_AmdK8VmHSavePa,
5554 cpumMsrRd_AmdFam10hVmLockKey,
5555 cpumMsrRd_AmdFam10hSmmLockKey,
5556 cpumMsrRd_AmdFam10hLocalSmiStatus,
5557 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5558 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5559 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5560 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5561 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5562 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5563 cpumMsrRd_AmdK7MicrocodeCtl,
5564 cpumMsrRd_AmdK7ClusterIdMaybe,
5565 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5566 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5567 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5568 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5569 cpumMsrRd_AmdK8PatchLevel,
5570 cpumMsrRd_AmdK7DebugStatusMaybe,
5571 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5572 cpumMsrRd_AmdK7BHTracePtrMaybe,
5573 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5574 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5575 cpumMsrRd_AmdK7FastFlushCountMaybe,
5576 cpumMsrRd_AmdK7NodeId,
5577 cpumMsrRd_AmdK7DrXAddrMaskN,
5578 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5579 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5580 cpumMsrRd_AmdK7LoadStoreCfg,
5581 cpumMsrRd_AmdK7InstrCacheCfg,
5582 cpumMsrRd_AmdK7DataCacheCfg,
5583 cpumMsrRd_AmdK7BusUnitCfg,
5584 cpumMsrRd_AmdK7DebugCtl2Maybe,
5585 cpumMsrRd_AmdFam15hFpuCfg,
5586 cpumMsrRd_AmdFam15hDecoderCfg,
5587 cpumMsrRd_AmdFam10hBusUnitCfg2,
5588 cpumMsrRd_AmdFam15hCombUnitCfg,
5589 cpumMsrRd_AmdFam15hCombUnitCfg2,
5590 cpumMsrRd_AmdFam15hCombUnitCfg3,
5591 cpumMsrRd_AmdFam15hExecUnitCfg,
5592 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5593 cpumMsrRd_AmdFam10hIbsFetchCtl,
5594 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5595 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5596 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5597 cpumMsrRd_AmdFam10hIbsOpRip,
5598 cpumMsrRd_AmdFam10hIbsOpData,
5599 cpumMsrRd_AmdFam10hIbsOpData2,
5600 cpumMsrRd_AmdFam10hIbsOpData3,
5601 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5602 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5603 cpumMsrRd_AmdFam10hIbsCtl,
5604 cpumMsrRd_AmdFam14hIbsBrTarget,
5605
5606 cpumMsrRd_Gim
5607};
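
/* This table is indexed by the kCpumMsrRdFn_* value stored in CPUMMSRRANGE::enmRdFn,
 * so the entry order must match that enum exactly.  A minimal sketch of how a guest
 * RDMSR reaches one of the workers (the actual dispatcher later in this file also
 * validates the index and gives aliases and ring-3 deferral special treatment):
 *
 *      PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
 *      if (pRange)
 *          rcStrict = g_aCpumRdMsrFns[pRange->enmRdFn](pVCpu, idMsr, pRange, &uValue);
 */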
5608
5609
5610/**
5611 * MSR write function table.
5612 */
5613static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5614{
5615 NULL, /* Invalid */
5616 cpumMsrWr_IgnoreWrite,
5617 cpumMsrWr_ReadOnly,
5618 NULL, /* Alias */
5619 cpumMsrWr_Ia32P5McAddr,
5620 cpumMsrWr_Ia32P5McType,
5621 cpumMsrWr_Ia32TimestampCounter,
5622 cpumMsrWr_Ia32ApicBase,
5623 cpumMsrWr_Ia32FeatureControl,
5624 cpumMsrWr_Ia32BiosSignId,
5625 cpumMsrWr_Ia32BiosUpdateTrigger,
5626 cpumMsrWr_Ia32SmmMonitorCtl,
5627 cpumMsrWr_Ia32PmcN,
5628 cpumMsrWr_Ia32MonitorFilterLineSize,
5629 cpumMsrWr_Ia32MPerf,
5630 cpumMsrWr_Ia32APerf,
5631 cpumMsrWr_Ia32MtrrPhysBaseN,
5632 cpumMsrWr_Ia32MtrrPhysMaskN,
5633 cpumMsrWr_Ia32MtrrFixed,
5634 cpumMsrWr_Ia32MtrrDefType,
5635 cpumMsrWr_Ia32Pat,
5636 cpumMsrWr_Ia32SysEnterCs,
5637 cpumMsrWr_Ia32SysEnterEsp,
5638 cpumMsrWr_Ia32SysEnterEip,
5639 cpumMsrWr_Ia32McgStatus,
5640 cpumMsrWr_Ia32McgCtl,
5641 cpumMsrWr_Ia32DebugCtl,
5642 cpumMsrWr_Ia32SmrrPhysBase,
5643 cpumMsrWr_Ia32SmrrPhysMask,
5644 cpumMsrWr_Ia32PlatformDcaCap,
5645 cpumMsrWr_Ia32Dca0Cap,
5646 cpumMsrWr_Ia32PerfEvtSelN,
5647 cpumMsrWr_Ia32PerfStatus,
5648 cpumMsrWr_Ia32PerfCtl,
5649 cpumMsrWr_Ia32FixedCtrN,
5650 cpumMsrWr_Ia32PerfCapabilities,
5651 cpumMsrWr_Ia32FixedCtrCtrl,
5652 cpumMsrWr_Ia32PerfGlobalStatus,
5653 cpumMsrWr_Ia32PerfGlobalCtrl,
5654 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5655 cpumMsrWr_Ia32PebsEnable,
5656 cpumMsrWr_Ia32ClockModulation,
5657 cpumMsrWr_Ia32ThermInterrupt,
5658 cpumMsrWr_Ia32ThermStatus,
5659 cpumMsrWr_Ia32Therm2Ctl,
5660 cpumMsrWr_Ia32MiscEnable,
5661 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5662 cpumMsrWr_Ia32McNCtl2,
5663 cpumMsrWr_Ia32DsArea,
5664 cpumMsrWr_Ia32TscDeadline,
5665 cpumMsrWr_Ia32X2ApicN,
5666 cpumMsrWr_Ia32DebugInterface,
5667 cpumMsrWr_Ia32SpecCtrl,
5668 cpumMsrWr_Ia32PredCmd,
5669
5670 cpumMsrWr_Amd64Efer,
5671 cpumMsrWr_Amd64SyscallTarget,
5672 cpumMsrWr_Amd64LongSyscallTarget,
5673 cpumMsrWr_Amd64CompSyscallTarget,
5674 cpumMsrWr_Amd64SyscallFlagMask,
5675 cpumMsrWr_Amd64FsBase,
5676 cpumMsrWr_Amd64GsBase,
5677 cpumMsrWr_Amd64KernelGsBase,
5678 cpumMsrWr_Amd64TscAux,
5679
5680 cpumMsrWr_IntelEblCrPowerOn,
5681 cpumMsrWr_IntelP4EbcHardPowerOn,
5682 cpumMsrWr_IntelP4EbcSoftPowerOn,
5683 cpumMsrWr_IntelP4EbcFrequencyId,
5684 cpumMsrWr_IntelFlexRatio,
5685 cpumMsrWr_IntelPkgCStConfigControl,
5686 cpumMsrWr_IntelPmgIoCaptureBase,
5687 cpumMsrWr_IntelLastBranchFromToN,
5688 cpumMsrWr_IntelLastBranchFromN,
5689 cpumMsrWr_IntelLastBranchToN,
5690 cpumMsrWr_IntelLastBranchTos,
5691 cpumMsrWr_IntelBblCrCtl,
5692 cpumMsrWr_IntelBblCrCtl3,
5693 cpumMsrWr_IntelI7TemperatureTarget,
5694 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5695 cpumMsrWr_IntelI7MiscPwrMgmt,
5696 cpumMsrWr_IntelP6CrN,
5697 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5698 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5699 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5700 cpumMsrWr_IntelI7SandyAesNiCtl,
5701 cpumMsrWr_IntelI7TurboRatioLimit,
5702 cpumMsrWr_IntelI7LbrSelect,
5703 cpumMsrWr_IntelI7SandyErrorControl,
5704 cpumMsrWr_IntelI7PowerCtl,
5705 cpumMsrWr_IntelI7SandyPebsNumAlt,
5706 cpumMsrWr_IntelI7PebsLdLat,
5707 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5708 cpumMsrWr_IntelI7SandyVrMiscConfig,
5709 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5710 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5711 cpumMsrWr_IntelI7SandyPkgC2Residency,
5712 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5713 cpumMsrWr_IntelI7RaplDramPowerLimit,
5714 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5715 cpumMsrWr_IntelI7RaplPp0Policy,
5716 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5717 cpumMsrWr_IntelI7RaplPp1Policy,
5718 cpumMsrWr_IntelI7IvyConfigTdpControl,
5719 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5720 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5721 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5722 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5723 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5724 cpumMsrWr_IntelI7UncPerfFixedCtr,
5725 cpumMsrWr_IntelI7UncArbPerfCtrN,
5726 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5727 cpumMsrWr_IntelCore2EmttmCrTablesN,
5728 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5729 cpumMsrWr_IntelCore1ExtConfig,
5730 cpumMsrWr_IntelCore1DtsCalControl,
5731 cpumMsrWr_IntelCore2PeciControl,
5732
5733 cpumMsrWr_P6LastIntFromIp,
5734 cpumMsrWr_P6LastIntToIp,
5735
5736 cpumMsrWr_AmdFam15hTscRate,
5737 cpumMsrWr_AmdFam15hLwpCfg,
5738 cpumMsrWr_AmdFam15hLwpCbAddr,
5739 cpumMsrWr_AmdFam10hMc4MiscN,
5740 cpumMsrWr_AmdK8PerfCtlN,
5741 cpumMsrWr_AmdK8PerfCtrN,
5742 cpumMsrWr_AmdK8SysCfg,
5743 cpumMsrWr_AmdK8HwCr,
5744 cpumMsrWr_AmdK8IorrBaseN,
5745 cpumMsrWr_AmdK8IorrMaskN,
5746 cpumMsrWr_AmdK8TopOfMemN,
5747 cpumMsrWr_AmdK8NbCfg1,
5748 cpumMsrWr_AmdK8McXcptRedir,
5749 cpumMsrWr_AmdK8CpuNameN,
5750 cpumMsrWr_AmdK8HwThermalCtrl,
5751 cpumMsrWr_AmdK8SwThermalCtrl,
5752 cpumMsrWr_AmdK8FidVidControl,
5753 cpumMsrWr_AmdK8McCtlMaskN,
5754 cpumMsrWr_AmdK8SmiOnIoTrapN,
5755 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5756 cpumMsrWr_AmdK8IntPendingMessage,
5757 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5758 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5759 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5760 cpumMsrWr_AmdFam10hPStateControl,
5761 cpumMsrWr_AmdFam10hPStateStatus,
5762 cpumMsrWr_AmdFam10hPStateN,
5763 cpumMsrWr_AmdFam10hCofVidControl,
5764 cpumMsrWr_AmdFam10hCofVidStatus,
5765 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5766 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5767 cpumMsrWr_AmdK8SmmBase,
5768 cpumMsrWr_AmdK8SmmAddr,
5769 cpumMsrWr_AmdK8SmmMask,
5770 cpumMsrWr_AmdK8VmCr,
5771 cpumMsrWr_AmdK8IgnNe,
5772 cpumMsrWr_AmdK8SmmCtl,
5773 cpumMsrWr_AmdK8VmHSavePa,
5774 cpumMsrWr_AmdFam10hVmLockKey,
5775 cpumMsrWr_AmdFam10hSmmLockKey,
5776 cpumMsrWr_AmdFam10hLocalSmiStatus,
5777 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5778 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5779 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5780 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5781 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5782 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5783 cpumMsrWr_AmdK7MicrocodeCtl,
5784 cpumMsrWr_AmdK7ClusterIdMaybe,
5785 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5786 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5787 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5788 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5789 cpumMsrWr_AmdK8PatchLoader,
5790 cpumMsrWr_AmdK7DebugStatusMaybe,
5791 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5792 cpumMsrWr_AmdK7BHTracePtrMaybe,
5793 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5794 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5795 cpumMsrWr_AmdK7FastFlushCountMaybe,
5796 cpumMsrWr_AmdK7NodeId,
5797 cpumMsrWr_AmdK7DrXAddrMaskN,
5798 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5799 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5800 cpumMsrWr_AmdK7LoadStoreCfg,
5801 cpumMsrWr_AmdK7InstrCacheCfg,
5802 cpumMsrWr_AmdK7DataCacheCfg,
5803 cpumMsrWr_AmdK7BusUnitCfg,
5804 cpumMsrWr_AmdK7DebugCtl2Maybe,
5805 cpumMsrWr_AmdFam15hFpuCfg,
5806 cpumMsrWr_AmdFam15hDecoderCfg,
5807 cpumMsrWr_AmdFam10hBusUnitCfg2,
5808 cpumMsrWr_AmdFam15hCombUnitCfg,
5809 cpumMsrWr_AmdFam15hCombUnitCfg2,
5810 cpumMsrWr_AmdFam15hCombUnitCfg3,
5811 cpumMsrWr_AmdFam15hExecUnitCfg,
5812 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5813 cpumMsrWr_AmdFam10hIbsFetchCtl,
5814 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5815 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5816 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5817 cpumMsrWr_AmdFam10hIbsOpRip,
5818 cpumMsrWr_AmdFam10hIbsOpData,
5819 cpumMsrWr_AmdFam10hIbsOpData2,
5820 cpumMsrWr_AmdFam10hIbsOpData3,
5821 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5822 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5823 cpumMsrWr_AmdFam10hIbsCtl,
5824 cpumMsrWr_AmdFam14hIbsBrTarget,
5825
5826 cpumMsrWr_Gim
5827};
5828
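/*
 * Editor's note (illustrative sketch, not part of the original file): the table
 * above maps every kCpumMsrWrFn_* enum value to its worker, so once the MSR's
 * range descriptor is known a WRMSR dispatch is a single array lookup:
 *
 *     CPUMMSRWRFN  enmWrFn  = (CPUMMSRWRFN)pRange->enmWrFn;    // enum stored in the descriptor
 *     PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];        // one entry per kCpumMsrWrFn_* value
 *     VBOXSTRICTRC rc       = pfnWrMsr(pVCpu, idMsr, pRange, uValue, uValue);
 *
 * The real, fully checked version of this dispatch (including the ignore-mask
 * adjustment of the fourth argument) is CPUMSetGuestMsr() further below.
 */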
5829
5830/**
5831 * Looks up the range for the given MSR.
5832 *
5833 * @returns Pointer to the range if found, NULL if not.
5834 * @param pVM The cross context VM structure.
5835 * @param idMsr The MSR to look up.
5836 */
5837# ifndef IN_RING3
5838static
5839# endif
5840PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5841{
5842 /*
5843 * Binary lookup.
5844 */
5845 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5846 if (!cRanges)
5847 return NULL;
5848 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5849 for (;;)
5850 {
5851 uint32_t i = cRanges / 2;
5852 if (idMsr < paRanges[i].uFirst)
5853 {
5854 if (i == 0)
5855 break;
5856 cRanges = i;
5857 }
5858 else if (idMsr > paRanges[i].uLast)
5859 {
5860 i++;
5861 if (i >= cRanges)
5862 break;
5863 cRanges -= i;
5864 paRanges = &paRanges[i];
5865 }
5866 else
5867 {
5868 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5869 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5870 return &paRanges[i];
5871 }
5872 }
5873
5874# ifdef VBOX_STRICT
5875 /*
5876 * Linear lookup to verify the above binary search.
5877 */
5878 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5879 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5880 while (cLeft-- > 0)
5881 {
5882 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5883 {
5884 AssertFailed();
5885 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5886 return cpumLookupMsrRange(pVM, pCur->uValue);
5887 return pCur;
5888 }
5889 pCur++;
5890 }
5891# endif
5892 return NULL;
5893}
5894
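/*
 * Editor's note (illustrative sketch, not part of the original file): the
 * lookup above is a plain binary search over ranges sorted by uFirst/uLast,
 * with the twist that kCpumMsrRdFn_MsrAlias entries redirect to another MSR.
 * The same idea on a hypothetical, simplified range array looks like this:
 *
 *     struct Range { uint32_t uFirst, uLast; };
 *     static const struct Range *lookup(const struct Range *pa, uint32_t c, uint32_t idMsr)
 *     {
 *         while (c > 0)
 *         {
 *             uint32_t i = c / 2;
 *             if (idMsr < pa[i].uFirst)
 *                 c = i;                      // keep searching the lower half
 *             else if (idMsr > pa[i].uLast)
 *             {
 *                 pa += i + 1;                // keep searching the upper half
 *                 c  -= i + 1;
 *             }
 *             else
 *                 return &pa[i];              // idMsr falls inside this range
 *         }
 *         return NULL;
 *     }
 */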
5895
5896/**
5897 * Query a guest MSR.
5898 *
5899 * The caller is responsible for checking privilege if the call is the result of
5900 * a RDMSR instruction. We'll do the rest.
5901 *
5902 * @retval VINF_SUCCESS on success.
5903 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5904 * current context (raw-mode or ring-0).
5905 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5906 * expected to take the appropriate actions. @a *puValue is set to 0.
5907 * @param pVCpu The cross context virtual CPU structure.
5908 * @param idMsr The MSR.
5909 * @param puValue Where to return the value.
5910 *
5911 * @remarks This will always return the right values, even when we're in the
5912 * recompiler.
5913 */
5914VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5915{
5916 *puValue = 0;
5917
5918 VBOXSTRICTRC rcStrict;
5919 PVM pVM = pVCpu->CTX_SUFF(pVM);
5920 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5921 if (pRange)
5922 {
5923 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5924 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5925
5926 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5927 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5928
5929 STAM_COUNTER_INC(&pRange->cReads);
5930 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5931
5932 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5933 if (rcStrict == VINF_SUCCESS)
5934 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5935 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5936 {
5937 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5938 STAM_COUNTER_INC(&pRange->cGps);
5939 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5940 }
5941#ifndef IN_RING3
5942 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5943 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5944#endif
5945 else
5946 {
5947 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5948 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5949 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5950 Assert(rcStrict != VERR_EM_INTERPRETER);
5951 }
5952 }
5953 else
5954 {
5955 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5956 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5957 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5958 rcStrict = VERR_CPUM_RAISE_GP_0;
5959 }
5960 return rcStrict;
5961}
5962
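/*
 * Editor's note (illustrative sketch, not part of the original file): a typical
 * RDMSR emulation caller, with hypothetical raise-#GP and go-to-ring-3 helpers,
 * would consume the status codes documented above roughly like this:
 *
 *     uint64_t uValue;
 *     VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, idMsr, &uValue);
 *     if (rcStrict == VINF_SUCCESS)
 *     {   // load EDX:EAX from uValue and advance RIP
 *     }
 *     else if (rcStrict == VERR_CPUM_RAISE_GP_0)
 *     {   // inject #GP(0) into the guest
 *     }
 *     else // VINF_CPUM_R3_MSR_READ (raw-mode / ring-0 only)
 *     {   // return to ring-3 and retry the read there
 *     }
 */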
5963
5964/**
5965 * Writes to a guest MSR.
5966 *
5967 * The caller is responsible for checking privilege if the call is the result of
5968 * a WRMSR instruction. We'll do the rest.
5969 *
5970 * @retval VINF_SUCCESS on success.
5971 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
5972 * current context (raw-mode or ring-0).
5973 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
5974 * appropriate actions.
5975 *
5976 * @param pVCpu The cross context virtual CPU structure.
5977 * @param idMsr The MSR id.
5978 * @param uValue The value to set.
5979 *
5980 * @remarks Everyone changing MSR values, including the recompiler, shall do it
5981 * by calling this method. This makes sure we have current values and
5982 * that we trigger all the right actions when something changes.
5983 *
5984 * For performance reasons, this actually isn't entirely true for some
5985 * MSRs when in HM mode. The code here and in HM must be aware of
5986 * this.
5987 */
5988VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
5989{
5990 VBOXSTRICTRC rcStrict;
5991 PVM pVM = pVCpu->CTX_SUFF(pVM);
5992 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5993 if (pRange)
5994 {
5995 STAM_COUNTER_INC(&pRange->cWrites);
5996 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
5997
5998 if (!(uValue & pRange->fWrGpMask))
5999 {
6000 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6001 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6002
6003 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6004 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6005
6006 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6007 if (uValueAdjusted != uValue)
6008 {
6009 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6010 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6011 }
6012
6013 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6014 if (rcStrict == VINF_SUCCESS)
6015 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6016 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6017 {
6018 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6019 STAM_COUNTER_INC(&pRange->cGps);
6020 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6021 }
6022#ifndef IN_RING3
6023 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6024 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6025#endif
6026 else
6027 {
6028 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6029 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6030 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6031 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6032 Assert(rcStrict != VERR_EM_INTERPRETER);
6033 }
6034 }
6035 else
6036 {
6037 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6038 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6039 STAM_COUNTER_INC(&pRange->cGps);
6040 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6041 rcStrict = VERR_CPUM_RAISE_GP_0;
6042 }
6043 }
6044 else
6045 {
6046 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6047 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6048 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6049 rcStrict = VERR_CPUM_RAISE_GP_0;
6050 }
6051 return rcStrict;
6052}
6053
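/*
 * Editor's note (illustrative, not part of the original file): the two
 * per-range masks used above behave differently.  Bits set in fWrGpMask make
 * the whole write raise #GP(0); bits set in fWrIgnMask are silently dropped
 * before the worker sees the value.  With hypothetical masks fWrGpMask=0xff00
 * and fWrIgnMask=0x00f0:
 *
 *     guest writes 0x1234:  0x1234 & 0xff00 != 0          -> #GP(0), worker never called
 *     guest writes 0x00ff:  0x00ff & 0xff00 == 0          -> accepted
 *                           uValueAdjusted = 0x00ff & ~0x00f0 = 0x000f
 *                           -> worker receives 0x000f (raw value 0x00ff passed alongside)
 */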
6054
6055#if defined(VBOX_STRICT) && defined(IN_RING3)
6056/**
6057 * Performs some checks on the static data related to MSRs.
6058 *
6059 * @returns VINF_SUCCESS on success, error on failure.
6060 */
6061int cpumR3MsrStrictInitChecks(void)
6062{
6063#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6064 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6065#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6066 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6067
6068 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6069 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6070 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6071 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6072 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6073 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6074 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6075 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6076 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6077 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6078 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6079 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6080 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6081 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6082 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6083 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6084 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6085 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6086 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6087 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6088 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6089 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6090 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6091 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6092 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6093 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6094 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6095 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6096 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6097 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6098 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6099 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6100 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6101 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6102 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6103 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6104 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6105 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6106 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6107 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6108 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6109 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6110 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6111 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6112 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6113 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6114 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6115 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6116 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6117 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6141
6142 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6143 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6144 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6145 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6146 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6147 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6148 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6149 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6150 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6151
6152 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6153 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6154 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6155 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6156 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6157 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6158 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6159 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6160 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6161 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6162 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6163 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6164 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6165 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6166 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6167 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6168 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6169 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6170 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6171 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6172 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6173 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6174 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6175 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6176 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6177 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6178 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6179 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6180 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6181 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6182 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6183 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6184 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6185 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6186 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6187 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6188 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6189 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6190 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6191 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6192 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6193 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6194 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6195 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6196 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6197 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6198 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6199 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6200 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6201 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6202 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6203 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6204 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6205 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6206 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6207 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6208 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6209 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6210 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6211 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6212 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6213 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6214 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6218 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6219 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6220 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6221 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6222 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6223 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6224 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6225
6226 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6227 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6228 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6229 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6230
6231 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6232 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6233 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6234 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6235 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6236 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6237 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6238 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6239 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6240 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6241 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6242 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6243 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6244 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6245 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6246 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6247 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6248 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6249 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6250 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6251 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6252 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6253 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6254 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6255 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6256 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6257 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6258 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6259 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6260 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6261 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6262 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6263 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6264 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6265 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6266 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6267 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6268 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6269 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6270 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6271 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6272 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6273 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6274 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6275 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6276 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6277 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6278 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6279 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6280 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6281 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6282 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6283 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6284 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6285 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6292 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6293 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6294 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6295 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6296 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6297 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6298 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6299 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6300 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6301 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6302 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6303 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6304 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6305 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6306 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6307 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6308 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6311 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6312 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6313 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6314 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6315 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6316 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6317 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6318 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6319 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6320 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6321 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6322
6323 CPUM_ASSERT_RD_MSR_FN(Gim);
6324
6325 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6326 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6327 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6328 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6329 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6330 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6331 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6332 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6333 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6334 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6335 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6336 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6337 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6338 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6339 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6340 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6341 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6342 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6343 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6344 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6345 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6346 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6347 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6348 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6349 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6350 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6351 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6352 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6353 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6354 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6355 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6356 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6357 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6358 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6359 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6360 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6361 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6362 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6363 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6364 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6365 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6366 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6367 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6368 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6369 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6370 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6371 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6372 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6375
6376 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6377 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6378 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6379 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6380 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6381 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6382 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6383 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6384 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6385
6386 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6387 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6388 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6389 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6390 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6391 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6392 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6393 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6394 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6395 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6396 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6397 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6398 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6399 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6400 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6401 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6402 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6403 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6404 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6405 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6406 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6407 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6408 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6409 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6410 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6411 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6412 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6413 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6414 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6415 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6416 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6417 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6418 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6419 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6420 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6421 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6422 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6423 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6424 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6425 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6426 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6427 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6428 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6429 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6430 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6431 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6432 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6433 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6434 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6435 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6436 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6437
6438 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6439 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6440
6441 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6442 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6443 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6444 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6445 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6446 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6447 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6448 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6449 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6450 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6451 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6452 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6453 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6454 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6455 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6456 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6457 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6458 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6459 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6460 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6461 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6462 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6463 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6464 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6465 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6466 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6467 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6468 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6469 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6470 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6471 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6472 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6473 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6474 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6475 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6476 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6477 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6478 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6479 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6480 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6481 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6482 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6483 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6484 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6485 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6486 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6487 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6488 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6489 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6490 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6491 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6492 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6493 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6501 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6502 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6503 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6504 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6505 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6506 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6507 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6508 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6509 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6510 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6511 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6512 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6513 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6514 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6515 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6516 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6519 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6520 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6521 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6522 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6523 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6524 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6525 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6526 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6527 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6528 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6529 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6530
6531 CPUM_ASSERT_WR_MSR_FN(Gim);
6532
6533 return VINF_SUCCESS;
6534}
6535#endif /* VBOX_STRICT && IN_RING3 */
6536
6537
6538/**
6539 * Gets the scalable bus frequency.
6540 *
6541 * The bus frequency is used as a base in several MSRs that give the CPU and
6542 * other frequency ratios.
6543 *
6544 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6545 * @param pVM The cross context VM structure.
6546 */
6547VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6548{
6549 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6550 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6551 uFreq = CPUM_SBUSFREQ_100MHZ;
6552 return uFreq;
6553}
6554
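/*
 * Editor's note (illustrative, not part of the original file): MSR workers that
 * report frequency ratios derive absolute clocks from this base.  For example,
 * with the 100 MHz fallback and a hypothetical platform ratio of 28:
 *
 *     uint64_t uBusHz  = CPUMGetGuestScalableBusFrequency(pVM);   // 100000000 when unknown
 *     uint64_t uCoreHz = uBusHz * 28;                             // 2.8 GHz
 */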
6555
6556/**
6557 * Sets the guest EFER MSR without performing any additional checks.
6558 *
6559 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6560 * @param uOldEfer The previous EFER MSR value.
6561 * @param uValidEfer The new, validated EFER MSR value.
6562 *
6563 * @remarks One would normally call CPUMQueryValidatedGuestEfer to validate the
6564 * new value first, and then use this function to perform the actual EFER transition.
6565 */
6566VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6567{
6568 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6569
6570 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6571 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6572 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6573 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6574 {
6575 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6576 HMFlushTLB(pVCpu);
6577
6578 /* Notify PGM about NXE changes. */
6579 if ( (uOldEfer & MSR_K6_EFER_NXE)
6580 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6581 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6582 }
6583}
6584
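/*
 * Editor's note (illustrative sketch, not part of the original file): the EFER
 * write sequence suggested by the remark above is validate first, then commit.
 * Assuming a hypothetical caller that has pVM, pVCpu, the current uCr0 and the
 * guest's requested uNewEfer at hand:
 *
 *     uint64_t uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
 *     uint64_t uValidEfer;
 *     int rc = CPUMQueryValidatedGuestEfer(pVM, uCr0, uOldEfer, uNewEfer, &uValidEfer);
 *     if (RT_SUCCESS(rc))
 *         CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
 *     else
 *     {   // VERR_CPUM_RAISE_GP_0: inject #GP(0) for the offending WRMSR
 *     }
 */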
6585
6586/**
6587 * Checks if a guest PAT MSR write is valid.
6588 *
6589 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6590 * @param uValue The PAT MSR value.
6591 */
6592VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6593{
6594 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6595 {
6596 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6597 uint8_t uType = (uint8_t)(uValue >> cShift);
6598 if ((uType >= 8) || (uType == 2) || (uType == 3))
6599 {
6600 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#llx)\n", cShift + 7, cShift, uValue, uType));
6601 return false;
6602 }
6603 }
6604 return true;
6605}
6606
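/*
 * Editor's note (illustrative, not part of the original file): each of the
 * eight PAT entries only accepts memory types 0, 1, 4, 5, 6 and 7; types 2 and
 * 3 and the upper five bits of every byte are reserved.  For example:
 *
 *     CPUMIsPatMsrValid(UINT64_C(0x0007040600070406));  // true  - the usual reset value
 *     CPUMIsPatMsrValid(UINT64_C(0x0007040600070402));  // false - type 2 is reserved
 *     CPUMIsPatMsrValid(UINT64_C(0x0007040600070486));  // false - reserved bit 7 set in byte 0
 */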
6607
6608/**
6609 * Validates an EFER MSR write.
6610 *
6611 * @returns VBox status code.
6612 * @param pVM The cross context VM structure.
6613 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6614 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6615 * @param uNewEfer The new EFER MSR value being written.
6616 * @param puValidEfer Where to store the validated EFER (only updated if
6617 * this function returns VINF_SUCCESS).
6618 */
6619VMMDECL(int) CPUMQueryValidatedGuestEfer(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6620{
6621 /* #GP(0) if anything outside the allowed bits is set. */
6622 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6623 if (uNewEfer & ~fMask)
6624 {
6625 Log(("CPUM: Settings disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6626 return VERR_CPUM_RAISE_GP_0;
6627 }
6628
6629 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6630 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6631 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6632 && (uCr0 & X86_CR0_PG))
6633 {
6634 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6635 return VERR_CPUM_RAISE_GP_0;
6636 }
6637
6638 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6639 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6640 | MSR_K6_EFER_LME
6641 | MSR_K6_EFER_LMA /* ignored anyway */
6642 | MSR_K6_EFER_SCE
6643 | MSR_K6_EFER_FFXSR
6644 | MSR_K6_EFER_SVME)),
6645 ("Unexpected value %#RX64\n", uNewEfer));
6646
6647 /* Ignore EFER.LMA, it's updated when setting CR0. */
6648 fMask &= ~MSR_K6_EFER_LMA;
6649
6650 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6651 return VINF_SUCCESS;
6652}
6653
6654
6655/**
6656 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6657 *
6658 * @returns Mask of valid EFER bits.
6659 * @param pVM The cross context VM structure.
6660 *
6661 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6662 * rather a read-only bit.
6663 */
6664VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6665{
6666 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6667 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6668 : 0;
6669 uint64_t fMask = 0;
6670 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6671
6672 /* Collect the bits the guest is allowed to change. (LMA is read-only and is added separately via fIgnoreMask.) */
6673 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6674 fMask |= MSR_K6_EFER_NXE;
6675 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6676 fMask |= MSR_K6_EFER_LME;
6677 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6678 fMask |= MSR_K6_EFER_SCE;
6679 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6680 fMask |= MSR_K6_EFER_FFXSR;
6681 if (pVM->cpum.s.GuestFeatures.fSvm)
6682 fMask |= MSR_K6_EFER_SVME;
6683
6684 return (fIgnoreMask | fMask);
6685}
6686
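/*
 * Editor's note (illustrative, not part of the original file): for a guest CPU
 * profile exposing NX, long mode and SYSCALL but neither FFXSR nor SVM, the
 * mask returned above is
 *
 *     MSR_K6_EFER_LMA | MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_SCE
 *
 * so a WRMSR attempting to set, say, MSR_K6_EFER_SVME lands in the disallowed-bit
 * path of CPUMQueryValidatedGuestEfer() and raises #GP(0).
 */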
6687
6688/**
6689 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6690 *
6691 * @returns The register value.
6692 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6693 * @thread EMT(pVCpu)
6694 */
6695VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6696{
6697 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6698 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6699}
6700
6701
6702/**
6703 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6704 *
6705 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6706 * @param uValue The new value.
6707 * @thread EMT(pVCpu)
6708 */
6709VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6710{
6711 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6712 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6713}
6714
6715
6716/**
6717 * Fast way for HM to access the IA32_SPEC_CTRL register.
6718 *
6719 * @returns The register value.
6720 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6721 * @thread EMT(pVCpu)
6722 */
6723VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6724{
6725 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6726}
6727
6728
6729/**
6730 * Fast way for HM to access the IA32_SPEC_CTRL register.
6731 *
6732 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6733 * @param uValue The new value.
6734 * @thread EMT(pVCpu)
6735 */
6736VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6737{
6738 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6739}
6740