VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp@75142

Last change on this file since 75142 was 74648, checked in by vboxsync on 2018-10-07

VMM/IEM, CPUM: Nested VMX: bugref:9180 VM-exit bits; Add TPR virtualization for WRMSR.

1/* $Id: CPUMAllMsrs.cpp 74648 2018-10-07 06:20:55Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#include <VBox/vmm/tm.h>
28#include <VBox/vmm/gim.h>
29#include "CPUMInternal.h"
30#include <VBox/vmm/vm.h>
31#include <VBox/err.h>
32
33
34/*********************************************************************************************************************************
35* Defined Constants And Macros *
36*********************************************************************************************************************************/
37/**
38 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
39 * pointing to it.
40 *
41 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
42 * correctly.
43 */
44#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
45 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
46 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
47 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
48 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
49 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
50 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
51
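/*
 * Editor's note (not part of the original file): the macro above is how range
 * handlers reach per-VCPU backing storage.  The MSR range entry carries an
 * offset into CPUMCPU; the macro validates it and declares a typed pointer to
 * the member.  A minimal usage sketch, modelled on cpumMsrRd_Ia32MtrrFixed
 * further down (the handler and member names here are made up):
 */
#if 0 /* illustrative sketch only */
static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_ExampleCpumCpuBacked(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
{
    RT_NOREF_PV(idMsr);
    CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puBackingValue);
    *puValue = *puBackingValue; /* Read straight from the validated CPUMCPU member. */
    return VINF_SUCCESS;
}
#endif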
52
53/*********************************************************************************************************************************
54* Structures and Typedefs *
55*********************************************************************************************************************************/
56
57/**
58 * Implements reading one or more MSRs.
59 *
60 * @returns VBox status code.
61 * @retval VINF_SUCCESS on success.
62 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
63 * current context (raw-mode or ring-0).
64 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
65 *
66 * @param pVCpu The cross context virtual CPU structure.
67 * @param idMsr The MSR we're reading.
68 * @param pRange The MSR range descriptor.
69 * @param puValue Where to return the value.
70 */
71typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
72/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
73typedef FNCPUMRDMSR *PFNCPUMRDMSR;
74
75
76/**
77 * Implements writing one or more MSRs.
78 *
79 * @retval VINF_SUCCESS on success.
80 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
81 * current context (raw-mode or ring-0).
82 * @retval VERR_CPUM_RAISE_GP_0 on failure.
83 *
84 * @param pVCpu The cross context virtual CPU structure.
85 * @param idMsr The MSR we're writing.
86 * @param pRange The MSR range descriptor.
87 * @param uValue The value to set, ignored bits masked.
88 * @param uRawValue The raw value with the ignored bits not masked.
89 */
90typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
91/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
92typedef FNCPUMWRMSR *PFNCPUMWRMSR;
93
94
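/*
 * Editor's note (not part of the original file): the two callback types above
 * define the whole MSR dispatch contract -- a read worker fills in *puValue and
 * returns VINF_SUCCESS, VINF_CPUM_R3_MSR_READ or VERR_CPUM_RAISE_GP_0, while a
 * write worker gets both the masked and the raw value and returns the
 * corresponding write statuses.  A minimal hypothetical worker pair in the
 * style used throughout this file (the MSR name is made up):
 */
#if 0 /* illustrative sketch only */
/** @callback_method_impl{FNCPUMRDMSR} */
static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_ExampleAlwaysZero(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
{
    RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
    *puValue = 0; /* Hypothetical MSR that always reads as zero. */
    return VINF_SUCCESS;
}

/** @callback_method_impl{FNCPUMWRMSR} */
static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ExampleAlwaysZero(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
{
    RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
    return VINF_SUCCESS; /* Writes are accepted and ignored. */
}
#endif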
95
96/*
97 * Generic functions.
98 * Generic functions.
99 * Generic functions.
100 */
101
102
103/** @callback_method_impl{FNCPUMRDMSR} */
104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
105{
106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
107 *puValue = pRange->uValue;
108 return VINF_SUCCESS;
109}
110
111
112/** @callback_method_impl{FNCPUMWRMSR} */
113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
114{
115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
116 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
117 return VINF_SUCCESS;
118}
119
120
121/** @callback_method_impl{FNCPUMRDMSR} */
122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
123{
124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
125 return VERR_CPUM_RAISE_GP_0;
126}
127
128
129/** @callback_method_impl{FNCPUMWRMSR} */
130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
131{
132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
133 Assert(pRange->fWrGpMask == UINT64_MAX);
134 return VERR_CPUM_RAISE_GP_0;
135}
136
137
138
139
140/*
141 * IA32
142 * IA32
143 * IA32
144 */
145
146/** @callback_method_impl{FNCPUMRDMSR} */
147static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
148{
149 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
150 *puValue = 0; /** @todo implement machine check injection. */
151 return VINF_SUCCESS;
152}
153
154
155/** @callback_method_impl{FNCPUMWRMSR} */
156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
157{
158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
159 /** @todo implement machine check injection. */
160 return VINF_SUCCESS;
161}
162
163
164/** @callback_method_impl{FNCPUMRDMSR} */
165static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
166{
167 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
168 *puValue = 0; /** @todo implement machine check injection. */
169 return VINF_SUCCESS;
170}
171
172
173/** @callback_method_impl{FNCPUMWRMSR} */
174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
175{
176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
177 /** @todo implement machine check injection. */
178 return VINF_SUCCESS;
179}
180
181
182/** @callback_method_impl{FNCPUMRDMSR} */
183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
184{
185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
186 *puValue = TMCpuTickGet(pVCpu);
187#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
188 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
189#endif
190 return VINF_SUCCESS;
191}
192
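/*
 * Editor's note (not part of the original file): with nested SVM hardware
 * virtualization the value read above is additionally biased by the TSC offset
 * the outer guest programmed for its nested guest, i.e. roughly
 * guest_tsc = vcpu_tsc + tsc_offset with 64-bit wrap-around.  A sketch of that
 * arithmetic with made-up names, assuming a simple additive offset:
 */
#if 0 /* illustrative sketch only */
static uint64_t exampleApplyTscOffset(uint64_t uVCpuTsc, uint64_t uNestedTscOffset)
{
    return uVCpuTsc + uNestedTscOffset; /* Wraps modulo 2^64, like real hardware. */
}
#endif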
193
194/** @callback_method_impl{FNCPUMWRMSR} */
195static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
196{
197 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
198 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
199 return VINF_SUCCESS;
200}
201
202
203/** @callback_method_impl{FNCPUMRDMSR} */
204static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
205{
206 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
207 uint64_t uValue = pRange->uValue;
208 if (uValue & 0x1f00)
209 {
210 /* Max allowed bus ratio present. */
211 /** @todo Implement scaled BUS frequency. */
212 }
213
214 *puValue = uValue;
215 return VINF_SUCCESS;
216}
217
218
219/** @callback_method_impl{FNCPUMRDMSR} */
220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
221{
222 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
223 return APICGetBaseMsr(pVCpu, puValue);
224}
225
226
227/** @callback_method_impl{FNCPUMWRMSR} */
228static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
229{
230 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
231 return APICSetBaseMsr(pVCpu, uValue);
232}
233
234
235/**
236 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
237 *
238 * @returns Fixed IA32_FEATURE_CONTROL value.
239 * @param pVCpu The cross context per CPU structure.
240 */
241VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
242{
243 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
244 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
245
246 /* Report VMX features. */
247 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
248 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
249
250 return fFeatCtl;
251}
252
253/** @callback_method_impl{FNCPUMRDMSR} */
254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
255{
256 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
257 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
258 return VINF_SUCCESS;
259}
260
261
262/** @callback_method_impl{FNCPUMWRMSR} */
263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
264{
265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
266 return VERR_CPUM_RAISE_GP_0;
267}
268
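/*
 * Editor's note (not part of the original file): assuming the usual Intel bit
 * layout for IA32_FEATURE_CONTROL (lock in bit 0, enable-VMX-outside-SMX in
 * bit 2), the fixed value built by CPUMGetGuestIa32FeatureControl above can be
 * pictured like this (names are made up):
 */
#if 0 /* illustrative sketch only */
static uint64_t exampleFeatureControl(int fExposeVmx)
{
    uint64_t uVal = UINT64_C(1) << 0;   /* Lock bit: guest may never rewrite the MSR. */
    if (fExposeVmx)
        uVal |= UINT64_C(1) << 2;       /* VMXON permitted outside SMX operation. */
    return uVal;                        /* 0x1 without VMX, 0x5 with VMX exposed. */
}
#endif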
269
270/** @callback_method_impl{FNCPUMRDMSR} */
271static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
272{
273 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
274 /** @todo fake microcode update. */
275 *puValue = pRange->uValue;
276 return VINF_SUCCESS;
277}
278
279
280/** @callback_method_impl{FNCPUMWRMSR} */
281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
282{
283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
284 /* Normally, zero is written to Ia32BiosSignId before reading it in order
285 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
286 of the database entry should take care of most illegal writes for now, so
287 just ignore all writes atm. */
288 return VINF_SUCCESS;
289}
290
291
292/** @callback_method_impl{FNCPUMWRMSR} */
293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
294{
295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
296 /** @todo Fake bios update trigger better. The value is the address to an
297 * update package, I think. We should probably GP if it's invalid. */
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
304 *
305 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
306 * @param pVCpu The cross context per CPU structure.
307 */
308VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
309{
310 /* We do not support dual-monitor treatment for SMI and SMM. */
311 /** @todo SMM. */
312 RT_NOREF(pVCpu);
313 return 0;
314}
315
316
317/** @callback_method_impl{FNCPUMRDMSR} */
318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
319{
320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
321 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
322 return VINF_SUCCESS;
323}
324
325
326/** @callback_method_impl{FNCPUMWRMSR} */
327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
328{
329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
330 /** @todo SMM. */
331 return VINF_SUCCESS;
332}
333
334
335/** @callback_method_impl{FNCPUMRDMSR} */
336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
337{
338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
339 /** @todo check CPUID leaf 0ah. */
340 *puValue = 0;
341 return VINF_SUCCESS;
342}
343
344
345/** @callback_method_impl{FNCPUMWRMSR} */
346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
347{
348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
349 /** @todo check CPUID leaf 0ah. */
350 return VINF_SUCCESS;
351}
352
353
354/** @callback_method_impl{FNCPUMRDMSR} */
355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
356{
357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
358 /** @todo return 0x1000 if we try emulate mwait 100% correctly. */
359 *puValue = 0x40; /** @todo Change to CPU cache line size. */
360 return VINF_SUCCESS;
361}
362
363
364/** @callback_method_impl{FNCPUMWRMSR} */
365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
366{
367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
368 /** @todo should remember writes, though it's supposedly something only a BIOS
369 * would write, so it's not extremely important. */
370 return VINF_SUCCESS;
371}
372
373/** @callback_method_impl{FNCPUMRDMSR} */
374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
375{
376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
377 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
378 * what we want? */
379 *puValue = TMCpuTickGet(pVCpu);
380#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
381 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
382#endif
383 return VINF_SUCCESS;
384}
385
386
387/** @callback_method_impl{FNCPUMWRMSR} */
388static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
389{
390 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
391 /** @todo Write MPERF: Calc adjustment. */
392 return VINF_SUCCESS;
393}
394
395
396/** @callback_method_impl{FNCPUMRDMSR} */
397static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
398{
399 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
400 /** @todo Read APERF: Adjust against previously written MPERF value. Is TSC
401 * what we want? */
402 *puValue = TMCpuTickGet(pVCpu);
403#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
404 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
405#endif
406 return VINF_SUCCESS;
407}
408
409
410/** @callback_method_impl{FNCPUMWRMSR} */
411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
412{
413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
414 /** @todo Write APERF: Calc adjustment. */
415 return VINF_SUCCESS;
416}
417
418
419/**
420 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
421 *
422 * @returns Fixed IA32_MTRR_CAP value.
423 * @param pVCpu The cross context per CPU structure.
424 */
425VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
426{
427 RT_NOREF_PV(pVCpu);
428
429 /* This is currently a bit weird. :-) */
430 uint8_t const cVariableRangeRegs = 0;
431 bool const fSystemManagementRangeRegisters = false;
432 bool const fFixedRangeRegisters = false;
433 bool const fWriteCombiningType = false;
434 return cVariableRangeRegs
435 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
436 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
437 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
438}
439
440/** @callback_method_impl{FNCPUMRDMSR} */
441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
442{
443 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
444 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
445 return VINF_SUCCESS;
446}
447
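/*
 * Editor's note (not part of the original file): CPUMGetGuestIa32MtrrCap above
 * composes IA32_MTRR_CAP from a variable-range count in bits 7:0 plus the
 * fixed-range (bit 8), write-combining (bit 10) and SMRR (bit 11) capability
 * flags.  A standalone sketch of the same composition with made-up names:
 */
#if 0 /* illustrative sketch only */
static uint64_t exampleComposeMtrrCap(uint8_t cVarRanges, int fFixed, int fWc, int fSmrr)
{
    return (uint64_t)cVarRanges
         | (fFixed ? UINT64_C(1) << 8  : 0)
         | (fWc    ? UINT64_C(1) << 10 : 0)
         | (fSmrr  ? UINT64_C(1) << 11 : 0);
}
/* exampleComposeMtrrCap(8, 1, 1, 0) == 0x508: eight variable ranges, fixed MTRRs and WC supported. */
#endif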
448
449/** @callback_method_impl{FNCPUMRDMSR} */
450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
451{
452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
453 /** @todo Implement variable MTRR storage. */
454 Assert(pRange->uValue == (idMsr - 0x200) / 2);
455 *puValue = 0;
456 return VINF_SUCCESS;
457}
458
459
460/** @callback_method_impl{FNCPUMWRMSR} */
461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
462{
463 /*
464 * Validate the value.
465 */
466 Assert(pRange->uValue == (idMsr - 0x200) / 2);
467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
468
469 uint8_t uType = uValue & 0xff;
470 if ((uType >= 7) || (uType == 2) || (uType == 3))
471 {
472 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
473 return VERR_CPUM_RAISE_GP_0;
474 }
475
476 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
477 if (fInvPhysMask & uValue)
478 {
479 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
480 idMsr, uValue, uValue & fInvPhysMask));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 /*
485 * Store it.
486 */
487 /** @todo Implement variable MTRR storage. */
488 return VINF_SUCCESS;
489}
490
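/*
 * Editor's note (not part of the original file): the type check above rejects
 * the reserved MTRR memory-type encodings, leaving UC (0), WC (1), WT (4),
 * WP (5) and WB (6) as the accepted values; the second check rejects base
 * addresses with bits set above the guest's physical address width.  The same
 * type predicate as a standalone sketch (name is made up):
 */
#if 0 /* illustrative sketch only */
static int exampleIsValidMtrrType(uint8_t uType)
{
    return uType < 7 && uType != 2 && uType != 3; /* Mirrors the rejection test above. */
}
#endif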
491
492/** @callback_method_impl{FNCPUMRDMSR} */
493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
494{
495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
496 /** @todo Implement variable MTRR storage. */
497 Assert(pRange->uValue == (idMsr - 0x200) / 2);
498 *puValue = 0;
499 return VINF_SUCCESS;
500}
501
502
503/** @callback_method_impl{FNCPUMWRMSR} */
504static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
505{
506 /*
507 * Validate the value.
508 */
509 Assert(pRange->uValue == (idMsr - 0x200) / 2);
510 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
511
512 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
513 if (fInvPhysMask & uValue)
514 {
515 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
516 idMsr, uValue, uValue & fInvPhysMask));
517 return VERR_CPUM_RAISE_GP_0;
518 }
519
520 /*
521 * Store it.
522 */
523 /** @todo Implement variable MTRR storage. */
524 return VINF_SUCCESS;
525}
526
527
528/** @callback_method_impl{FNCPUMRDMSR} */
529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
530{
531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
532 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
533 *puValue = *puFixedMtrr;
534 return VINF_SUCCESS;
535}
536
537
538/** @callback_method_impl{FNCPUMWRMSR} */
539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
540{
541 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
542 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
543
544 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
545 {
546 uint8_t uType = (uint8_t)(uValue >> cShift);
547 if ((uType >= 7) || (uType == 2) || (uType == 3))
548 {
549 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
550 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
551 return VERR_CPUM_RAISE_GP_0;
552 }
553 }
554 *puFixedMtrr = uValue;
555 return VINF_SUCCESS;
556}
557
558
559/** @callback_method_impl{FNCPUMRDMSR} */
560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
561{
562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
563 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
564 return VINF_SUCCESS;
565}
566
567
568/** @callback_method_impl{FNCPUMWRMSR} */
569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
570{
571 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
572
573 uint8_t uType = uValue & 0xff;
574 if ((uType >= 7) || (uType == 2) || (uType == 3))
575 {
576 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
577 return VERR_CPUM_RAISE_GP_0;
578 }
579
580 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
581 return VINF_SUCCESS;
582}
583
584
585/** @callback_method_impl{FNCPUMRDMSR} */
586static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
587{
588 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
589 *puValue = pVCpu->cpum.s.Guest.msrPAT;
590 return VINF_SUCCESS;
591}
592
593
594/** @callback_method_impl{FNCPUMWRMSR} */
595static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
596{
597 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
598 if (CPUMIsPatMsrValid(uValue))
599 {
600 pVCpu->cpum.s.Guest.msrPAT = uValue;
601 return VINF_SUCCESS;
602 }
603 return VERR_CPUM_RAISE_GP_0;
604}
605
606
607/** @callback_method_impl{FNCPUMRDMSR} */
608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
609{
610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
611 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
612 return VINF_SUCCESS;
613}
614
615
616/** @callback_method_impl{FNCPUMWRMSR} */
617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
618{
619 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
620
621 /* Note! We used to mask this by 0xffff, but it turns out real HW doesn't, and
622 there are generally 32 working bits backing this register. */
623 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
624 return VINF_SUCCESS;
625}
626
627
628/** @callback_method_impl{FNCPUMRDMSR} */
629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
630{
631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
632 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
633 return VINF_SUCCESS;
634}
635
636
637/** @callback_method_impl{FNCPUMWRMSR} */
638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
639{
640 if (X86_IS_CANONICAL(uValue))
641 {
642 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
643 return VINF_SUCCESS;
644 }
645 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
646 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
647 return VERR_CPUM_RAISE_GP_0;
648}
649
650
651/** @callback_method_impl{FNCPUMRDMSR} */
652static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
653{
654 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
655 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
656 return VINF_SUCCESS;
657}
658
659
660/** @callback_method_impl{FNCPUMWRMSR} */
661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
662{
663 if (X86_IS_CANONICAL(uValue))
664 {
665 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
666 return VINF_SUCCESS;
667 }
668 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
669 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
670 return VERR_CPUM_RAISE_GP_0;
671}
672
673
674/** @callback_method_impl{FNCPUMRDMSR} */
675static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
676{
677#if 0 /** @todo implement machine checks. */
678 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
679#else
680 *puValue = 0;
681#endif
682 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
683 return VINF_SUCCESS;
684}
685
686
687/** @callback_method_impl{FNCPUMRDMSR} */
688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
689{
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 /** @todo implement machine checks. */
692 *puValue = 0;
693 return VINF_SUCCESS;
694}
695
696
697/** @callback_method_impl{FNCPUMWRMSR} */
698static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
699{
700 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
701 /** @todo implement machine checks. */
702 return VINF_SUCCESS;
703}
704
705
706/** @callback_method_impl{FNCPUMRDMSR} */
707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
708{
709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
710 /** @todo implement machine checks. */
711 *puValue = 0;
712 return VINF_SUCCESS;
713}
714
715
716/** @callback_method_impl{FNCPUMWRMSR} */
717static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
718{
719 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
720 /** @todo implement machine checks. */
721 return VINF_SUCCESS;
722}
723
724
725/** @callback_method_impl{FNCPUMRDMSR} */
726static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
727{
728 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
729 /** @todo implement IA32_DEBUGCTL. */
730 *puValue = 0;
731 return VINF_SUCCESS;
732}
733
734
735/** @callback_method_impl{FNCPUMWRMSR} */
736static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
737{
738 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
739 /** @todo implement IA32_DEBUGCTL. */
740 return VINF_SUCCESS;
741}
742
743
744/** @callback_method_impl{FNCPUMRDMSR} */
745static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
746{
747 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
748 /** @todo implement intel SMM. */
749 *puValue = 0;
750 return VINF_SUCCESS;
751}
752
753
754/** @callback_method_impl{FNCPUMWRMSR} */
755static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
756{
757 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
758 /** @todo implement intel SMM. */
759 return VERR_CPUM_RAISE_GP_0;
760}
761
762
763/** @callback_method_impl{FNCPUMRDMSR} */
764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
765{
766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
767 /** @todo implement intel SMM. */
768 *puValue = 0;
769 return VINF_SUCCESS;
770}
771
772
773/** @callback_method_impl{FNCPUMWRMSR} */
774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
775{
776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
777 /** @todo implement intel SMM. */
778 return VERR_CPUM_RAISE_GP_0;
779}
780
781
782/** @callback_method_impl{FNCPUMRDMSR} */
783static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
784{
785 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
786 /** @todo implement intel direct cache access (DCA)?? */
787 *puValue = 0;
788 return VINF_SUCCESS;
789}
790
791
792/** @callback_method_impl{FNCPUMWRMSR} */
793static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
794{
795 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
796 /** @todo implement intel direct cache access (DCA)?? */
797 return VINF_SUCCESS;
798}
799
800
801/** @callback_method_impl{FNCPUMRDMSR} */
802static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
803{
804 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
805 /** @todo implement intel direct cache access (DCA)?? */
806 *puValue = 0;
807 return VINF_SUCCESS;
808}
809
810
811/** @callback_method_impl{FNCPUMRDMSR} */
812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
813{
814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
815 /** @todo implement intel direct cache access (DCA)?? */
816 *puValue = 0;
817 return VINF_SUCCESS;
818}
819
820
821/** @callback_method_impl{FNCPUMWRMSR} */
822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
823{
824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
825 /** @todo implement intel direct cache access (DCA)?? */
826 return VINF_SUCCESS;
827}
828
829
830/** @callback_method_impl{FNCPUMRDMSR} */
831static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
832{
833 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
834 /** @todo implement IA32_PERFEVTSEL0+. */
835 *puValue = 0;
836 return VINF_SUCCESS;
837}
838
839
840/** @callback_method_impl{FNCPUMWRMSR} */
841static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
842{
843 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
844 /** @todo implement IA32_PERFEVTSEL0+. */
845 return VINF_SUCCESS;
846}
847
848
849/** @callback_method_impl{FNCPUMRDMSR} */
850static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
851{
852 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
853 uint64_t uValue = pRange->uValue;
854
855 /* Always provide the max bus ratio for now. XNU expects it. */
856 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
857
858 PVM pVM = pVCpu->CTX_SUFF(pVM);
859 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
860 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
861 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
862 if (uTscRatio > 0x1f)
863 uTscRatio = 0x1f;
864 uValue |= (uint64_t)uTscRatio << 40;
865
866 *puValue = uValue;
867 return VINF_SUCCESS;
868}
869
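/*
 * Editor's note (not part of the original file): the read handler above derives
 * the maximum bus ratio by rounding TSC-frequency / scalable-bus-frequency to
 * the nearest integer, clamping it to 5 bits and placing it at bits 44:40.  A
 * standalone sketch with a worked example (names are made up):
 */
#if 0 /* illustrative sketch only */
static uint64_t exampleMaxBusRatioBits(uint64_t uTscHz, uint64_t uScalableBusHz)
{
    uint64_t uRatio = (uTscHz + uScalableBusHz / 2) / uScalableBusHz; /* round to nearest */
    if (uRatio > 0x1f)
        uRatio = 0x1f;                                                /* 5-bit field */
    return uRatio << 40;
}
/* exampleMaxBusRatioBits(2800000000, 100000000) places a 28x ratio at bits 44:40. */
#endif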
870
871/** @callback_method_impl{FNCPUMWRMSR} */
872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
873{
874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
875 /* Pentium4 allows writing, but all bits are ignored. */
876 return VINF_SUCCESS;
877}
878
879
880/** @callback_method_impl{FNCPUMRDMSR} */
881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
882{
883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
884 /** @todo implement IA32_PERFCTL. */
885 *puValue = 0;
886 return VINF_SUCCESS;
887}
888
889
890/** @callback_method_impl{FNCPUMWRMSR} */
891static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
892{
893 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
894 /** @todo implement IA32_PERFCTL. */
895 return VINF_SUCCESS;
896}
897
898
899/** @callback_method_impl{FNCPUMRDMSR} */
900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
901{
902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
903 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
904 *puValue = 0;
905 return VINF_SUCCESS;
906}
907
908
909/** @callback_method_impl{FNCPUMWRMSR} */
910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
911{
912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
913 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
914 return VINF_SUCCESS;
915}
916
917
918/** @callback_method_impl{FNCPUMRDMSR} */
919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
920{
921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
922 /** @todo implement performance counters. */
923 *puValue = 0;
924 return VINF_SUCCESS;
925}
926
927
928/** @callback_method_impl{FNCPUMWRMSR} */
929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
930{
931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
932 /** @todo implement performance counters. */
933 return VINF_SUCCESS;
934}
935
936
937/** @callback_method_impl{FNCPUMRDMSR} */
938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
939{
940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
941 /** @todo implement performance counters. */
942 *puValue = 0;
943 return VINF_SUCCESS;
944}
945
946
947/** @callback_method_impl{FNCPUMWRMSR} */
948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
949{
950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
951 /** @todo implement performance counters. */
952 return VINF_SUCCESS;
953}
954
955
956/** @callback_method_impl{FNCPUMRDMSR} */
957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
958{
959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
960 /** @todo implement performance counters. */
961 *puValue = 0;
962 return VINF_SUCCESS;
963}
964
965
966/** @callback_method_impl{FNCPUMWRMSR} */
967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
968{
969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
970 /** @todo implement performance counters. */
971 return VINF_SUCCESS;
972}
973
974
975/** @callback_method_impl{FNCPUMRDMSR} */
976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
977{
978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
979 /** @todo implement performance counters. */
980 *puValue = 0;
981 return VINF_SUCCESS;
982}
983
984
985/** @callback_method_impl{FNCPUMWRMSR} */
986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
987{
988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
989 /** @todo implement performance counters. */
990 return VINF_SUCCESS;
991}
992
993
994/** @callback_method_impl{FNCPUMRDMSR} */
995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
996{
997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
998 /** @todo implement performance counters. */
999 *puValue = 0;
1000 return VINF_SUCCESS;
1001}
1002
1003
1004/** @callback_method_impl{FNCPUMWRMSR} */
1005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1006{
1007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1008 /** @todo implement performance counters. */
1009 return VINF_SUCCESS;
1010}
1011
1012
1013/** @callback_method_impl{FNCPUMRDMSR} */
1014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1015{
1016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1017 /** @todo implement performance counters. */
1018 *puValue = 0;
1019 return VINF_SUCCESS;
1020}
1021
1022
1023/** @callback_method_impl{FNCPUMWRMSR} */
1024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1025{
1026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1027 /** @todo implement performance counters. */
1028 return VINF_SUCCESS;
1029}
1030
1031
1032/** @callback_method_impl{FNCPUMRDMSR} */
1033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1034{
1035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1036 /** @todo implement IA32_CLOCK_MODULATION. */
1037 *puValue = 0;
1038 return VINF_SUCCESS;
1039}
1040
1041
1042/** @callback_method_impl{FNCPUMWRMSR} */
1043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1044{
1045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1046 /** @todo implement IA32_CLOCK_MODULATION. */
1047 return VINF_SUCCESS;
1048}
1049
1050
1051/** @callback_method_impl{FNCPUMRDMSR} */
1052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1053{
1054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1055 /** @todo implement IA32_THERM_INTERRUPT. */
1056 *puValue = 0;
1057 return VINF_SUCCESS;
1058}
1059
1060
1061/** @callback_method_impl{FNCPUMWRMSR} */
1062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1063{
1064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1065 /** @todo implement IA32_THERM_INTERRUPT. */
1066 return VINF_SUCCESS;
1067}
1068
1069
1070/** @callback_method_impl{FNCPUMRDMSR} */
1071static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1072{
1073 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1074 /** @todo implement IA32_THERM_STATUS. */
1075 *puValue = 0;
1076 return VINF_SUCCESS;
1077}
1078
1079
1080/** @callback_method_impl{FNCPUMWRMSR} */
1081static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1082{
1083 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1084 /** @todo implement IA32_THERM_STATUS. */
1085 return VINF_SUCCESS;
1086}
1087
1088
1089/** @callback_method_impl{FNCPUMRDMSR} */
1090static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1091{
1092 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1093 /** @todo implement IA32_THERM2_CTL. */
1094 *puValue = 0;
1095 return VINF_SUCCESS;
1096}
1097
1098
1099/** @callback_method_impl{FNCPUMWRMSR} */
1100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1101{
1102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1103 /** @todo implement IA32_THERM2_CTL. */
1104 return VINF_SUCCESS;
1105}
1106
1107
1108/** @callback_method_impl{FNCPUMRDMSR} */
1109static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1110{
1111 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1112 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1113 return VINF_SUCCESS;
1114}
1115
1116
1117/** @callback_method_impl{FNCPUMWRMSR} */
1118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1119{
1120 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1121#ifdef LOG_ENABLED
1122 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1123#endif
1124
1125 /* Unsupported bits are generally ignored and stripped by the MSR range
1126 entry that got us here. So, we just need to preserve fixed bits. */
1127 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1128 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1129 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1130
1131 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1132 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1133
1134 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1135 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1136 return VINF_SUCCESS;
1137}
1138
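/*
 * Editor's note (not part of the original file): the write handler above lets
 * the (already range-masked) guest value through but ORs the PEBS-unavailable
 * and BTS-unavailable bits back in, so those facilities always read as
 * unavailable.  Assuming the usual Intel bit positions (BTS unavailable =
 * bit 11, PEBS unavailable = bit 12), the sticky-bit pattern looks like this:
 */
#if 0 /* illustrative sketch only */
static uint64_t exampleStickyMiscEnable(uint64_t uGuestWrittenValue)
{
    return uGuestWrittenValue | (UINT64_C(1) << 11) | (UINT64_C(1) << 12);
}
#endif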
1139
1140/** @callback_method_impl{FNCPUMRDMSR} */
1141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1142{
1143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1144
1145 /** @todo Implement machine check exception injection. */
1146 switch (idMsr & 3)
1147 {
1148 case 0:
1149 case 1:
1150 *puValue = 0;
1151 break;
1152
1153 /* The ADDR and MISC registers aren't accessible since the
1154 corresponding STATUS bits are zero. */
1155 case 2:
1156 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1157 return VERR_CPUM_RAISE_GP_0;
1158 case 3:
1159 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1160 return VERR_CPUM_RAISE_GP_0;
1161 }
1162 return VINF_SUCCESS;
1163}
1164
1165
1166/** @callback_method_impl{FNCPUMWRMSR} */
1167static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1168{
1169 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1170 switch (idMsr & 3)
1171 {
1172 case 0:
1173 /* Ignore writes to the CTL register. */
1174 break;
1175
1176 case 1:
1177 /* According to the specs, the STATUS register can only be written
1178 with the value 0. VBoxCpuReport disagrees for a Pentium M Dothan,
1179 but we implement it according to the specs for now. */
1180 if (uValue != 0)
1181 {
1182 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1183 return VERR_CPUM_RAISE_GP_0;
1184 }
1185 break;
1186
1187 /* The specs state that ADDR and MISC can be cleared by writing zeros.
1188 Writing 1s will GP. Need to figure out how this relates to the
1189 ADDRV and MISCV status flags. If writing is independent of those
1190 bits, we need to know whether the CPU really implements them, since
1191 that is exposed by writing 0 to them.
1192 Implementing the solution with the fewest GPs for now. */
1193 case 2:
1194 if (uValue != 0)
1195 {
1196 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1197 return VERR_CPUM_RAISE_GP_0;
1198 }
1199 break;
1200 case 3:
1201 if (uValue != 0)
1202 {
1203 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1204 return VERR_CPUM_RAISE_GP_0;
1205 }
1206 break;
1207 }
1208 return VINF_SUCCESS;
1209}
1210
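/*
 * Editor's note (not part of the original file): the machine-check bank MSRs
 * come in groups of four consecutive registers (CTL, STATUS, ADDR, MISC), which
 * is why the handlers above switch on idMsr & 3.  On Intel CPUs the banks start
 * at IA32_MC0_CTL (0x400), so the bank and register can be decoded like this
 * (names are made up):
 */
#if 0 /* illustrative sketch only */
static unsigned exampleMcBankFromMsr(uint32_t idMsr)
{
    return (idMsr - UINT32_C(0x400)) / 4;   /* Bank number. */
}
static unsigned exampleMcRegFromMsr(uint32_t idMsr)
{
    return idMsr & 3;                       /* 0=CTL, 1=STATUS, 2=ADDR, 3=MISC. */
}
#endif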
1211
1212/** @callback_method_impl{FNCPUMRDMSR} */
1213static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1214{
1215 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1216 /** @todo Implement machine check exception injection. */
1217 *puValue = 0;
1218 return VINF_SUCCESS;
1219}
1220
1221
1222/** @callback_method_impl{FNCPUMWRMSR} */
1223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1224{
1225 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1226 /** @todo Implement machine check exception injection. */
1227 return VINF_SUCCESS;
1228}
1229
1230
1231/** @callback_method_impl{FNCPUMRDMSR} */
1232static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1233{
1234 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1235 /** @todo implement IA32_DS_AREA. */
1236 *puValue = 0;
1237 return VINF_SUCCESS;
1238}
1239
1240
1241/** @callback_method_impl{FNCPUMWRMSR} */
1242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1243{
1244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1245 /** @todo implement IA32_DS_AREA. */
1246 return VINF_SUCCESS;
1247}
1248
1249
1250/** @callback_method_impl{FNCPUMRDMSR} */
1251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1252{
1253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1254 /** @todo implement TSC deadline timer. */
1255 *puValue = 0;
1256 return VINF_SUCCESS;
1257}
1258
1259
1260/** @callback_method_impl{FNCPUMWRMSR} */
1261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1262{
1263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1264 /** @todo implement TSC deadline timer. */
1265 return VINF_SUCCESS;
1266}
1267
1268
1269/** @callback_method_impl{FNCPUMRDMSR} */
1270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1271{
1272 RT_NOREF_PV(pRange);
1273 return APICReadMsr(pVCpu, idMsr, puValue);
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMWRMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1279{
1280 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1281 return APICWriteMsr(pVCpu, idMsr, uValue);
1282}
1283
1284
1285/** @callback_method_impl{FNCPUMRDMSR} */
1286static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1287{
1288 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1289 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1290 *puValue = 0;
1291 return VINF_SUCCESS;
1292}
1293
1294
1295/** @callback_method_impl{FNCPUMWRMSR} */
1296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1297{
1298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1299 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1300 return VINF_SUCCESS;
1301}
1302
1303
1304/**
1305 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1306 *
1307 * @returns IA32_VMX_BASIC value.
1308 * @param pVCpu The cross context per CPU structure.
1309 */
1310VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1311{
1312 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1313 uint64_t uVmxMsr;
1314 if (pGuestFeatures->fVmx)
1315 {
1316 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1317 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1318 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, !pGuestFeatures->fLongMode )
1319 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1320 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1321 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1322 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1323 }
1324 else
1325 uVmxMsr = 0;
1326 return uVmxMsr;
1327}
1328
1329
1330/** @callback_method_impl{FNCPUMRDMSR} */
1331static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1332{
1333 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1334 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1335 return VINF_SUCCESS;
1336}
1337
1338
1339/**
1340 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1341 *
1342 * @returns IA32_VMX_PINBASED_CTLS value.
1343 * @param pVCpu The cross context per CPU structure.
1344 */
1345VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1346{
1347 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1348 uint64_t uVmxMsr;
1349 if (pGuestFeatures->fVmx)
1350 {
1351 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1352 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1353 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1354 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1355 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1356 uint32_t const fVal = VMX_PIN_CTLS_DEFAULT1;
1357 uint32_t const fZap = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1358 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1359 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1360 }
1361 else
1362 uVmxMsr = 0;
1363 return uVmxMsr;
1364}
1365
1366
1367/** @callback_method_impl{FNCPUMRDMSR} */
1368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1369{
1370 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1371 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1372 return VINF_SUCCESS;
1373}
1374
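/*
 * Editor's note (not part of the original file): the VMX capability MSRs built
 * above pack the "allowed-0" settings (bits that must be 1) into the low dword
 * and the "allowed-1" settings (bits that may be 1) into the high dword, which
 * is what combining fVal and fZap into a 64-bit value achieves.  A consumer
 * typically adjusts a desired control word against such an MSR like this
 * (names are made up):
 */
#if 0 /* illustrative sketch only */
static uint32_t exampleAdjustVmxCtls(uint64_t uCtlsMsr, uint32_t fDesired)
{
    uint32_t const fMustBeOne = (uint32_t)uCtlsMsr;         /* low dword: allowed-0 settings */
    uint32_t const fMayBeOne  = (uint32_t)(uCtlsMsr >> 32); /* high dword: allowed-1 settings */
    return (fDesired | fMustBeOne) & fMayBeOne;
}
#endif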
1375
1376/**
1377 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1378 *
1379 * @returns IA32_VMX_PROCBASED_CTLS value.
1380 * @param pVCpu The cross context per CPU structure.
1381 */
1382VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1383{
1384 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1385 uint64_t uVmxMsr;
1386 if (pGuestFeatures->fVmx)
1387 {
1388 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1389 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1390 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1391 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1392 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1393 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1394 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1395 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1396 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1397 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1398 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1399 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1400 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1401 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1402 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1403 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1404 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1405 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1406 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1407 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1408 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1409 uint32_t const fVal = VMX_PROC_CTLS_DEFAULT1;
1410 uint32_t const fZap = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1411 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1412 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1413 }
1414 else
1415 uVmxMsr = 0;
1416 return uVmxMsr;
1417}
1418
1419
1420/** @callback_method_impl{FNCPUMRDMSR} */
1421static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1422{
1423 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1424 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1425 return VINF_SUCCESS;
1426}
1427
1428
1429/**
1430 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1431 *
1432 * @returns IA32_VMX_EXIT_CTLS value.
1433 * @param pVCpu The cross context per CPU structure.
1434 */
1435VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1436{
1437 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1438 uint64_t uVmxMsr;
1439 if (pGuestFeatures->fVmx)
1440 {
1441 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1442 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1443 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1444 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1445 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1446 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1447 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1448 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1449 uint32_t const fVal = VMX_EXIT_CTLS_DEFAULT1;
1450 uint32_t const fZap = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1451 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1452 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1453 }
1454 else
1455 uVmxMsr = 0;
1456 return uVmxMsr;
1457}
1458
1459
1460/** @callback_method_impl{FNCPUMRDMSR} */
1461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1462{
1463 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1464 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1465 return VINF_SUCCESS;
1466}
1467
1468
1469/**
1470 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1471 *
1472 * @returns IA32_VMX_ENTRY_CTLS value.
1473 * @param pVCpu The cross context per CPU structure.
1474 */
1475VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1476{
1477 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1478 uint64_t uVmxMsr;
1479 if (pGuestFeatures->fVmx)
1480 {
1481 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1482 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1483 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1484 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1485 uint32_t const fDefault1 = VMX_ENTRY_CTLS_DEFAULT1;
1486 uint32_t const fVal = fDefault1;
1487 uint32_t const fZap = fFeatures | fDefault1;
1488 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1489 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1490 }
1491 else
1492 uVmxMsr = 0;
1493 return uVmxMsr;
1494}
1495
1496
1497/** @callback_method_impl{FNCPUMRDMSR} */
1498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1499{
1500 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1501 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1502 return VINF_SUCCESS;
1503}
1504
1505
1506/**
1507 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1508 *
1509 * @returns IA32_VMX_MISC MSR.
1510 * @param pVCpu The cross context per CPU structure.
1511 */
1512VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1513{
1514 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1515 uint64_t uVmxMsr;
1516 if (pGuestFeatures->fVmx)
1517 {
1518 uint64_t uHostMsr;
1519 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1520 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
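        /* Clamp the host-reported values to what the virtual VMX implementation supports. */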
1521 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_AUTOMSR_COUNT_MAX);
1522 uint8_t const fActivityState = RT_BF_GET(uHostMsr, VMX_BF_MISC_ACTIVITY_STATES) & VMX_V_GUEST_ACTIVITY_STATE_MASK;
1523 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1524 | RT_BF_MAKE(VMX_BF_MISC_EXIT_SAVE_EFER_LMA, pGuestFeatures->fVmxExitSaveEferLma )
1525 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, fActivityState )
1526 | RT_BF_MAKE(VMX_BF_MISC_INTEL_PT, pGuestFeatures->fVmxIntelPt )
1527 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1528 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1529 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1530 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1531 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1532 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1533 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1534 }
1535 else
1536 uVmxMsr = 0;
1537 return uVmxMsr;
1538}
1539
1540
1541/** @callback_method_impl{FNCPUMRDMSR} */
1542static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1543{
1544 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1545 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1546 return VINF_SUCCESS;
1547}
1548
1549
1550/**
1551 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1552 *
1553 * @returns IA32_VMX_CR0_FIXED0 value.
1554 * @param pVCpu The cross context per CPU structure.
1555 */
1556VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1557{
1558 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1559 if (pGuestFeatures->fVmx)
1560 {
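        /* With unrestricted guest execution, CR0.PE and CR0.PG are allowed to be clear. */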
1561 uint64_t const uVmxMsr = pGuestFeatures->fVmxUnrestrictedGuest ? VMX_V_CR0_FIXED0_UX : VMX_V_CR0_FIXED0;
1562 return uVmxMsr;
1563 }
1564 return 0;
1565}
1566
1567
1568/** @callback_method_impl{FNCPUMRDMSR} */
1569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1570{
1571 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1572 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1573 return VINF_SUCCESS;
1574}
1575
1576
1577/**
1578 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1579 *
1580 * @returns IA32_VMX_CR0_FIXED1 MSR.
1581 * @param pVCpu The cross context per CPU structure.
1582 */
1583VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1584{
1585 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1586 uint64_t uVmxMsr;
1587 if (pGuestFeatures->fVmx)
1588 {
1589 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1590 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1591 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1592 }
1593 else
1594 uVmxMsr = 0;
1595 return uVmxMsr;
1596}
1597
1598
1599/** @callback_method_impl{FNCPUMRDMSR} */
1600static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1601{
1602 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1603 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1604 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1605 return VINF_SUCCESS;
1606}
1607
1608
1609/**
1610 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1611 *
1612 * @returns IA32_VMX_CR4_FIXED0 value.
1613 * @param pVCpu The cross context per CPU structure.
1614 */
1615VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1616{
1617 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1618 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1619 return uVmxMsr;
1620}
1621
1622
1623/** @callback_method_impl{FNCPUMRDMSR} */
1624static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1625{
1626 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1627 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1628 return VINF_SUCCESS;
1629}
1630
1631
1632/**
1633 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1634 *
1635 * @returns IA32_VMX_CR4_FIXED1 MSR.
1636 * @param pVCpu The cross context per CPU structure.
1637 */
1638VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1639{
1640 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1641 uint64_t uVmxMsr;
1642 if (pGuestFeatures->fVmx)
1643 {
1644 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1645 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1646 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1647 }
1648 else
1649 uVmxMsr = 0;
1650 return uVmxMsr;
1651}
1652
1653
1654/** @callback_method_impl{FNCPUMRDMSR} */
1655static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1656{
1657 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1658 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1659 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1660 return VINF_SUCCESS;
1661}
1662
1663
1664/**
1665 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1666 *
1667 * @returns IA32_VMX_VMCS_ENUM value.
1668 * @param pVCpu The cross context per CPU structure.
1669 */
1670VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1671{
1672 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1673 uint64_t uVmxMsr;
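    /* Bits 9:1 of IA32_VMX_VMCS_ENUM report the highest VMCS field index supported. */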
1674 if (pGuestFeatures->fVmx)
1675 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1676 else
1677 uVmxMsr = 0;
1678 return uVmxMsr;
1679}
1680
1681
1682/** @callback_method_impl{FNCPUMRDMSR} */
1683static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1684{
1685 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1686 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1687 return VINF_SUCCESS;
1688}
1689
1690
1691/**
1692 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1693 *
1694 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1695 * @param pVCpu The cross context per CPU structure.
1696 */
1697VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1698{
1699 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1700 uint64_t uVmxMsr;
1701 if ( pGuestFeatures->fVmx
1702 && pGuestFeatures->fVmxSecondaryExecCtls)
1703 {
1704 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1705 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1706 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1707 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1708 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1709 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1710 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1711 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1712 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1713 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1714 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1715 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1716 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1717 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1718 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1719 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1720 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1721 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1722 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1723 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
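        /* No secondary control belongs to the default1 class, so the allowed 0-settings (low dword) are all zero. */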
1724 uint32_t const fVal = 0;
1725 uint32_t const fZap = fFeatures;
1726 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1727 }
1728 else
1729 uVmxMsr = 0;
1730 return uVmxMsr;
1731}
1732
1733
1734/** @callback_method_impl{FNCPUMRDMSR} */
1735static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1736{
1737 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1738 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1739 return VINF_SUCCESS;
1740}
1741
1742
1743/** @callback_method_impl{FNCPUMRDMSR} */
1744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1745{
1746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1747 *puValue = 0;
1748 return VINF_SUCCESS;
1749}
1750
1751
1752/** @callback_method_impl{FNCPUMRDMSR} */
1753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1754{
1755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
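    /* The TRUE_xxx_CTLS MSRs only exist when IA32_VMX_BASIC bit 55 is set; presumably that
       capability is not advertised to the guest here, hence the zero value. */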
1756 *puValue = 0;
1757 return VINF_SUCCESS;
1758}
1759
1760
1761/** @callback_method_impl{FNCPUMRDMSR} */
1762static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1763{
1764 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1765 *puValue = 0;
1766 return VINF_SUCCESS;
1767}
1768
1769
1770/** @callback_method_impl{FNCPUMRDMSR} */
1771static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1772{
1773 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1774 *puValue = 0;
1775 return VINF_SUCCESS;
1776}
1777
1778
1779/** @callback_method_impl{FNCPUMRDMSR} */
1780static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1781{
1782 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1783 *puValue = 0;
1784 return VINF_SUCCESS;
1785}
1786
1787
1788/**
1789 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1790 *
1791 * @returns IA32_VMX_VMFUNC value.
1792 * @param pVCpu The cross context per CPU structure.
1793 */
1794VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1795{
1796 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1797 uint64_t uVmxMsr;
1798 if ( pGuestFeatures->fVmx
1799 && pGuestFeatures->fVmxVmFunc)
1800 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1801 else
1802 uVmxMsr = 0;
1803 return uVmxMsr;
1804}
1805
1806
1807/** @callback_method_impl{FNCPUMRDMSR} */
1808static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1809{
1810 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1811 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1812 return VINF_SUCCESS;
1813}
1814
1815
1816/** @callback_method_impl{FNCPUMRDMSR} */
1817static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1818{
1819 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1820 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1821 return VINF_SUCCESS;
1822}
1823
1824
1825/** @callback_method_impl{FNCPUMWRMSR} */
1826static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1827{
1828 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1829
1830 /* NB: The STIBP bit can be set even when IBRS is present, regardless of whether STIBP is actually implemented. */
1831 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1832 {
1833 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1834 return VERR_CPUM_RAISE_GP_0;
1835 }
1836
1837 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1838 return VINF_SUCCESS;
1839}
1840
1841
1842/** @callback_method_impl{FNCPUMWRMSR} */
1843static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1844{
1845 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1846 return VINF_SUCCESS;
1847}
1848
1849
1850/** @callback_method_impl{FNCPUMRDMSR} */
1851static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1852{
1853 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1854 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1855 return VINF_SUCCESS;
1856}
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869/*
1870 * AMD64
1871 * AMD64
1872 * AMD64
1873 */
1874
1875
1876/** @callback_method_impl{FNCPUMRDMSR} */
1877static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1878{
1879 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1880 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1881 return VINF_SUCCESS;
1882}
1883
1884
1885/** @callback_method_impl{FNCPUMWRMSR} */
1886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1887{
1888 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1889 uint64_t uValidatedEfer;
1890 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1891 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1892 if (RT_FAILURE(rc))
1893 return VERR_CPUM_RAISE_GP_0;
1894
1895 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1896 return VINF_SUCCESS;
1897}
1898
1899
1900/** @callback_method_impl{FNCPUMRDMSR} */
1901static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1902{
1903 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1904 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1905 return VINF_SUCCESS;
1906}
1907
1908
1909/** @callback_method_impl{FNCPUMWRMSR} */
1910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1911{
1912 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1913 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1914 return VINF_SUCCESS;
1915}
1916
1917
1918/** @callback_method_impl{FNCPUMRDMSR} */
1919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1920{
1921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1922 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/** @callback_method_impl{FNCPUMWRMSR} */
1928static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1929{
1930 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1931 if (!X86_IS_CANONICAL(uValue))
1932 {
1933 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1934 return VERR_CPUM_RAISE_GP_0;
1935 }
1936 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1937 return VINF_SUCCESS;
1938}
1939
1940
1941/** @callback_method_impl{FNCPUMRDMSR} */
1942static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1943{
1944 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1945 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1946 return VINF_SUCCESS;
1947}
1948
1949
1950/** @callback_method_impl{FNCPUMWRMSR} */
1951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1952{
1953 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1954 if (!X86_IS_CANONICAL(uValue))
1955 {
1956 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1957 return VERR_CPUM_RAISE_GP_0;
1958 }
1959 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1960 return VINF_SUCCESS;
1961}
1962
1963
1964/** @callback_method_impl{FNCPUMRDMSR} */
1965static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1966{
1967 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1968 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
1969 return VINF_SUCCESS;
1970}
1971
1972
1973/** @callback_method_impl{FNCPUMWRMSR} */
1974static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1975{
1976 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1977 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
1978 return VINF_SUCCESS;
1979}
1980
1981
1982/** @callback_method_impl{FNCPUMRDMSR} */
1983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1984{
1985 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1986 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
1987 return VINF_SUCCESS;
1988}
1989
1990
1991/** @callback_method_impl{FNCPUMWRMSR} */
1992static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1993{
1994 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1995 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
1996 return VINF_SUCCESS;
1997}
1998
1999
2000/** @callback_method_impl{FNCPUMRDMSR} */
2001static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2002{
2003 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2004 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
2005 return VINF_SUCCESS;
2006}
2007
2008/** @callback_method_impl{FNCPUMWRMSR} */
2009static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2010{
2011 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2012 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2013 return VINF_SUCCESS;
2014}
2015
2016
2017
2018/** @callback_method_impl{FNCPUMRDMSR} */
2019static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2020{
2021 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2022 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2023 return VINF_SUCCESS;
2024}
2025
2026/** @callback_method_impl{FNCPUMWRMSR} */
2027static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2028{
2029 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2030 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2031 return VINF_SUCCESS;
2032}
2033
2034
2035/** @callback_method_impl{FNCPUMRDMSR} */
2036static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2037{
2038 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2039 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2040 return VINF_SUCCESS;
2041}
2042
2043/** @callback_method_impl{FNCPUMWRMSR} */
2044static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2045{
2046 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2047 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2048 return VINF_SUCCESS;
2049}
2050
2051
2052/*
2053 * Intel specific
2054 * Intel specific
2055 * Intel specific
2056 */
2057
2058/** @callback_method_impl{FNCPUMRDMSR} */
2059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2060{
2061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2062 /** @todo recalc clock frequency ratio? */
2063 *puValue = pRange->uValue;
2064 return VINF_SUCCESS;
2065}
2066
2067
2068/** @callback_method_impl{FNCPUMWRMSR} */
2069static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2070{
2071 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2072 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2073 return VINF_SUCCESS;
2074}
2075
2076
2077/** @callback_method_impl{FNCPUMRDMSR} */
2078static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2079{
2080 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2081
2082 /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2083 have a 4-bit core count. */
2084 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2085 uint16_t cThreads = cCores; /** @todo hyper-threading. */
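    /* MSR_CORE_THREAD_COUNT layout: thread count in bits 15:0, core count in bits 31:16. */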
2086 *puValue = RT_MAKE_U32(cThreads, cCores);
2087 return VINF_SUCCESS;
2088}
2089
2090
2091/** @callback_method_impl{FNCPUMRDMSR} */
2092static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2093{
2094 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2095 /** @todo P4 hard power on config */
2096 *puValue = pRange->uValue;
2097 return VINF_SUCCESS;
2098}
2099
2100
2101/** @callback_method_impl{FNCPUMWRMSR} */
2102static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2103{
2104 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2105 /** @todo P4 hard power on config */
2106 return VINF_SUCCESS;
2107}
2108
2109
2110/** @callback_method_impl{FNCPUMRDMSR} */
2111static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2112{
2113 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2114 /** @todo P4 soft power on config */
2115 *puValue = pRange->uValue;
2116 return VINF_SUCCESS;
2117}
2118
2119
2120/** @callback_method_impl{FNCPUMWRMSR} */
2121static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2122{
2123 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2124 /** @todo P4 soft power on config */
2125 return VINF_SUCCESS;
2126}
2127
2128
2129/** @callback_method_impl{FNCPUMRDMSR} */
2130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2131{
2132 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2133
2134 uint64_t uValue;
2135 PVM pVM = pVCpu->CTX_SUFF(pVM);
2136 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2137 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2138 {
2139 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2140 {
2141 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2142 uValue = 0;
2143 }
2144 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2145 {
2146 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2147 uValue = 1;
2148 }
2149 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2150 {
2151 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2152 uValue = 3;
2153 }
2154 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2155 {
2156 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2157 uValue = 2;
2158 }
2159 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2160 {
2161 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2162 uValue = 0;
2163 }
2164 else
2165 {
2166 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2167 uValue = 6;
2168 }
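        /* The bus speed selector ends up in bits 16+ and the bus-to-core ratio in bits 31:24;
           everything outside the 0xff0f0000 mask is taken over from the configured default. */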
2169 uValue <<= 16;
2170
2171 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2172 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2173 uValue |= (uint32_t)uTscRatio << 24;
2174
2175 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2176 }
2177 else
2178 {
2179 /* Probably more stuff here, but intel doesn't want to tell us. */
2180 uValue = pRange->uValue;
2181 uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is only documented value */
2182 }
2183
2184 *puValue = uValue;
2185 return VINF_SUCCESS;
2186}
2187
2188
2189/** @callback_method_impl{FNCPUMWRMSR} */
2190static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2191{
2192 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2193 /** @todo P4 bus frequency config */
2194 return VINF_SUCCESS;
2195}
2196
2197
2198/** @callback_method_impl{FNCPUMRDMSR} */
2199static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2200{
2201 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2202
2203 /* Convert the scalable bus frequency to the encoding in the intel manual (for core+). */
2204 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2205 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2206 *puValue = 5;
2207 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2208 *puValue = 1;
2209 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2210 *puValue = 3;
2211 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2212 *puValue = 2;
2213 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2214 *puValue = 0;
2215 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2216 *puValue = 4;
2217 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2218 *puValue = 6;
2219
2220 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2221
2222 return VINF_SUCCESS;
2223}
2224
2225
2226/** @callback_method_impl{FNCPUMRDMSR} */
2227static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2228{
2229 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2230
2231 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2232 PVM pVM = pVCpu->CTX_SUFF(pVM);
2233 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2234 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2235 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2236 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2237 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2238
2239 /* Ivy bridge has a minimum operating ratio as well. */
2240 if (true) /** @todo detect sandy bridge. */
2241 uValue |= (uint64_t)uTscRatio << 48;
2242
2243 *puValue = uValue;
2244 return VINF_SUCCESS;
2245}
2246
2247
2248/** @callback_method_impl{FNCPUMRDMSR} */
2249static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2250{
2251 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2252
2253 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2254
2255 PVM pVM = pVCpu->CTX_SUFF(pVM);
2256 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2257 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2258 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
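    /* Insert the computed TSC-to-bus ratio into the flex ratio field (bits 15:8). */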
2259 uValue |= (uint32_t)uTscRatio << 8;
2260
2261 *puValue = uValue;
2262 return VINF_SUCCESS;
2263}
2264
2265
2266/** @callback_method_impl{FNCPUMWRMSR} */
2267static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2268{
2269 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2270 /** @todo implement writing MSR_FLEX_RATIO. */
2271 return VINF_SUCCESS;
2272}
2273
2274
2275/** @callback_method_impl{FNCPUMRDMSR} */
2276static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2277{
2278 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2279 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2280 return VINF_SUCCESS;
2281}
2282
2283
2284/** @callback_method_impl{FNCPUMWRMSR} */
2285static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2286{
2287 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2288
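    /* Bit 15 is the CFG lock bit; once it is set, further writes must #GP. */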
2289 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2290 {
2291 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2292 return VERR_CPUM_RAISE_GP_0;
2293 }
2294#if 0 /** @todo check what real (old) hardware does. */
2295 if ((uValue & 7) >= 5)
2296 {
2297 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2298 return VERR_CPUM_RAISE_GP_0;
2299 }
2300#endif
2301 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2302 return VINF_SUCCESS;
2303}
2304
2305
2306/** @callback_method_impl{FNCPUMRDMSR} */
2307static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2308{
2309 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2310 /** @todo implement I/O mwait wakeup. */
2311 *puValue = 0;
2312 return VINF_SUCCESS;
2313}
2314
2315
2316/** @callback_method_impl{FNCPUMWRMSR} */
2317static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2318{
2319 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2320 /** @todo implement I/O mwait wakeup. */
2321 return VINF_SUCCESS;
2322}
2323
2324
2325/** @callback_method_impl{FNCPUMRDMSR} */
2326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2327{
2328 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2329 /** @todo implement last branch records. */
2330 *puValue = 0;
2331 return VINF_SUCCESS;
2332}
2333
2334
2335/** @callback_method_impl{FNCPUMWRMSR} */
2336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2337{
2338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2339 /** @todo implement last branch records. */
2340 return VINF_SUCCESS;
2341}
2342
2343
2344/** @callback_method_impl{FNCPUMRDMSR} */
2345static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2346{
2347 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2348 /** @todo implement last branch records. */
2349 *puValue = 0;
2350 return VINF_SUCCESS;
2351}
2352
2353
2354/** @callback_method_impl{FNCPUMWRMSR} */
2355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2356{
2357 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2358 /** @todo implement last branch records. */
2359 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2360 * if the rest of the bits are zero. Automatic sign extending?
2361 * Investigate! */
2362 if (!X86_IS_CANONICAL(uValue))
2363 {
2364 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2365 return VERR_CPUM_RAISE_GP_0;
2366 }
2367 return VINF_SUCCESS;
2368}
2369
2370
2371/** @callback_method_impl{FNCPUMRDMSR} */
2372static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2373{
2374 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2375 /** @todo implement last branch records. */
2376 *puValue = 0;
2377 return VINF_SUCCESS;
2378}
2379
2380
2381/** @callback_method_impl{FNCPUMWRMSR} */
2382static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2383{
2384 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2385 /** @todo implement last branch records. */
2386 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2387 * if the rest of the bits are zero. Automatic sign extending?
2388 * Investigate! */
2389 if (!X86_IS_CANONICAL(uValue))
2390 {
2391 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2392 return VERR_CPUM_RAISE_GP_0;
2393 }
2394 return VINF_SUCCESS;
2395}
2396
2397
2398/** @callback_method_impl{FNCPUMRDMSR} */
2399static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2400{
2401 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2402 /** @todo implement last branch records. */
2403 *puValue = 0;
2404 return VINF_SUCCESS;
2405}
2406
2407
2408/** @callback_method_impl{FNCPUMWRMSR} */
2409static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2410{
2411 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2412 /** @todo implement last branch records. */
2413 return VINF_SUCCESS;
2414}
2415
2416
2417/** @callback_method_impl{FNCPUMRDMSR} */
2418static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2419{
2420 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2421 *puValue = pRange->uValue;
2422 return VINF_SUCCESS;
2423}
2424
2425
2426/** @callback_method_impl{FNCPUMWRMSR} */
2427static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2428{
2429 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2430 return VINF_SUCCESS;
2431}
2432
2433
2434/** @callback_method_impl{FNCPUMRDMSR} */
2435static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2436{
2437 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2438 *puValue = pRange->uValue;
2439 return VINF_SUCCESS;
2440}
2441
2442
2443/** @callback_method_impl{FNCPUMWRMSR} */
2444static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2445{
2446 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2447 return VINF_SUCCESS;
2448}
2449
2450
2451/** @callback_method_impl{FNCPUMRDMSR} */
2452static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2453{
2454 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2455 *puValue = pRange->uValue;
2456 return VINF_SUCCESS;
2457}
2458
2459
2460/** @callback_method_impl{FNCPUMWRMSR} */
2461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2462{
2463 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2464 return VINF_SUCCESS;
2465}
2466
2467
2468/** @callback_method_impl{FNCPUMRDMSR} */
2469static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2470{
2471 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2472 /** @todo machine check. */
2473 *puValue = pRange->uValue;
2474 return VINF_SUCCESS;
2475}
2476
2477
2478/** @callback_method_impl{FNCPUMWRMSR} */
2479static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2480{
2481 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2482 /** @todo machine check. */
2483 return VINF_SUCCESS;
2484}
2485
2486
2487/** @callback_method_impl{FNCPUMRDMSR} */
2488static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2489{
2490 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2491 *puValue = 0;
2492 return VINF_SUCCESS;
2493}
2494
2495
2496/** @callback_method_impl{FNCPUMWRMSR} */
2497static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2498{
2499 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2500 return VINF_SUCCESS;
2501}
2502
2503
2504/** @callback_method_impl{FNCPUMRDMSR} */
2505static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2506{
2507 RT_NOREF_PV(idMsr);
2508 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2509 AssertRC(rc);
2510 return VINF_SUCCESS;
2511}
2512
2513
2514/** @callback_method_impl{FNCPUMWRMSR} */
2515static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2516{
2517 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2518 /* This CRx interface differs from the MOV CRx, GReg interface in that
2519 #GP(0) isn't raised if unsupported bits are written to. Instead they
2520 are simply ignored and masked off. (Pentium M Dothan) */
2521 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2522 * any, gain. */
2523 return VINF_SUCCESS;
2524}
2525
2526
2527/** @callback_method_impl{FNCPUMRDMSR} */
2528static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2529{
2530 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2531 /** @todo implement CPUID masking. */
2532 *puValue = UINT64_MAX;
2533 return VINF_SUCCESS;
2534}
2535
2536
2537/** @callback_method_impl{FNCPUMWRMSR} */
2538static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2539{
2540 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2541 /** @todo implement CPUID masking. */
2542 return VINF_SUCCESS;
2543}
2544
2545
2546/** @callback_method_impl{FNCPUMRDMSR} */
2547static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2548{
2549 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2550 /** @todo implement CPUID masking. */
2551 *puValue = 0;
2552 return VINF_SUCCESS;
2553}
2554
2555
2556/** @callback_method_impl{FNCPUMWRMSR} */
2557static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2558{
2559 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2560 /** @todo implement CPUID masking. */
2561 return VINF_SUCCESS;
2562}
2563
2564
2565
2566/** @callback_method_impl{FNCPUMRDMSR} */
2567static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2568{
2569 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2570 /** @todo implement CPUID masking. */
2571 *puValue = UINT64_MAX;
2572 return VINF_SUCCESS;
2573}
2574
2575
2576/** @callback_method_impl{FNCPUMWRMSR} */
2577static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2578{
2579 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2580 /** @todo implement CPUID masking. */
2581 return VINF_SUCCESS;
2582}
2583
2584
2585
2586/** @callback_method_impl{FNCPUMRDMSR} */
2587static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2588{
2589 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2590 /** @todo implement AES-NI. */
2591 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2592 return VINF_SUCCESS;
2593}
2594
2595
2596/** @callback_method_impl{FNCPUMWRMSR} */
2597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2598{
2599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2600 /** @todo implement AES-NI. */
2601 return VERR_CPUM_RAISE_GP_0;
2602}
2603
2604
2605/** @callback_method_impl{FNCPUMRDMSR} */
2606static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2607{
2608 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2609 /** @todo implement intel C states. */
2610 *puValue = pRange->uValue;
2611 return VINF_SUCCESS;
2612}
2613
2614
2615/** @callback_method_impl{FNCPUMWRMSR} */
2616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2617{
2618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2619 /** @todo implement intel C states. */
2620 return VINF_SUCCESS;
2621}
2622
2623
2624/** @callback_method_impl{FNCPUMRDMSR} */
2625static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2626{
2627 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2628 /** @todo implement last-branch-records. */
2629 *puValue = 0;
2630 return VINF_SUCCESS;
2631}
2632
2633
2634/** @callback_method_impl{FNCPUMWRMSR} */
2635static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2636{
2637 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2638 /** @todo implement last-branch-records. */
2639 return VINF_SUCCESS;
2640}
2641
2642
2643/** @callback_method_impl{FNCPUMRDMSR} */
2644static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2645{
2646 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2647 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2648 *puValue = 0;
2649 return VINF_SUCCESS;
2650}
2651
2652
2653/** @callback_method_impl{FNCPUMWRMSR} */
2654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2655{
2656 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2657 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2658 return VINF_SUCCESS;
2659}
2660
2661
2662/** @callback_method_impl{FNCPUMRDMSR} */
2663static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2664{
2665 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2666 /** @todo implement memory VLW? */
2667 *puValue = pRange->uValue;
2668 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2669 AAJ49/AAK51/????, which documents the inversion of this bit. The
2670 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2671 that corrects things. Some guesses at the other bits:
2672 bit 2 = INTR
2673 bit 4 = SMI
2674 bit 5 = INIT
2675 bit 6 = NMI */
2676 return VINF_SUCCESS;
2677}
2678
2679
2680/** @callback_method_impl{FNCPUMRDMSR} */
2681static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2682{
2683 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2684 /** @todo intel power management */
2685 *puValue = 0;
2686 return VINF_SUCCESS;
2687}
2688
2689
2690/** @callback_method_impl{FNCPUMWRMSR} */
2691static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2692{
2693 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2694 /** @todo intel power management */
2695 return VINF_SUCCESS;
2696}
2697
2698
2699/** @callback_method_impl{FNCPUMRDMSR} */
2700static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2701{
2702 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2703 /** @todo intel performance counters. */
2704 *puValue = 0;
2705 return VINF_SUCCESS;
2706}
2707
2708
2709/** @callback_method_impl{FNCPUMWRMSR} */
2710static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2711{
2712 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2713 /** @todo intel performance counters. */
2714 return VINF_SUCCESS;
2715}
2716
2717
2718/** @callback_method_impl{FNCPUMRDMSR} */
2719static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2720{
2721 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2722 /** @todo intel performance counters. */
2723 *puValue = 0;
2724 return VINF_SUCCESS;
2725}
2726
2727
2728/** @callback_method_impl{FNCPUMWRMSR} */
2729static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2730{
2731 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2732 /** @todo intel performance counters. */
2733 return VINF_SUCCESS;
2734}
2735
2736
2737/** @callback_method_impl{FNCPUMRDMSR} */
2738static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2739{
2740 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2741 /** @todo intel power management. */
2742 *puValue = 0;
2743 return VINF_SUCCESS;
2744}
2745
2746
2747/** @callback_method_impl{FNCPUMRDMSR} */
2748static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2749{
2750 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2751 /** @todo intel power management. */
2752 *puValue = 0;
2753 return VINF_SUCCESS;
2754}
2755
2756
2757/** @callback_method_impl{FNCPUMRDMSR} */
2758static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2759{
2760 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2761 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2762 *puValue = 0;
2763 return VINF_SUCCESS;
2764}
2765
2766
2767/** @callback_method_impl{FNCPUMWRMSR} */
2768static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2769{
2770 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2771 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2772 return VINF_SUCCESS;
2773}
2774
2775
2776/** @callback_method_impl{FNCPUMRDMSR} */
2777static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2778{
2779 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2780 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2781 *puValue = 0;
2782 return VINF_SUCCESS;
2783}
2784
2785
2786/** @callback_method_impl{FNCPUMWRMSR} */
2787static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2788{
2789 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2790 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2791 return VINF_SUCCESS;
2792}
2793
2794
2795/** @callback_method_impl{FNCPUMRDMSR} */
2796static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2797{
2798 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2799 /** @todo intel RAPL. */
2800 *puValue = pRange->uValue;
2801 return VINF_SUCCESS;
2802}
2803
2804
2805/** @callback_method_impl{FNCPUMWRMSR} */
2806static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2807{
2808 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2809 /* Note! This is documented as read only and, except for a Silvermont sample, has
2810 always been classified as read only. This is just here to make it compile. */
2811 return VINF_SUCCESS;
2812}
2813
2814
2815/** @callback_method_impl{FNCPUMRDMSR} */
2816static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2817{
2818 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2819 /** @todo intel power management. */
2820 *puValue = 0;
2821 return VINF_SUCCESS;
2822}
2823
2824
2825/** @callback_method_impl{FNCPUMWRMSR} */
2826static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2827{
2828 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2829 /** @todo intel power management. */
2830 return VINF_SUCCESS;
2831}
2832
2833
2834/** @callback_method_impl{FNCPUMRDMSR} */
2835static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2836{
2837 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2838 /** @todo intel power management. */
2839 *puValue = 0;
2840 return VINF_SUCCESS;
2841}
2842
2843
2844/** @callback_method_impl{FNCPUMWRMSR} */
2845static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2846{
2847 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2848 /* Note! This is documented as read only and, except for a Silvermont sample, has
2849 always been classified as read only. This is just here to make it compile. */
2850 return VINF_SUCCESS;
2851}
2852
2853
2854/** @callback_method_impl{FNCPUMRDMSR} */
2855static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2856{
2857 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2858 /** @todo intel RAPL. */
2859 *puValue = 0;
2860 return VINF_SUCCESS;
2861}
2862
2863
2864/** @callback_method_impl{FNCPUMWRMSR} */
2865static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2866{
2867 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2868 /** @todo intel RAPL. */
2869 return VINF_SUCCESS;
2870}
2871
2872
2873/** @callback_method_impl{FNCPUMRDMSR} */
2874static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2875{
2876 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2877 /** @todo intel power management. */
2878 *puValue = 0;
2879 return VINF_SUCCESS;
2880}
2881
2882
2883/** @callback_method_impl{FNCPUMRDMSR} */
2884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2885{
2886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2887 /** @todo intel power management. */
2888 *puValue = 0;
2889 return VINF_SUCCESS;
2890}
2891
2892
2893/** @callback_method_impl{FNCPUMRDMSR} */
2894static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2895{
2896 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2897 /** @todo intel power management. */
2898 *puValue = 0;
2899 return VINF_SUCCESS;
2900}
2901
2902
2903/** @callback_method_impl{FNCPUMRDMSR} */
2904static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2905{
2906 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2907 /** @todo intel RAPL. */
2908 *puValue = 0;
2909 return VINF_SUCCESS;
2910}
2911
2912
2913/** @callback_method_impl{FNCPUMWRMSR} */
2914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2915{
2916 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2917 /** @todo intel RAPL. */
2918 return VINF_SUCCESS;
2919}
2920
2921
2922/** @callback_method_impl{FNCPUMRDMSR} */
2923static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2924{
2925 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2926 /** @todo intel power management. */
2927 *puValue = 0;
2928 return VINF_SUCCESS;
2929}
2930
2931
2932/** @callback_method_impl{FNCPUMRDMSR} */
2933static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2934{
2935 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2936 /** @todo intel power management. */
2937 *puValue = 0;
2938 return VINF_SUCCESS;
2939}
2940
2941
2942/** @callback_method_impl{FNCPUMRDMSR} */
2943static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2944{
2945 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2946 /** @todo intel power management. */
2947 *puValue = 0;
2948 return VINF_SUCCESS;
2949}
2950
2951
2952/** @callback_method_impl{FNCPUMRDMSR} */
2953static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2954{
2955 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2956 /** @todo intel RAPL. */
2957 *puValue = 0;
2958 return VINF_SUCCESS;
2959}
2960
2961
2962/** @callback_method_impl{FNCPUMWRMSR} */
2963static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2964{
2965 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2966 /** @todo intel RAPL. */
2967 return VINF_SUCCESS;
2968}
2969
2970
2971/** @callback_method_impl{FNCPUMRDMSR} */
2972static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2973{
2974 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2975 /** @todo intel power management. */
2976 *puValue = 0;
2977 return VINF_SUCCESS;
2978}
2979
2980
2981/** @callback_method_impl{FNCPUMRDMSR} */
2982static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2983{
2984 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2985 /** @todo intel RAPL. */
2986 *puValue = 0;
2987 return VINF_SUCCESS;
2988}
2989
2990
2991/** @callback_method_impl{FNCPUMWRMSR} */
2992static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2993{
2994 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2995 /** @todo intel RAPL. */
2996 return VINF_SUCCESS;
2997}
2998
2999
3000/** @callback_method_impl{FNCPUMRDMSR} */
3001static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3002{
3003 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3004 /** @todo intel power management. */
3005 *puValue = 0;
3006 return VINF_SUCCESS;
3007}
3008
3009
3010/** @callback_method_impl{FNCPUMRDMSR} */
3011static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3012{
3013 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3014 /** @todo intel RAPL. */
3015 *puValue = 0;
3016 return VINF_SUCCESS;
3017}
3018
3019
3020/** @callback_method_impl{FNCPUMWRMSR} */
3021static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3022{
3023 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3024 /** @todo intel RAPL. */
3025 return VINF_SUCCESS;
3026}
3027
3028
3029/** @callback_method_impl{FNCPUMRDMSR} */
3030static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3031{
3032 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3033 /** @todo intel power management. */
3034 *puValue = 0;
3035 return VINF_SUCCESS;
3036}
3037
3038
3039/** @callback_method_impl{FNCPUMRDMSR} */
3040static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3041{
3042 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3043 /** @todo intel RAPL. */
3044 *puValue = 0;
3045 return VINF_SUCCESS;
3046}
3047
3048
3049/** @callback_method_impl{FNCPUMWRMSR} */
3050static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3051{
3052 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3053 /** @todo intel RAPL. */
3054 return VINF_SUCCESS;
3055}
3056
3057
3058/** @callback_method_impl{FNCPUMRDMSR} */
3059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3060{
3061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3062 /** @todo intel power management. */
3063 *puValue = pRange->uValue;
3064 return VINF_SUCCESS;
3065}
3066
3067
3068/** @callback_method_impl{FNCPUMRDMSR} */
3069static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3070{
3071 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3072 /** @todo intel power management. */
3073 *puValue = pRange->uValue;
3074 return VINF_SUCCESS;
3075}
3076
3077
3078/** @callback_method_impl{FNCPUMRDMSR} */
3079static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3080{
3081 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3082 /** @todo intel power management. */
3083 *puValue = pRange->uValue;
3084 return VINF_SUCCESS;
3085}
3086
3087
3088/** @callback_method_impl{FNCPUMRDMSR} */
3089static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3090{
3091 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3092 /** @todo intel power management. */
3093 *puValue = 0;
3094 return VINF_SUCCESS;
3095}
3096
3097
3098/** @callback_method_impl{FNCPUMWRMSR} */
3099static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3100{
3101 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3102 /** @todo intel power management. */
3103 return VINF_SUCCESS;
3104}
3105
3106
3107/** @callback_method_impl{FNCPUMRDMSR} */
3108static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3109{
3110 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3111 /** @todo intel power management. */
3112 *puValue = 0;
3113 return VINF_SUCCESS;
3114}
3115
3116
3117/** @callback_method_impl{FNCPUMWRMSR} */
3118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3119{
3120 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3121 /** @todo intel power management. */
3122 return VINF_SUCCESS;
3123}
3124
3125
3126/** @callback_method_impl{FNCPUMRDMSR} */
3127static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3128{
3129 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3130 /** @todo uncore msrs. */
3131 *puValue = 0;
3132 return VINF_SUCCESS;
3133}
3134
3135
3136/** @callback_method_impl{FNCPUMWRMSR} */
3137static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3138{
3139 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3140 /** @todo uncore msrs. */
3141 return VINF_SUCCESS;
3142}
3143
3144
3145/** @callback_method_impl{FNCPUMRDMSR} */
3146static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3147{
3148 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3149 /** @todo uncore msrs. */
3150 *puValue = 0;
3151 return VINF_SUCCESS;
3152}
3153
3154
3155/** @callback_method_impl{FNCPUMWRMSR} */
3156static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3157{
3158 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3159 /** @todo uncore msrs. */
3160 return VINF_SUCCESS;
3161}
3162
3163
3164/** @callback_method_impl{FNCPUMRDMSR} */
3165static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3166{
3167 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3168 /** @todo uncore msrs. */
3169 *puValue = 0;
3170 return VINF_SUCCESS;
3171}
3172
3173
3174/** @callback_method_impl{FNCPUMWRMSR} */
3175static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3176{
3177 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3178 /** @todo uncore msrs. */
3179 return VINF_SUCCESS;
3180}
3181
3182
3183/** @callback_method_impl{FNCPUMRDMSR} */
3184static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3185{
3186 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3187 /** @todo uncore msrs. */
3188 *puValue = 0;
3189 return VINF_SUCCESS;
3190}
3191
3192
3193/** @callback_method_impl{FNCPUMWRMSR} */
3194static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3195{
3196 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3197 /** @todo uncore msrs. */
3198 return VINF_SUCCESS;
3199}
3200
3201
3202/** @callback_method_impl{FNCPUMRDMSR} */
3203static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3204{
3205 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3206 /** @todo uncore msrs. */
3207 *puValue = 0;
3208 return VINF_SUCCESS;
3209}
3210
3211
3212/** @callback_method_impl{FNCPUMWRMSR} */
3213static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3214{
3215 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3216 /** @todo uncore msrs. */
3217 return VINF_SUCCESS;
3218}
3219
3220
3221/** @callback_method_impl{FNCPUMRDMSR} */
3222static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3223{
3224 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3225 /** @todo uncore msrs. */
3226 *puValue = 0;
3227 return VINF_SUCCESS;
3228}
3229
3230
3231/** @callback_method_impl{FNCPUMRDMSR} */
3232static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3233{
3234 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3235 /** @todo uncore msrs. */
3236 *puValue = 0;
3237 return VINF_SUCCESS;
3238}
3239
3240
3241/** @callback_method_impl{FNCPUMWRMSR} */
3242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3243{
3244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3245 /** @todo uncore msrs. */
3246 return VINF_SUCCESS;
3247}
3248
3249
3250/** @callback_method_impl{FNCPUMRDMSR} */
3251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3252{
3253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3254 /** @todo uncore msrs. */
3255 *puValue = 0;
3256 return VINF_SUCCESS;
3257}
3258
3259
3260/** @callback_method_impl{FNCPUMWRMSR} */
3261static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3262{
3263 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3264 /** @todo uncore msrs. */
3265 return VINF_SUCCESS;
3266}
3267
3268
3269/** @callback_method_impl{FNCPUMRDMSR} */
3270static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3271{
3272 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3273
3274 /*
3275 * 31:0 is SMI count (read only), 63:32 reserved.
3276 * Since we don't do SMI, the count is always zero.
3277 */
3278 *puValue = 0;
3279 return VINF_SUCCESS;
3280}
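/* Should SMM ever be emulated, the read above would merely surface a saturated 32-bit
   count and keep bits 63:32 zero; a sketch, with cSmis being a purely hypothetical field:
        *puValue = RT_MIN(cSmis, UINT32_MAX);
 */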
3281
3282
3283/** @callback_method_impl{FNCPUMRDMSR} */
3284static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3285{
3286 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3287 /** @todo implement enhanced multi-threaded thermal monitoring (EMTTM)? */
3288 *puValue = pRange->uValue;
3289 return VINF_SUCCESS;
3290}
3291
3292
3293/** @callback_method_impl{FNCPUMWRMSR} */
3294static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3295{
3296 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3297 /** @todo implement enhanced multi-threaded thermal monitoring (EMTTM)? */
3298 return VINF_SUCCESS;
3299}
3300
3301
3302/** @callback_method_impl{FNCPUMRDMSR} */
3303static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3304{
3305 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3306 /** @todo SMM & C-states? */
3307 *puValue = 0;
3308 return VINF_SUCCESS;
3309}
3310
3311
3312/** @callback_method_impl{FNCPUMWRMSR} */
3313static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3314{
3315 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3316 /** @todo SMM & C-states? */
3317 return VINF_SUCCESS;
3318}
3319
3320
3321/** @callback_method_impl{FNCPUMRDMSR} */
3322static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3323{
3324 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3325 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3326 *puValue = 0;
3327 return VINF_SUCCESS;
3328}
3329
3330
3331/** @callback_method_impl{FNCPUMWRMSR} */
3332static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3333{
3334 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3335 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3336 return VINF_SUCCESS;
3337}
3338
3339
3340/** @callback_method_impl{FNCPUMRDMSR} */
3341static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3342{
3343 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3344 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3345 *puValue = 0;
3346 return VINF_SUCCESS;
3347}
3348
3349
3350/** @callback_method_impl{FNCPUMWRMSR} */
3351static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3352{
3353 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3354 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3355 return VINF_SUCCESS;
3356}
3357
3358
3359/** @callback_method_impl{FNCPUMRDMSR} */
3360static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3361{
3362 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3363 /** @todo Core2+ platform environment control interface control register? */
3364 *puValue = 0;
3365 return VINF_SUCCESS;
3366}
3367
3368
3369/** @callback_method_impl{FNCPUMWRMSR} */
3370static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3371{
3372 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3373 /** @todo Core2+ platform environment control interface control register? */
3374 return VINF_SUCCESS;
3375}
3376
3377
3378/** @callback_method_impl{FNCPUMRDMSR} */
3379static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3380{
3381 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3382 *puValue = 0;
3383 return VINF_SUCCESS;
3384}
3385
3386
3387/*
3388 * Multiple vendor P6 MSRs.
3389 * Multiple vendor P6 MSRs.
3390 * Multiple vendor P6 MSRs.
3391 *
3392 * These MSRs were introduced with the P6 but not elevated to architectural
3393 * MSRs, despite other vendors implementing them.
3394 */
3395
3396
3397/** @callback_method_impl{FNCPUMRDMSR} */
3398static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3399{
3400 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3401 /* AMD seems to record just RIP, while Intel claims to record RIP+CS.BASE if the docs
3402 are read correctly, thus the need for separate functions. */
3403 /** @todo implement last branch records. */
3404 *puValue = 0;
3405 return VINF_SUCCESS;
3406}
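/* A minimal sketch of the Intel flavour once last branch records are implemented,
   assuming CPUM were to keep the branch source as a CS offset (uLbrFromOffset is
   hypothetical, no such field exists today):
        *puValue = pVCpu->cpum.s.Guest.cs.u64Base + uLbrFromOffset;
   The AMD flavour would return the recorded RIP unmodified. */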
3407
3408
3409/** @callback_method_impl{FNCPUMRDMSR} */
3410static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3411{
3412 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3413 /** @todo implement last branch records. */
3414 *puValue = 0;
3415 return VINF_SUCCESS;
3416}
3417
3418
3419/** @callback_method_impl{FNCPUMRDMSR} */
3420static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3421{
3422 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3423 /** @todo implement last exception records. */
3424 *puValue = 0;
3425 return VINF_SUCCESS;
3426}
3427
3428
3429/** @callback_method_impl{FNCPUMWRMSR} */
3430static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3431{
3432 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3433 /** @todo implement last exception records. */
3434 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3435 a non-canonical address. */
3436 return VINF_SUCCESS;
3437}
3438
3439
3440/** @callback_method_impl{FNCPUMRDMSR} */
3441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3442{
3443 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3444 /** @todo implement last exception records. */
3445 *puValue = 0;
3446 return VINF_SUCCESS;
3447}
3448
3449
3450/** @callback_method_impl{FNCPUMWRMSR} */
3451static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3452{
3453 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3454 /** @todo implement last exception records. */
3455 return VINF_SUCCESS;
3456}
3457
3458
3459
3460/*
3461 * AMD specific
3462 * AMD specific
3463 * AMD specific
3464 */
3465
3466
3467/** @callback_method_impl{FNCPUMRDMSR} */
3468static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3469{
3470 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3471 /** @todo Implement TscRateMsr */
3472 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3473 return VINF_SUCCESS;
3474}
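/* The ratio is fixed point: the low dword holds the fraction and the next byte the
   integer part, which is why RT_MAKE_U64(0, 1) encodes 1.0. A sketch of scaling a raw
   TSC value by it without 128-bit arithmetic (uTscRatio and uRawTsc are placeholders,
   reserved bits 63:40 assumed zero, rounding ignored):
        uint64_t const uInt    = uTscRatio >> 32;
        uint64_t const uFrac   = uTscRatio & UINT32_C(0xffffffff);
        uint64_t const uScaled = uRawTsc * uInt
                               + (uRawTsc >> 32) * uFrac
                               + (((uRawTsc & UINT32_C(0xffffffff)) * uFrac) >> 32);
 */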
3475
3476
3477/** @callback_method_impl{FNCPUMWRMSR} */
3478static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3479{
3480 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3481 /** @todo Implement TscRateMsr */
3482 return VINF_SUCCESS;
3483}
3484
3485
3486/** @callback_method_impl{FNCPUMRDMSR} */
3487static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3488{
3489 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3490 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3491 /* Note: Only listed in the BKDG for Family 15h. */
3492 *puValue = 0;
3493 return VINF_SUCCESS;
3494}
3495
3496
3497/** @callback_method_impl{FNCPUMWRMSR} */
3498static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3499{
3500 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3501 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3502 return VINF_SUCCESS;
3503}
3504
3505
3506/** @callback_method_impl{FNCPUMRDMSR} */
3507static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3508{
3509 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3510 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3511 /* Note: Only listed in the BKDG for Family 15h. */
3512 *puValue = 0;
3513 return VINF_SUCCESS;
3514}
3515
3516
3517/** @callback_method_impl{FNCPUMWRMSR} */
3518static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3519{
3520 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3521 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3522 return VINF_SUCCESS;
3523}
3524
3525
3526/** @callback_method_impl{FNCPUMRDMSR} */
3527static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3528{
3529 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3530 /** @todo machine check. */
3531 *puValue = 0;
3532 return VINF_SUCCESS;
3533}
3534
3535
3536/** @callback_method_impl{FNCPUMWRMSR} */
3537static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3538{
3539 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3540 /** @todo machine check. */
3541 return VINF_SUCCESS;
3542}
3543
3544
3545/** @callback_method_impl{FNCPUMRDMSR} */
3546static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3547{
3548 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3549 /** @todo AMD performance events. */
3550 *puValue = 0;
3551 return VINF_SUCCESS;
3552}
3553
3554
3555/** @callback_method_impl{FNCPUMWRMSR} */
3556static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3557{
3558 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3559 /** @todo AMD performance events. */
3560 return VINF_SUCCESS;
3561}
3562
3563
3564/** @callback_method_impl{FNCPUMRDMSR} */
3565static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3566{
3567 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3568 /** @todo AMD performance events. */
3569 *puValue = 0;
3570 return VINF_SUCCESS;
3571}
3572
3573
3574/** @callback_method_impl{FNCPUMWRMSR} */
3575static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3576{
3577 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3578 /** @todo AMD performance events. */
3579 return VINF_SUCCESS;
3580}
3581
3582
3583/** @callback_method_impl{FNCPUMRDMSR} */
3584static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3585{
3586 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3587 /** @todo AMD SYS_CFG */
3588 *puValue = pRange->uValue;
3589 return VINF_SUCCESS;
3590}
3591
3592
3593/** @callback_method_impl{FNCPUMWRMSR} */
3594static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3595{
3596 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3597 /** @todo AMD SYS_CFG */
3598 return VINF_SUCCESS;
3599}
3600
3601
3602/** @callback_method_impl{FNCPUMRDMSR} */
3603static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3604{
3605 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3606 /** @todo AMD HW_CFG */
3607 *puValue = 0;
3608 return VINF_SUCCESS;
3609}
3610
3611
3612/** @callback_method_impl{FNCPUMWRMSR} */
3613static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3614{
3615 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3616 /** @todo AMD HW_CFG */
3617 return VINF_SUCCESS;
3618}
3619
3620
3621/** @callback_method_impl{FNCPUMRDMSR} */
3622static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3623{
3624 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3625 /** @todo AMD IorrMask/IorrBase */
3626 *puValue = 0;
3627 return VINF_SUCCESS;
3628}
3629
3630
3631/** @callback_method_impl{FNCPUMWRMSR} */
3632static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3633{
3634 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3635 /** @todo AMD IorrMask/IorrBase */
3636 return VINF_SUCCESS;
3637}
3638
3639
3640/** @callback_method_impl{FNCPUMRDMSR} */
3641static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3642{
3643 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3644 /** @todo AMD IorrMask/IorrBase */
3645 *puValue = 0;
3646 return VINF_SUCCESS;
3647}
3648
3649
3650/** @callback_method_impl{FNCPUMWRMSR} */
3651static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3652{
3653 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3654 /** @todo AMD IorrMask/IorrBase */
3655 return VINF_SUCCESS;
3656}
3657
3658
3659/** @callback_method_impl{FNCPUMRDMSR} */
3660static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3661{
3662 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3663 *puValue = 0;
3664 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3665 * for TOPMEM2. */
3666 //if (pRange->uValue == 0)
3667 // *puValue = _4G - RamHoleSize;
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/** @callback_method_impl{FNCPUMWRMSR} */
3673static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3674{
3675 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3676 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3677 return VINF_SUCCESS;
3678}
3679
3680
3681/** @callback_method_impl{FNCPUMRDMSR} */
3682static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3683{
3684 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3685 /** @todo AMD NB_CFG1 */
3686 *puValue = 0;
3687 return VINF_SUCCESS;
3688}
3689
3690
3691/** @callback_method_impl{FNCPUMWRMSR} */
3692static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3693{
3694 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3695 /** @todo AMD NB_CFG1 */
3696 return VINF_SUCCESS;
3697}
3698
3699
3700/** @callback_method_impl{FNCPUMRDMSR} */
3701static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3702{
3703 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3704 /** @todo machine check. */
3705 *puValue = 0;
3706 return VINF_SUCCESS;
3707}
3708
3709
3710/** @callback_method_impl{FNCPUMWRMSR} */
3711static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3712{
3713 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3714 /** @todo machine check. */
3715 return VINF_SUCCESS;
3716}
3717
3718
3719/** @callback_method_impl{FNCPUMRDMSR} */
3720static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3721{
3722 RT_NOREF_PV(idMsr);
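    /* pRange->uValue / 2 + 0x80000001 selects the CPUID leaf carrying this slice of the
       processor name string; the low bit of uValue picks the EAX:EBX or ECX:EDX half. */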
3723 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3724 if (pLeaf)
3725 {
3726 if (!(pRange->uValue & 1))
3727 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3728 else
3729 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3730 }
3731 else
3732 *puValue = 0;
3733 return VINF_SUCCESS;
3734}
3735
3736
3737/** @callback_method_impl{FNCPUMWRMSR} */
3738static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3739{
3740 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3741 /** @todo Remember guest programmed CPU name. */
3742 return VINF_SUCCESS;
3743}
3744
3745
3746/** @callback_method_impl{FNCPUMRDMSR} */
3747static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3748{
3749 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3750 /** @todo AMD HTC. */
3751 *puValue = pRange->uValue;
3752 return VINF_SUCCESS;
3753}
3754
3755
3756/** @callback_method_impl{FNCPUMWRMSR} */
3757static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3758{
3759 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3760 /** @todo AMD HTC. */
3761 return VINF_SUCCESS;
3762}
3763
3764
3765/** @callback_method_impl{FNCPUMRDMSR} */
3766static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3767{
3768 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3769 /** @todo AMD STC. */
3770 *puValue = 0;
3771 return VINF_SUCCESS;
3772}
3773
3774
3775/** @callback_method_impl{FNCPUMWRMSR} */
3776static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3777{
3778 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3779 /** @todo AMD STC. */
3780 return VINF_SUCCESS;
3781}
3782
3783
3784/** @callback_method_impl{FNCPUMRDMSR} */
3785static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3786{
3787 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3788 /** @todo AMD FIDVID_CTL. */
3789 *puValue = pRange->uValue;
3790 return VINF_SUCCESS;
3791}
3792
3793
3794/** @callback_method_impl{FNCPUMWRMSR} */
3795static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3796{
3797 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3798 /** @todo AMD FIDVID_CTL. */
3799 return VINF_SUCCESS;
3800}
3801
3802
3803/** @callback_method_impl{FNCPUMRDMSR} */
3804static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3805{
3806 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3807 /** @todo AMD FIDVID_STATUS. */
3808 *puValue = pRange->uValue;
3809 return VINF_SUCCESS;
3810}
3811
3812
3813/** @callback_method_impl{FNCPUMRDMSR} */
3814static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3815{
3816 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3817 /** @todo AMD MC. */
3818 *puValue = 0;
3819 return VINF_SUCCESS;
3820}
3821
3822
3823/** @callback_method_impl{FNCPUMWRMSR} */
3824static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3825{
3826 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3827 /** @todo AMD MC. */
3828 return VINF_SUCCESS;
3829}
3830
3831
3832/** @callback_method_impl{FNCPUMRDMSR} */
3833static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3834{
3835 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3836 /** @todo AMD SMM/SMI and I/O trap. */
3837 *puValue = 0;
3838 return VINF_SUCCESS;
3839}
3840
3841
3842/** @callback_method_impl{FNCPUMWRMSR} */
3843static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3844{
3845 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3846 /** @todo AMD SMM/SMI and I/O trap. */
3847 return VINF_SUCCESS;
3848}
3849
3850
3851/** @callback_method_impl{FNCPUMRDMSR} */
3852static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3853{
3854 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3855 /** @todo AMD SMM/SMI and I/O trap. */
3856 *puValue = 0;
3857 return VINF_SUCCESS;
3858}
3859
3860
3861/** @callback_method_impl{FNCPUMWRMSR} */
3862static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3863{
3864 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3865 /** @todo AMD SMM/SMI and I/O trap. */
3866 return VINF_SUCCESS;
3867}
3868
3869
3870/** @callback_method_impl{FNCPUMRDMSR} */
3871static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3872{
3873 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3874 /** @todo Interrupt pending message. */
3875 *puValue = 0;
3876 return VINF_SUCCESS;
3877}
3878
3879
3880/** @callback_method_impl{FNCPUMWRMSR} */
3881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3882{
3883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3884 /** @todo Interrupt pending message. */
3885 return VINF_SUCCESS;
3886}
3887
3888
3889/** @callback_method_impl{FNCPUMRDMSR} */
3890static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3891{
3892 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3893 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3894 *puValue = 0;
3895 return VINF_SUCCESS;
3896}
3897
3898
3899/** @callback_method_impl{FNCPUMWRMSR} */
3900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3901{
3902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3903 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3904 return VINF_SUCCESS;
3905}
3906
3907
3908/** @callback_method_impl{FNCPUMRDMSR} */
3909static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3910{
3911 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3912 /** @todo AMD MMIO Configuration base address. */
3913 *puValue = 0;
3914 return VINF_SUCCESS;
3915}
3916
3917
3918/** @callback_method_impl{FNCPUMWRMSR} */
3919static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3920{
3921 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3922 /** @todo AMD MMIO Configuration base address. */
3923 return VINF_SUCCESS;
3924}
3925
3926
3927/** @callback_method_impl{FNCPUMRDMSR} */
3928static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3929{
3930 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3931 /** @todo AMD 0xc0010059. */
3932 *puValue = 0;
3933 return VINF_SUCCESS;
3934}
3935
3936
3937/** @callback_method_impl{FNCPUMWRMSR} */
3938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3939{
3940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3941 /** @todo AMD 0xc0010059. */
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** @callback_method_impl{FNCPUMRDMSR} */
3947static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3948{
3949 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3950 /** @todo AMD P-states. */
3951 *puValue = pRange->uValue;
3952 return VINF_SUCCESS;
3953}
3954
3955
3956/** @callback_method_impl{FNCPUMRDMSR} */
3957static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3958{
3959 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3960 /** @todo AMD P-states. */
3961 *puValue = pRange->uValue;
3962 return VINF_SUCCESS;
3963}
3964
3965
3966/** @callback_method_impl{FNCPUMWRMSR} */
3967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3968{
3969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3970 /** @todo AMD P-states. */
3971 return VINF_SUCCESS;
3972}
3973
3974
3975/** @callback_method_impl{FNCPUMRDMSR} */
3976static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3977{
3978 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3979 /** @todo AMD P-states. */
3980 *puValue = pRange->uValue;
3981 return VINF_SUCCESS;
3982}
3983
3984
3985/** @callback_method_impl{FNCPUMWRMSR} */
3986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3987{
3988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3989 /** @todo AMD P-states. */
3990 return VINF_SUCCESS;
3991}
3992
3993
3994/** @callback_method_impl{FNCPUMRDMSR} */
3995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3996{
3997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3998 /** @todo AMD P-states. */
3999 *puValue = pRange->uValue;
4000 return VINF_SUCCESS;
4001}
4002
4003
4004/** @callback_method_impl{FNCPUMWRMSR} */
4005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4006{
4007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4008 /** @todo AMD P-states. */
4009 return VINF_SUCCESS;
4010}
4011
4012
4013/** @callback_method_impl{FNCPUMRDMSR} */
4014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4015{
4016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4017 /** @todo AMD P-states. */
4018 *puValue = pRange->uValue;
4019 return VINF_SUCCESS;
4020}
4021
4022
4023/** @callback_method_impl{FNCPUMWRMSR} */
4024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4025{
4026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4027 /** @todo AMD P-states. */
4028 return VINF_SUCCESS;
4029}
4030
4031
4032/** @callback_method_impl{FNCPUMRDMSR} */
4033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4034{
4035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4036 /** @todo AMD P-states. */
4037 *puValue = pRange->uValue;
4038 return VINF_SUCCESS;
4039}
4040
4041
4042/** @callback_method_impl{FNCPUMWRMSR} */
4043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4044{
4045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4046 /* Note! Writing 0 does not seem to #GP; it is unclear whether it changes the value at all... */
4047 /** @todo AMD P-states. */
4048 return VINF_SUCCESS;
4049}
4050
4051
4052/** @callback_method_impl{FNCPUMRDMSR} */
4053static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4054{
4055 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4056 /** @todo AMD C-states. */
4057 *puValue = 0;
4058 return VINF_SUCCESS;
4059}
4060
4061
4062/** @callback_method_impl{FNCPUMWRMSR} */
4063static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4064{
4065 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4066 /** @todo AMD C-states. */
4067 return VINF_SUCCESS;
4068}
4069
4070
4071/** @callback_method_impl{FNCPUMRDMSR} */
4072static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4073{
4074 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4075 /** @todo AMD machine checks. */
4076 *puValue = 0;
4077 return VINF_SUCCESS;
4078}
4079
4080
4081/** @callback_method_impl{FNCPUMWRMSR} */
4082static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4083{
4084 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4085 /** @todo AMD machine checks. */
4086 return VINF_SUCCESS;
4087}
4088
4089
4090/** @callback_method_impl{FNCPUMRDMSR} */
4091static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4092{
4093 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4094 /** @todo AMD SMM. */
4095 *puValue = 0;
4096 return VINF_SUCCESS;
4097}
4098
4099
4100/** @callback_method_impl{FNCPUMWRMSR} */
4101static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4102{
4103 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4104 /** @todo AMD SMM. */
4105 return VINF_SUCCESS;
4106}
4107
4108
4109/** @callback_method_impl{FNCPUMRDMSR} */
4110static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4111{
4112 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4113 /** @todo AMD SMM. */
4114 *puValue = 0;
4115 return VINF_SUCCESS;
4116}
4117
4118
4119/** @callback_method_impl{FNCPUMWRMSR} */
4120static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4121{
4122 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4123 /** @todo AMD SMM. */
4124 return VINF_SUCCESS;
4125}
4126
4127
4128
4129/** @callback_method_impl{FNCPUMRDMSR} */
4130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4131{
4132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4133 /** @todo AMD SMM. */
4134 *puValue = 0;
4135 return VINF_SUCCESS;
4136}
4137
4138
4139/** @callback_method_impl{FNCPUMWRMSR} */
4140static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4141{
4142 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4143 /** @todo AMD SMM. */
4144 return VINF_SUCCESS;
4145}
4146
4147
4148/** @callback_method_impl{FNCPUMRDMSR} */
4149static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4150{
4151 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
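    /* When SVM is exposed, only the LOCK bit is reported as set, presumably so the guest
       sees SVM_DISABLE as clear and locked in place, i.e. SVM enabled and not switchable;
       the write handler below relies on this (see cpumMsrWr_AmdK8VmCr). */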
4152 PVM pVM = pVCpu->CTX_SUFF(pVM);
4153 if (pVM->cpum.s.GuestFeatures.fSvm)
4154 *puValue = MSR_K8_VM_CR_LOCK;
4155 else
4156 *puValue = 0;
4157 return VINF_SUCCESS;
4158}
4159
4160
4161/** @callback_method_impl{FNCPUMWRMSR} */
4162static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4163{
4164 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4165 PVM pVM = pVCpu->CTX_SUFF(pVM);
4166 if (pVM->cpum.s.GuestFeatures.fSvm)
4167 {
4168 /* Silently ignore writes to the LOCK and SVM_DISABLE bits when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
4169 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4170 return VERR_CPUM_RAISE_GP_0;
4171 return VINF_SUCCESS;
4172 }
4173 return VERR_CPUM_RAISE_GP_0;
4174}
4175
4176
4177/** @callback_method_impl{FNCPUMRDMSR} */
4178static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4179{
4180 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4181 /** @todo AMD IGNNE\# control. */
4182 *puValue = 0;
4183 return VINF_SUCCESS;
4184}
4185
4186
4187/** @callback_method_impl{FNCPUMWRMSR} */
4188static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4189{
4190 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4191 /** @todo AMD IGNNE\# control. */
4192 return VINF_SUCCESS;
4193}
4194
4195
4196/** @callback_method_impl{FNCPUMRDMSR} */
4197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4198{
4199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4200 /** @todo AMD SMM. */
4201 *puValue = 0;
4202 return VINF_SUCCESS;
4203}
4204
4205
4206/** @callback_method_impl{FNCPUMWRMSR} */
4207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4208{
4209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4210 /** @todo AMD SMM. */
4211 return VINF_SUCCESS;
4212}
4213
4214
4215/** @callback_method_impl{FNCPUMRDMSR} */
4216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4217{
4218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4219 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4220 return VINF_SUCCESS;
4221}
4222
4223
4224/** @callback_method_impl{FNCPUMWRMSR} */
4225static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4226{
4227 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
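    /* The host-state save area must be 4 KiB aligned and lie within the guest's physical
       address width, hence the two #GP(0) checks below. */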
4228 if (uValue & UINT64_C(0xfff))
4229 {
4230 Log(("CPUM: Invalid setting of low 12 bits set writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4231 return VERR_CPUM_RAISE_GP_0;
4232 }
4233
4234 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4235 if (fInvPhysMask & uValue)
4236 {
4237 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4238 idMsr, uValue, uValue & fInvPhysMask));
4239 return VERR_CPUM_RAISE_GP_0;
4240 }
4241
4242 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4243 return VINF_SUCCESS;
4244}
4245
4246
4247/** @callback_method_impl{FNCPUMRDMSR} */
4248static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4249{
4250 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4251 /** @todo AMD SVM. */
4252 *puValue = 0; /* RAZ */
4253 return VINF_SUCCESS;
4254}
4255
4256
4257/** @callback_method_impl{FNCPUMWRMSR} */
4258static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4259{
4260 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4261 /** @todo AMD SVM. */
4262 return VINF_SUCCESS;
4263}
4264
4265
4266/** @callback_method_impl{FNCPUMRDMSR} */
4267static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4268{
4269 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4270 /** @todo AMD SMM. */
4271 *puValue = 0; /* RAZ */
4272 return VINF_SUCCESS;
4273}
4274
4275
4276/** @callback_method_impl{FNCPUMWRMSR} */
4277static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4278{
4279 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4280 /** @todo AMD SMM. */
4281 return VINF_SUCCESS;
4282}
4283
4284
4285/** @callback_method_impl{FNCPUMRDMSR} */
4286static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4287{
4288 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4289 /** @todo AMD SMM/SMI. */
4290 *puValue = 0;
4291 return VINF_SUCCESS;
4292}
4293
4294
4295/** @callback_method_impl{FNCPUMWRMSR} */
4296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4297{
4298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4299 /** @todo AMD SMM/SMI. */
4300 return VINF_SUCCESS;
4301}
4302
4303
4304/** @callback_method_impl{FNCPUMRDMSR} */
4305static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4306{
4307 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4308 /** @todo AMD OS visible workaround. */
4309 *puValue = pRange->uValue;
4310 return VINF_SUCCESS;
4311}
4312
4313
4314/** @callback_method_impl{FNCPUMWRMSR} */
4315static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4316{
4317 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4318 /** @todo AMD OS visible workaround. */
4319 return VINF_SUCCESS;
4320}
4321
4322
4323/** @callback_method_impl{FNCPUMRDMSR} */
4324static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4325{
4326 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4327 /** @todo AMD OS visible workaround. */
4328 *puValue = 0;
4329 return VINF_SUCCESS;
4330}
4331
4332
4333/** @callback_method_impl{FNCPUMWRMSR} */
4334static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4335{
4336 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4337 /** @todo AMD OS visible workaround. */
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** @callback_method_impl{FNCPUMRDMSR} */
4343static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4344{
4345 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4346 /** @todo AMD L2I performance counters. */
4347 *puValue = 0;
4348 return VINF_SUCCESS;
4349}
4350
4351
4352/** @callback_method_impl{FNCPUMWRMSR} */
4353static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4354{
4355 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4356 /** @todo AMD L2I performance counters. */
4357 return VINF_SUCCESS;
4358}
4359
4360
4361/** @callback_method_impl{FNCPUMRDMSR} */
4362static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4363{
4364 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4365 /** @todo AMD L2I performance counters. */
4366 *puValue = 0;
4367 return VINF_SUCCESS;
4368}
4369
4370
4371/** @callback_method_impl{FNCPUMWRMSR} */
4372static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4373{
4374 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4375 /** @todo AMD L2I performance counters. */
4376 return VINF_SUCCESS;
4377}
4378
4379
4380/** @callback_method_impl{FNCPUMRDMSR} */
4381static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4382{
4383 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4384 /** @todo AMD Northbridge performance counters. */
4385 *puValue = 0;
4386 return VINF_SUCCESS;
4387}
4388
4389
4390/** @callback_method_impl{FNCPUMWRMSR} */
4391static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4392{
4393 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4394 /** @todo AMD Northbridge performance counters. */
4395 return VINF_SUCCESS;
4396}
4397
4398
4399/** @callback_method_impl{FNCPUMRDMSR} */
4400static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4401{
4402 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4403 /** @todo AMD Northbridge performance counters. */
4404 *puValue = 0;
4405 return VINF_SUCCESS;
4406}
4407
4408
4409/** @callback_method_impl{FNCPUMWRMSR} */
4410static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4411{
4412 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4413 /** @todo AMD Northbridge performance counters. */
4414 return VINF_SUCCESS;
4415}
4416
4417
4418/** @callback_method_impl{FNCPUMRDMSR} */
4419static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4420{
4421 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4422    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4423     *        CPUs. Needs to be explored and K7 presence verified. */
4424    /** @todo Undocumented register, only mentioned in fam15h erratum \#608. */
4425 *puValue = pRange->uValue;
4426 return VINF_SUCCESS;
4427}
4428
4429
4430/** @callback_method_impl{FNCPUMWRMSR} */
4431static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4432{
4433 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4434    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4435     *        CPUs. Needs to be explored and K7 presence verified. */
4436    /** @todo Undocumented register, only mentioned in fam15h erratum \#608. */
4437 return VINF_SUCCESS;
4438}
4439
4440
4441/** @callback_method_impl{FNCPUMRDMSR} */
4442static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4443{
4444 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4445    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4446     *        CPUs. Needs to be explored and K7 presence verified. */
4447    /** @todo Undocumented register, only mentioned in fam16h BKDG r3.00 when
4448     *        describing EBL_CR_POWERON. */
4449 *puValue = pRange->uValue;
4450 return VINF_SUCCESS;
4451}
4452
4453
4454/** @callback_method_impl{FNCPUMWRMSR} */
4455static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4456{
4457 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4458    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4459     *        CPUs. Needs to be explored and K7 presence verified. */
4460    /** @todo Undocumented register, only mentioned in fam16h BKDG r3.00 when
4461     *        describing EBL_CR_POWERON. */
4462 return VINF_SUCCESS;
4463}
4464
4465
4466/** @callback_method_impl{FNCPUMRDMSR} */
4467static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4468{
4469 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4470 bool fIgnored;
4471 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4472 if (pLeaf)
4473 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4474 else
4475 *puValue = 0;
4476 return VINF_SUCCESS;
4477}
4478
4479
4480/** @callback_method_impl{FNCPUMWRMSR} */
4481static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4482{
4483 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4484 /** @todo Changing CPUID leaf 7/0. */
4485 return VINF_SUCCESS;
4486}
4487
4488
4489/** @callback_method_impl{FNCPUMRDMSR} */
4490static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4491{
4492 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4493 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4494 if (pLeaf)
4495 *puValue = pLeaf->uEcx;
4496 else
4497 *puValue = 0;
4498 return VINF_SUCCESS;
4499}
4500
4501
4502/** @callback_method_impl{FNCPUMWRMSR} */
4503static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4504{
4505 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4506 /** @todo Changing CPUID leaf 6. */
4507 return VINF_SUCCESS;
4508}
4509
4510
4511/** @callback_method_impl{FNCPUMRDMSR} */
4512static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4513{
4514 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4515 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4516 if (pLeaf)
4517 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4518 else
4519 *puValue = 0;
4520 return VINF_SUCCESS;
4521}
4522
4523
4524/** @callback_method_impl{FNCPUMWRMSR} */
4525static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4526{
4527 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4528    /** @todo Changing CPUID leaf 0x00000001. */
4529 return VINF_SUCCESS;
4530}
4531
4532
4533/** @callback_method_impl{FNCPUMRDMSR} */
4534static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4535{
4536 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4537 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4538 if (pLeaf)
4539 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4540 else
4541 *puValue = 0;
4542 return VINF_SUCCESS;
4543}
4544
4545
4546/** @callback_method_impl{FNCPUMWRMSR} */
4547static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4548{
4549 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4550 /** @todo Changing CPUID leaf 0x80000001. */
4551 return VINF_SUCCESS;
4552}
4553
4554
4555/** @callback_method_impl{FNCPUMRDMSR} */
4556static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4557{
4558 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4559 /** @todo Fake AMD microcode patching. */
4560 *puValue = pRange->uValue;
4561 return VINF_SUCCESS;
4562}
4563
4564
4565/** @callback_method_impl{FNCPUMWRMSR} */
4566static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4567{
4568 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4569 /** @todo Fake AMD microcode patching. */
4570 return VINF_SUCCESS;
4571}
4572
4573
4574/** @callback_method_impl{FNCPUMRDMSR} */
4575static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4576{
4577 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4578    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4579     *        CPUs. Needs to be explored and K7 presence verified. */
4580 /** @todo undocumented */
4581 *puValue = 0;
4582 return VINF_SUCCESS;
4583}
4584
4585
4586/** @callback_method_impl{FNCPUMWRMSR} */
4587static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4588{
4589 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4590    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4591     *        CPUs. Needs to be explored and K7 presence verified. */
4592 /** @todo undocumented */
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/** @callback_method_impl{FNCPUMRDMSR} */
4598static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4599{
4600 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4601    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4602     *        CPUs. Needs to be explored and K7 presence verified. */
4603 /** @todo undocumented */
4604 *puValue = 0;
4605 return VINF_SUCCESS;
4606}
4607
4608
4609/** @callback_method_impl{FNCPUMWRMSR} */
4610static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4611{
4612 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4613    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4614     *        CPUs. Needs to be explored and K7 presence verified. */
4615 /** @todo undocumented */
4616 return VINF_SUCCESS;
4617}
4618
4619
4620/** @callback_method_impl{FNCPUMRDMSR} */
4621static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4622{
4623 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4624    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4625     *        CPUs. Needs to be explored and K7 presence verified. */
4626 /** @todo undocumented */
4627 *puValue = 0;
4628 return VINF_SUCCESS;
4629}
4630
4631
4632/** @callback_method_impl{FNCPUMWRMSR} */
4633static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4634{
4635 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4636    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4637     *        CPUs. Needs to be explored and K7 presence verified. */
4638 /** @todo undocumented */
4639 return VINF_SUCCESS;
4640}
4641
4642
4643/** @callback_method_impl{FNCPUMRDMSR} */
4644static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4645{
4646 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4647    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4648     *        CPUs. Needs to be explored and K7 presence verified. */
4649 /** @todo undocumented */
4650 *puValue = 0;
4651 return VINF_SUCCESS;
4652}
4653
4654
4655/** @callback_method_impl{FNCPUMWRMSR} */
4656static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4657{
4658 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4659    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4660     *        CPUs. Needs to be explored and K7 presence verified. */
4661 /** @todo undocumented */
4662 return VINF_SUCCESS;
4663}
4664
4665
4666/** @callback_method_impl{FNCPUMRDMSR} */
4667static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4668{
4669 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4670    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4671     *        CPUs. Needs to be explored and K7 presence verified. */
4672 /** @todo undocumented */
4673 *puValue = 0;
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** @callback_method_impl{FNCPUMWRMSR} */
4679static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4680{
4681 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4682    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4683     *        CPUs. Needs to be explored and K7 presence verified. */
4684 /** @todo undocumented */
4685 return VINF_SUCCESS;
4686}
4687
4688
4689/** @callback_method_impl{FNCPUMRDMSR} */
4690static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4691{
4692 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4693    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4694     *        CPUs. Needs to be explored and K7 presence verified. */
4695 /** @todo undocumented */
4696 *puValue = 0;
4697 return VINF_SUCCESS;
4698}
4699
4700
4701/** @callback_method_impl{FNCPUMWRMSR} */
4702static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4703{
4704 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4705    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4706     *        CPUs. Needs to be explored and K7 presence verified. */
4707 /** @todo undocumented */
4708 return VINF_SUCCESS;
4709}
4710
4711
4712/** @callback_method_impl{FNCPUMRDMSR} */
4713static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4714{
4715 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4716    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4717     *        CPUs. Needs to be explored and K7 presence verified. */
4718    /** @todo AMD node ID and BIOS scratch. */
4719 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4720 return VINF_SUCCESS;
4721}
4722
4723
4724/** @callback_method_impl{FNCPUMWRMSR} */
4725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4726{
4727 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4728    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4729     *        CPUs. Needs to be explored and K7 presence verified. */
4730    /** @todo AMD node ID and BIOS scratch. */
4731 return VINF_SUCCESS;
4732}
4733
4734
4735/** @callback_method_impl{FNCPUMRDMSR} */
4736static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4737{
4738 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4739    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4740     *        CPUs. Needs to be explored and K7 presence verified. */
4741 /** @todo AMD DRx address masking (range breakpoints). */
4742 *puValue = 0;
4743 return VINF_SUCCESS;
4744}
4745
4746
4747/** @callback_method_impl{FNCPUMWRMSR} */
4748static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4749{
4750 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4751    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4752     *        CPUs. Needs to be explored and K7 presence verified. */
4753 /** @todo AMD DRx address masking (range breakpoints). */
4754 return VINF_SUCCESS;
4755}
4756
4757
4758/** @callback_method_impl{FNCPUMRDMSR} */
4759static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4760{
4761 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4762    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4763     *        CPUs. Needs to be explored and K7 presence verified. */
4764    /** @todo AMD undocumented debugging features. */
4765 *puValue = 0;
4766 return VINF_SUCCESS;
4767}
4768
4769
4770/** @callback_method_impl{FNCPUMWRMSR} */
4771static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4772{
4773 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4774    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4775     *        CPUs. Needs to be explored and K7 presence verified. */
4776    /** @todo AMD undocumented debugging features. */
4777 return VINF_SUCCESS;
4778}
4779
4780
4781/** @callback_method_impl{FNCPUMRDMSR} */
4782static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4783{
4784 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4785    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4786     *        CPUs. Needs to be explored and K7 presence verified. */
4787    /** @todo AMD undocumented debugging features. */
4788 *puValue = 0;
4789 return VINF_SUCCESS;
4790}
4791
4792
4793/** @callback_method_impl{FNCPUMWRMSR} */
4794static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4795{
4796 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4797    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4798     *        CPUs. Needs to be explored and K7 presence verified. */
4799    /** @todo AMD undocumented debugging features. */
4800 return VINF_SUCCESS;
4801}
4802
4803
4804/** @callback_method_impl{FNCPUMRDMSR} */
4805static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4806{
4807 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4808    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4809     *        CPUs. Needs to be explored and K7 presence verified. */
4810 /** @todo AMD load-store config. */
4811 *puValue = 0;
4812 return VINF_SUCCESS;
4813}
4814
4815
4816/** @callback_method_impl{FNCPUMWRMSR} */
4817static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4818{
4819 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4820    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4821     *        CPUs. Needs to be explored and K7 presence verified. */
4822 /** @todo AMD load-store config. */
4823 return VINF_SUCCESS;
4824}
4825
4826
4827/** @callback_method_impl{FNCPUMRDMSR} */
4828static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4829{
4830 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4831    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4832     *        CPUs. Needs to be explored and K7 presence verified. */
4833 /** @todo AMD instruction cache config. */
4834 *puValue = 0;
4835 return VINF_SUCCESS;
4836}
4837
4838
4839/** @callback_method_impl{FNCPUMWRMSR} */
4840static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4841{
4842 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4843    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4844     *        CPUs. Needs to be explored and K7 presence verified. */
4845 /** @todo AMD instruction cache config. */
4846 return VINF_SUCCESS;
4847}
4848
4849
4850/** @callback_method_impl{FNCPUMRDMSR} */
4851static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4852{
4853 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4854    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4855     *        CPUs. Needs to be explored and K7 presence verified. */
4856 /** @todo AMD data cache config. */
4857 *puValue = 0;
4858 return VINF_SUCCESS;
4859}
4860
4861
4862/** @callback_method_impl{FNCPUMWRMSR} */
4863static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4864{
4865 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4866    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4867     *        CPUs. Needs to be explored and K7 presence verified. */
4868 /** @todo AMD data cache config. */
4869 return VINF_SUCCESS;
4870}
4871
4872
4873/** @callback_method_impl{FNCPUMRDMSR} */
4874static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4875{
4876 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4877    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4878     *        CPUs. Needs to be explored and K7 presence verified. */
4879 /** @todo AMD bus unit config. */
4880 *puValue = 0;
4881 return VINF_SUCCESS;
4882}
4883
4884
4885/** @callback_method_impl{FNCPUMWRMSR} */
4886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4887{
4888 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4889    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4890     *        CPUs. Needs to be explored and K7 presence verified. */
4891 /** @todo AMD bus unit config. */
4892 return VINF_SUCCESS;
4893}
4894
4895
4896/** @callback_method_impl{FNCPUMRDMSR} */
4897static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4898{
4899 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4900    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4901     *        CPUs. Needs to be explored and K7 presence verified. */
4902    /** @todo Undocumented AMD debug control register \#2. */
4903 *puValue = 0;
4904 return VINF_SUCCESS;
4905}
4906
4907
4908/** @callback_method_impl{FNCPUMWRMSR} */
4909static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4910{
4911 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4912    /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4913     *        CPUs. Needs to be explored and K7 presence verified. */
4914    /** @todo Undocumented AMD debug control register \#2. */
4915 return VINF_SUCCESS;
4916}
4917
4918
4919/** @callback_method_impl{FNCPUMRDMSR} */
4920static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4921{
4922 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4923 /** @todo AMD FPU config. */
4924 *puValue = 0;
4925 return VINF_SUCCESS;
4926}
4927
4928
4929/** @callback_method_impl{FNCPUMWRMSR} */
4930static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4931{
4932 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4933 /** @todo AMD FPU config. */
4934 return VINF_SUCCESS;
4935}
4936
4937
4938/** @callback_method_impl{FNCPUMRDMSR} */
4939static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4940{
4941 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4942 /** @todo AMD decoder config. */
4943 *puValue = 0;
4944 return VINF_SUCCESS;
4945}
4946
4947
4948/** @callback_method_impl{FNCPUMWRMSR} */
4949static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4950{
4951 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4952 /** @todo AMD decoder config. */
4953 return VINF_SUCCESS;
4954}
4955
4956
4957/** @callback_method_impl{FNCPUMRDMSR} */
4958static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4959{
4960 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4961 /* Note! 10h and 16h */
4962 /** @todo AMD bus unit config. */
4963 *puValue = 0;
4964 return VINF_SUCCESS;
4965}
4966
4967
4968/** @callback_method_impl{FNCPUMWRMSR} */
4969static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4970{
4971 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4972 /* Note! 10h and 16h */
4973 /** @todo AMD bus unit config. */
4974 return VINF_SUCCESS;
4975}
4976
4977
4978/** @callback_method_impl{FNCPUMRDMSR} */
4979static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4980{
4981 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4982 /** @todo AMD unit config. */
4983 *puValue = 0;
4984 return VINF_SUCCESS;
4985}
4986
4987
4988/** @callback_method_impl{FNCPUMWRMSR} */
4989static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4990{
4991 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4992 /** @todo AMD unit config. */
4993 return VINF_SUCCESS;
4994}
4995
4996
4997/** @callback_method_impl{FNCPUMRDMSR} */
4998static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4999{
5000 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5001 /** @todo AMD unit config 2. */
5002 *puValue = 0;
5003 return VINF_SUCCESS;
5004}
5005
5006
5007/** @callback_method_impl{FNCPUMWRMSR} */
5008static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5009{
5010 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5011 /** @todo AMD unit config 2. */
5012 return VINF_SUCCESS;
5013}
5014
5015
5016/** @callback_method_impl{FNCPUMRDMSR} */
5017static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5018{
5019 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5020 /** @todo AMD combined unit config 3. */
5021 *puValue = 0;
5022 return VINF_SUCCESS;
5023}
5024
5025
5026/** @callback_method_impl{FNCPUMWRMSR} */
5027static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5028{
5029 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5030 /** @todo AMD combined unit config 3. */
5031 return VINF_SUCCESS;
5032}
5033
5034
5035/** @callback_method_impl{FNCPUMRDMSR} */
5036static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5037{
5038 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5039 /** @todo AMD execution unit config. */
5040 *puValue = 0;
5041 return VINF_SUCCESS;
5042}
5043
5044
5045/** @callback_method_impl{FNCPUMWRMSR} */
5046static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5047{
5048 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5049 /** @todo AMD execution unit config. */
5050 return VINF_SUCCESS;
5051}
5052
5053
5054/** @callback_method_impl{FNCPUMRDMSR} */
5055static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5056{
5057 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5058 /** @todo AMD load-store config 2. */
5059 *puValue = 0;
5060 return VINF_SUCCESS;
5061}
5062
5063
5064/** @callback_method_impl{FNCPUMWRMSR} */
5065static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5066{
5067 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5068 /** @todo AMD load-store config 2. */
5069 return VINF_SUCCESS;
5070}
5071
5072
5073/** @callback_method_impl{FNCPUMRDMSR} */
5074static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5075{
5076 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5077 /** @todo AMD IBS. */
5078 *puValue = 0;
5079 return VINF_SUCCESS;
5080}
5081
5082
5083/** @callback_method_impl{FNCPUMWRMSR} */
5084static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5085{
5086 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5087 /** @todo AMD IBS. */
5088 return VINF_SUCCESS;
5089}
5090
5091
5092/** @callback_method_impl{FNCPUMRDMSR} */
5093static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5094{
5095 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5096 /** @todo AMD IBS. */
5097 *puValue = 0;
5098 return VINF_SUCCESS;
5099}
5100
5101
5102/** @callback_method_impl{FNCPUMWRMSR} */
5103static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5104{
5105 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5106 /** @todo AMD IBS. */
5107 return VINF_SUCCESS;
5108}
5109
5110
5111/** @callback_method_impl{FNCPUMRDMSR} */
5112static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5113{
5114 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5115 /** @todo AMD IBS. */
5116 *puValue = 0;
5117 return VINF_SUCCESS;
5118}
5119
5120
5121/** @callback_method_impl{FNCPUMWRMSR} */
5122static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5123{
5124 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5125 /** @todo AMD IBS. */
5126 return VINF_SUCCESS;
5127}
5128
5129
5130/** @callback_method_impl{FNCPUMRDMSR} */
5131static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5132{
5133 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5134 /** @todo AMD IBS. */
5135 *puValue = 0;
5136 return VINF_SUCCESS;
5137}
5138
5139
5140/** @callback_method_impl{FNCPUMWRMSR} */
5141static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5142{
5143 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5144 /** @todo AMD IBS. */
5145 return VINF_SUCCESS;
5146}
5147
5148
5149/** @callback_method_impl{FNCPUMRDMSR} */
5150static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5151{
5152 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5153 /** @todo AMD IBS. */
5154 *puValue = 0;
5155 return VINF_SUCCESS;
5156}
5157
5158
5159/** @callback_method_impl{FNCPUMWRMSR} */
5160static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5161{
5162 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5163 /** @todo AMD IBS. */
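    /* This MSR holds a linear address, so non-canonical values (bits 63:48 not a sign
       extension of bit 47, assuming 48-bit linear addressing) are rejected with #GP(0),
       matching the checks on the other IBS linear-address MSRs further down. */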
5164 if (!X86_IS_CANONICAL(uValue))
5165 {
5166 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5167 return VERR_CPUM_RAISE_GP_0;
5168 }
5169 return VINF_SUCCESS;
5170}
5171
5172
5173/** @callback_method_impl{FNCPUMRDMSR} */
5174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5175{
5176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5177 /** @todo AMD IBS. */
5178 *puValue = 0;
5179 return VINF_SUCCESS;
5180}
5181
5182
5183/** @callback_method_impl{FNCPUMWRMSR} */
5184static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5185{
5186 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5187 /** @todo AMD IBS. */
5188 return VINF_SUCCESS;
5189}
5190
5191
5192/** @callback_method_impl{FNCPUMRDMSR} */
5193static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5194{
5195 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5196 /** @todo AMD IBS. */
5197 *puValue = 0;
5198 return VINF_SUCCESS;
5199}
5200
5201
5202/** @callback_method_impl{FNCPUMWRMSR} */
5203static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5204{
5205 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5206 /** @todo AMD IBS. */
5207 return VINF_SUCCESS;
5208}
5209
5210
5211/** @callback_method_impl{FNCPUMRDMSR} */
5212static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5213{
5214 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5215 /** @todo AMD IBS. */
5216 *puValue = 0;
5217 return VINF_SUCCESS;
5218}
5219
5220
5221/** @callback_method_impl{FNCPUMWRMSR} */
5222static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5223{
5224 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5225 /** @todo AMD IBS. */
5226 return VINF_SUCCESS;
5227}
5228
5229
5230/** @callback_method_impl{FNCPUMRDMSR} */
5231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5232{
5233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5234 /** @todo AMD IBS. */
5235 *puValue = 0;
5236 return VINF_SUCCESS;
5237}
5238
5239
5240/** @callback_method_impl{FNCPUMWRMSR} */
5241static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5242{
5243 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5244 /** @todo AMD IBS. */
5245 if (!X86_IS_CANONICAL(uValue))
5246 {
5247 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5248 return VERR_CPUM_RAISE_GP_0;
5249 }
5250 return VINF_SUCCESS;
5251}
5252
5253
5254/** @callback_method_impl{FNCPUMRDMSR} */
5255static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5256{
5257 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5258 /** @todo AMD IBS. */
5259 *puValue = 0;
5260 return VINF_SUCCESS;
5261}
5262
5263
5264/** @callback_method_impl{FNCPUMWRMSR} */
5265static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5266{
5267 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5268 /** @todo AMD IBS. */
5269 return VINF_SUCCESS;
5270}
5271
5272
5273/** @callback_method_impl{FNCPUMRDMSR} */
5274static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5275{
5276 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5277 /** @todo AMD IBS. */
5278 *puValue = 0;
5279 return VINF_SUCCESS;
5280}
5281
5282
5283/** @callback_method_impl{FNCPUMWRMSR} */
5284static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5285{
5286 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5287 /** @todo AMD IBS. */
5288 return VINF_SUCCESS;
5289}
5290
5291
5292/** @callback_method_impl{FNCPUMRDMSR} */
5293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5294{
5295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5296 /** @todo AMD IBS. */
5297 *puValue = 0;
5298 return VINF_SUCCESS;
5299}
5300
5301
5302/** @callback_method_impl{FNCPUMWRMSR} */
5303static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5304{
5305 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5306 /** @todo AMD IBS. */
5307 if (!X86_IS_CANONICAL(uValue))
5308 {
5309 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5310 return VERR_CPUM_RAISE_GP_0;
5311 }
5312 return VINF_SUCCESS;
5313}
5314
5315
5316
5317/*
5318 * GIM MSRs.
5319 * GIM MSRs.
5320 * GIM MSRs.
5321 */
5322
5323
5324/** @callback_method_impl{FNCPUMRDMSR} */
5325static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5326{
5327#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5328    /* Raise #GP(0) like a physical CPU would since the nested-hypervisor hasn't intercepted these MSRs. */
5329 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5330 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5331 return VERR_CPUM_RAISE_GP_0;
5332#endif
5333 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5334}
5335
5336
5337/** @callback_method_impl{FNCPUMWRMSR} */
5338static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5339{
5340#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5341    /* Raise #GP(0) like a physical CPU would since the nested-hypervisor hasn't intercepted these MSRs. */
5342 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5343 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5344 return VERR_CPUM_RAISE_GP_0;
5345#endif
5346 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5347}
5348
5349
5350/**
5351 * MSR read function table.
5352 */
5353static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5354{
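    /* Note: the table is presumably indexed by the kCpumMsrRdFn_* values (it is sized
       kCpumMsrRdFn_End), so the entry order must match that enumeration; the Invalid and
       Alias entries have no worker function (NULL). */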
5355 NULL, /* Invalid */
5356 cpumMsrRd_FixedValue,
5357 NULL, /* Alias */
5358 cpumMsrRd_WriteOnly,
5359 cpumMsrRd_Ia32P5McAddr,
5360 cpumMsrRd_Ia32P5McType,
5361 cpumMsrRd_Ia32TimestampCounter,
5362 cpumMsrRd_Ia32PlatformId,
5363 cpumMsrRd_Ia32ApicBase,
5364 cpumMsrRd_Ia32FeatureControl,
5365 cpumMsrRd_Ia32BiosSignId,
5366 cpumMsrRd_Ia32SmmMonitorCtl,
5367 cpumMsrRd_Ia32PmcN,
5368 cpumMsrRd_Ia32MonitorFilterLineSize,
5369 cpumMsrRd_Ia32MPerf,
5370 cpumMsrRd_Ia32APerf,
5371 cpumMsrRd_Ia32MtrrCap,
5372 cpumMsrRd_Ia32MtrrPhysBaseN,
5373 cpumMsrRd_Ia32MtrrPhysMaskN,
5374 cpumMsrRd_Ia32MtrrFixed,
5375 cpumMsrRd_Ia32MtrrDefType,
5376 cpumMsrRd_Ia32Pat,
5377 cpumMsrRd_Ia32SysEnterCs,
5378 cpumMsrRd_Ia32SysEnterEsp,
5379 cpumMsrRd_Ia32SysEnterEip,
5380 cpumMsrRd_Ia32McgCap,
5381 cpumMsrRd_Ia32McgStatus,
5382 cpumMsrRd_Ia32McgCtl,
5383 cpumMsrRd_Ia32DebugCtl,
5384 cpumMsrRd_Ia32SmrrPhysBase,
5385 cpumMsrRd_Ia32SmrrPhysMask,
5386 cpumMsrRd_Ia32PlatformDcaCap,
5387 cpumMsrRd_Ia32CpuDcaCap,
5388 cpumMsrRd_Ia32Dca0Cap,
5389 cpumMsrRd_Ia32PerfEvtSelN,
5390 cpumMsrRd_Ia32PerfStatus,
5391 cpumMsrRd_Ia32PerfCtl,
5392 cpumMsrRd_Ia32FixedCtrN,
5393 cpumMsrRd_Ia32PerfCapabilities,
5394 cpumMsrRd_Ia32FixedCtrCtrl,
5395 cpumMsrRd_Ia32PerfGlobalStatus,
5396 cpumMsrRd_Ia32PerfGlobalCtrl,
5397 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5398 cpumMsrRd_Ia32PebsEnable,
5399 cpumMsrRd_Ia32ClockModulation,
5400 cpumMsrRd_Ia32ThermInterrupt,
5401 cpumMsrRd_Ia32ThermStatus,
5402 cpumMsrRd_Ia32Therm2Ctl,
5403 cpumMsrRd_Ia32MiscEnable,
5404 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5405 cpumMsrRd_Ia32McNCtl2,
5406 cpumMsrRd_Ia32DsArea,
5407 cpumMsrRd_Ia32TscDeadline,
5408 cpumMsrRd_Ia32X2ApicN,
5409 cpumMsrRd_Ia32DebugInterface,
5410 cpumMsrRd_Ia32VmxBasic,
5411 cpumMsrRd_Ia32VmxPinbasedCtls,
5412 cpumMsrRd_Ia32VmxProcbasedCtls,
5413 cpumMsrRd_Ia32VmxExitCtls,
5414 cpumMsrRd_Ia32VmxEntryCtls,
5415 cpumMsrRd_Ia32VmxMisc,
5416 cpumMsrRd_Ia32VmxCr0Fixed0,
5417 cpumMsrRd_Ia32VmxCr0Fixed1,
5418 cpumMsrRd_Ia32VmxCr4Fixed0,
5419 cpumMsrRd_Ia32VmxCr4Fixed1,
5420 cpumMsrRd_Ia32VmxVmcsEnum,
5421 cpumMsrRd_Ia32VmxProcBasedCtls2,
5422 cpumMsrRd_Ia32VmxEptVpidCap,
5423 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5424 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5425 cpumMsrRd_Ia32VmxTrueExitCtls,
5426 cpumMsrRd_Ia32VmxTrueEntryCtls,
5427 cpumMsrRd_Ia32VmxVmFunc,
5428 cpumMsrRd_Ia32SpecCtrl,
5429 cpumMsrRd_Ia32ArchCapabilities,
5430
5431 cpumMsrRd_Amd64Efer,
5432 cpumMsrRd_Amd64SyscallTarget,
5433 cpumMsrRd_Amd64LongSyscallTarget,
5434 cpumMsrRd_Amd64CompSyscallTarget,
5435 cpumMsrRd_Amd64SyscallFlagMask,
5436 cpumMsrRd_Amd64FsBase,
5437 cpumMsrRd_Amd64GsBase,
5438 cpumMsrRd_Amd64KernelGsBase,
5439 cpumMsrRd_Amd64TscAux,
5440
5441 cpumMsrRd_IntelEblCrPowerOn,
5442 cpumMsrRd_IntelI7CoreThreadCount,
5443 cpumMsrRd_IntelP4EbcHardPowerOn,
5444 cpumMsrRd_IntelP4EbcSoftPowerOn,
5445 cpumMsrRd_IntelP4EbcFrequencyId,
5446 cpumMsrRd_IntelP6FsbFrequency,
5447 cpumMsrRd_IntelPlatformInfo,
5448 cpumMsrRd_IntelFlexRatio,
5449 cpumMsrRd_IntelPkgCStConfigControl,
5450 cpumMsrRd_IntelPmgIoCaptureBase,
5451 cpumMsrRd_IntelLastBranchFromToN,
5452 cpumMsrRd_IntelLastBranchFromN,
5453 cpumMsrRd_IntelLastBranchToN,
5454 cpumMsrRd_IntelLastBranchTos,
5455 cpumMsrRd_IntelBblCrCtl,
5456 cpumMsrRd_IntelBblCrCtl3,
5457 cpumMsrRd_IntelI7TemperatureTarget,
5458 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5459 cpumMsrRd_IntelI7MiscPwrMgmt,
5460 cpumMsrRd_IntelP6CrN,
5461 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5462 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5463 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5464 cpumMsrRd_IntelI7SandyAesNiCtl,
5465 cpumMsrRd_IntelI7TurboRatioLimit,
5466 cpumMsrRd_IntelI7LbrSelect,
5467 cpumMsrRd_IntelI7SandyErrorControl,
5468 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5469 cpumMsrRd_IntelI7PowerCtl,
5470 cpumMsrRd_IntelI7SandyPebsNumAlt,
5471 cpumMsrRd_IntelI7PebsLdLat,
5472 cpumMsrRd_IntelI7PkgCnResidencyN,
5473 cpumMsrRd_IntelI7CoreCnResidencyN,
5474 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5475 cpumMsrRd_IntelI7SandyVrMiscConfig,
5476 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5477 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5478 cpumMsrRd_IntelI7SandyPkgC2Residency,
5479 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5480 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5481 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5482 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5483 cpumMsrRd_IntelI7RaplDramPowerLimit,
5484 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5485 cpumMsrRd_IntelI7RaplDramPerfStatus,
5486 cpumMsrRd_IntelI7RaplDramPowerInfo,
5487 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5488 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5489 cpumMsrRd_IntelI7RaplPp0Policy,
5490 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5491 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5492 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5493 cpumMsrRd_IntelI7RaplPp1Policy,
5494 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5495 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5496 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5497 cpumMsrRd_IntelI7IvyConfigTdpControl,
5498 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5499 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5500 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5501 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5502 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5503 cpumMsrRd_IntelI7UncPerfFixedCtr,
5504 cpumMsrRd_IntelI7UncCBoxConfig,
5505 cpumMsrRd_IntelI7UncArbPerfCtrN,
5506 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5507 cpumMsrRd_IntelI7SmiCount,
5508 cpumMsrRd_IntelCore2EmttmCrTablesN,
5509 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5510 cpumMsrRd_IntelCore1ExtConfig,
5511 cpumMsrRd_IntelCore1DtsCalControl,
5512 cpumMsrRd_IntelCore2PeciControl,
5513 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5514
5515 cpumMsrRd_P6LastBranchFromIp,
5516 cpumMsrRd_P6LastBranchToIp,
5517 cpumMsrRd_P6LastIntFromIp,
5518 cpumMsrRd_P6LastIntToIp,
5519
5520 cpumMsrRd_AmdFam15hTscRate,
5521 cpumMsrRd_AmdFam15hLwpCfg,
5522 cpumMsrRd_AmdFam15hLwpCbAddr,
5523 cpumMsrRd_AmdFam10hMc4MiscN,
5524 cpumMsrRd_AmdK8PerfCtlN,
5525 cpumMsrRd_AmdK8PerfCtrN,
5526 cpumMsrRd_AmdK8SysCfg,
5527 cpumMsrRd_AmdK8HwCr,
5528 cpumMsrRd_AmdK8IorrBaseN,
5529 cpumMsrRd_AmdK8IorrMaskN,
5530 cpumMsrRd_AmdK8TopOfMemN,
5531 cpumMsrRd_AmdK8NbCfg1,
5532 cpumMsrRd_AmdK8McXcptRedir,
5533 cpumMsrRd_AmdK8CpuNameN,
5534 cpumMsrRd_AmdK8HwThermalCtrl,
5535 cpumMsrRd_AmdK8SwThermalCtrl,
5536 cpumMsrRd_AmdK8FidVidControl,
5537 cpumMsrRd_AmdK8FidVidStatus,
5538 cpumMsrRd_AmdK8McCtlMaskN,
5539 cpumMsrRd_AmdK8SmiOnIoTrapN,
5540 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5541 cpumMsrRd_AmdK8IntPendingMessage,
5542 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5543 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5544 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5545 cpumMsrRd_AmdFam10hPStateCurLimit,
5546 cpumMsrRd_AmdFam10hPStateControl,
5547 cpumMsrRd_AmdFam10hPStateStatus,
5548 cpumMsrRd_AmdFam10hPStateN,
5549 cpumMsrRd_AmdFam10hCofVidControl,
5550 cpumMsrRd_AmdFam10hCofVidStatus,
5551 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5552 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5553 cpumMsrRd_AmdK8SmmBase,
5554 cpumMsrRd_AmdK8SmmAddr,
5555 cpumMsrRd_AmdK8SmmMask,
5556 cpumMsrRd_AmdK8VmCr,
5557 cpumMsrRd_AmdK8IgnNe,
5558 cpumMsrRd_AmdK8SmmCtl,
5559 cpumMsrRd_AmdK8VmHSavePa,
5560 cpumMsrRd_AmdFam10hVmLockKey,
5561 cpumMsrRd_AmdFam10hSmmLockKey,
5562 cpumMsrRd_AmdFam10hLocalSmiStatus,
5563 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5564 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5565 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5566 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5567 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5568 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5569 cpumMsrRd_AmdK7MicrocodeCtl,
5570 cpumMsrRd_AmdK7ClusterIdMaybe,
5571 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5572 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5573 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5574 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5575 cpumMsrRd_AmdK8PatchLevel,
5576 cpumMsrRd_AmdK7DebugStatusMaybe,
5577 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5578 cpumMsrRd_AmdK7BHTracePtrMaybe,
5579 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5580 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5581 cpumMsrRd_AmdK7FastFlushCountMaybe,
5582 cpumMsrRd_AmdK7NodeId,
5583 cpumMsrRd_AmdK7DrXAddrMaskN,
5584 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5585 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5586 cpumMsrRd_AmdK7LoadStoreCfg,
5587 cpumMsrRd_AmdK7InstrCacheCfg,
5588 cpumMsrRd_AmdK7DataCacheCfg,
5589 cpumMsrRd_AmdK7BusUnitCfg,
5590 cpumMsrRd_AmdK7DebugCtl2Maybe,
5591 cpumMsrRd_AmdFam15hFpuCfg,
5592 cpumMsrRd_AmdFam15hDecoderCfg,
5593 cpumMsrRd_AmdFam10hBusUnitCfg2,
5594 cpumMsrRd_AmdFam15hCombUnitCfg,
5595 cpumMsrRd_AmdFam15hCombUnitCfg2,
5596 cpumMsrRd_AmdFam15hCombUnitCfg3,
5597 cpumMsrRd_AmdFam15hExecUnitCfg,
5598 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5599 cpumMsrRd_AmdFam10hIbsFetchCtl,
5600 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5601 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5602 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5603 cpumMsrRd_AmdFam10hIbsOpRip,
5604 cpumMsrRd_AmdFam10hIbsOpData,
5605 cpumMsrRd_AmdFam10hIbsOpData2,
5606 cpumMsrRd_AmdFam10hIbsOpData3,
5607 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5608 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5609 cpumMsrRd_AmdFam10hIbsCtl,
5610 cpumMsrRd_AmdFam14hIbsBrTarget,
5611
5612 cpumMsrRd_Gim
5613};
5614
5615
5616/**
5617 * MSR write function table.
5618 */
5619static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5620{
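    /* Note: same scheme as g_aCpumRdMsrFns above, presumably indexed by the corresponding
       kCpumMsrWrFn_* values and kept in that enumeration's order. */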
5621 NULL, /* Invalid */
5622 cpumMsrWr_IgnoreWrite,
5623 cpumMsrWr_ReadOnly,
5624 NULL, /* Alias */
5625 cpumMsrWr_Ia32P5McAddr,
5626 cpumMsrWr_Ia32P5McType,
5627 cpumMsrWr_Ia32TimestampCounter,
5628 cpumMsrWr_Ia32ApicBase,
5629 cpumMsrWr_Ia32FeatureControl,
5630 cpumMsrWr_Ia32BiosSignId,
5631 cpumMsrWr_Ia32BiosUpdateTrigger,
5632 cpumMsrWr_Ia32SmmMonitorCtl,
5633 cpumMsrWr_Ia32PmcN,
5634 cpumMsrWr_Ia32MonitorFilterLineSize,
5635 cpumMsrWr_Ia32MPerf,
5636 cpumMsrWr_Ia32APerf,
5637 cpumMsrWr_Ia32MtrrPhysBaseN,
5638 cpumMsrWr_Ia32MtrrPhysMaskN,
5639 cpumMsrWr_Ia32MtrrFixed,
5640 cpumMsrWr_Ia32MtrrDefType,
5641 cpumMsrWr_Ia32Pat,
5642 cpumMsrWr_Ia32SysEnterCs,
5643 cpumMsrWr_Ia32SysEnterEsp,
5644 cpumMsrWr_Ia32SysEnterEip,
5645 cpumMsrWr_Ia32McgStatus,
5646 cpumMsrWr_Ia32McgCtl,
5647 cpumMsrWr_Ia32DebugCtl,
5648 cpumMsrWr_Ia32SmrrPhysBase,
5649 cpumMsrWr_Ia32SmrrPhysMask,
5650 cpumMsrWr_Ia32PlatformDcaCap,
5651 cpumMsrWr_Ia32Dca0Cap,
5652 cpumMsrWr_Ia32PerfEvtSelN,
5653 cpumMsrWr_Ia32PerfStatus,
5654 cpumMsrWr_Ia32PerfCtl,
5655 cpumMsrWr_Ia32FixedCtrN,
5656 cpumMsrWr_Ia32PerfCapabilities,
5657 cpumMsrWr_Ia32FixedCtrCtrl,
5658 cpumMsrWr_Ia32PerfGlobalStatus,
5659 cpumMsrWr_Ia32PerfGlobalCtrl,
5660 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5661 cpumMsrWr_Ia32PebsEnable,
5662 cpumMsrWr_Ia32ClockModulation,
5663 cpumMsrWr_Ia32ThermInterrupt,
5664 cpumMsrWr_Ia32ThermStatus,
5665 cpumMsrWr_Ia32Therm2Ctl,
5666 cpumMsrWr_Ia32MiscEnable,
5667 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5668 cpumMsrWr_Ia32McNCtl2,
5669 cpumMsrWr_Ia32DsArea,
5670 cpumMsrWr_Ia32TscDeadline,
5671 cpumMsrWr_Ia32X2ApicN,
5672 cpumMsrWr_Ia32DebugInterface,
5673 cpumMsrWr_Ia32SpecCtrl,
5674 cpumMsrWr_Ia32PredCmd,
5675
5676 cpumMsrWr_Amd64Efer,
5677 cpumMsrWr_Amd64SyscallTarget,
5678 cpumMsrWr_Amd64LongSyscallTarget,
5679 cpumMsrWr_Amd64CompSyscallTarget,
5680 cpumMsrWr_Amd64SyscallFlagMask,
5681 cpumMsrWr_Amd64FsBase,
5682 cpumMsrWr_Amd64GsBase,
5683 cpumMsrWr_Amd64KernelGsBase,
5684 cpumMsrWr_Amd64TscAux,
5685
5686 cpumMsrWr_IntelEblCrPowerOn,
5687 cpumMsrWr_IntelP4EbcHardPowerOn,
5688 cpumMsrWr_IntelP4EbcSoftPowerOn,
5689 cpumMsrWr_IntelP4EbcFrequencyId,
5690 cpumMsrWr_IntelFlexRatio,
5691 cpumMsrWr_IntelPkgCStConfigControl,
5692 cpumMsrWr_IntelPmgIoCaptureBase,
5693 cpumMsrWr_IntelLastBranchFromToN,
5694 cpumMsrWr_IntelLastBranchFromN,
5695 cpumMsrWr_IntelLastBranchToN,
5696 cpumMsrWr_IntelLastBranchTos,
5697 cpumMsrWr_IntelBblCrCtl,
5698 cpumMsrWr_IntelBblCrCtl3,
5699 cpumMsrWr_IntelI7TemperatureTarget,
5700 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5701 cpumMsrWr_IntelI7MiscPwrMgmt,
5702 cpumMsrWr_IntelP6CrN,
5703 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5704 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5705 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5706 cpumMsrWr_IntelI7SandyAesNiCtl,
5707 cpumMsrWr_IntelI7TurboRatioLimit,
5708 cpumMsrWr_IntelI7LbrSelect,
5709 cpumMsrWr_IntelI7SandyErrorControl,
5710 cpumMsrWr_IntelI7PowerCtl,
5711 cpumMsrWr_IntelI7SandyPebsNumAlt,
5712 cpumMsrWr_IntelI7PebsLdLat,
5713 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5714 cpumMsrWr_IntelI7SandyVrMiscConfig,
5715 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5716 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5717 cpumMsrWr_IntelI7SandyPkgC2Residency,
5718 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5719 cpumMsrWr_IntelI7RaplDramPowerLimit,
5720 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5721 cpumMsrWr_IntelI7RaplPp0Policy,
5722 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5723 cpumMsrWr_IntelI7RaplPp1Policy,
5724 cpumMsrWr_IntelI7IvyConfigTdpControl,
5725 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5726 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5727 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5728 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5729 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5730 cpumMsrWr_IntelI7UncPerfFixedCtr,
5731 cpumMsrWr_IntelI7UncArbPerfCtrN,
5732 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5733 cpumMsrWr_IntelCore2EmttmCrTablesN,
5734 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5735 cpumMsrWr_IntelCore1ExtConfig,
5736 cpumMsrWr_IntelCore1DtsCalControl,
5737 cpumMsrWr_IntelCore2PeciControl,
5738
5739 cpumMsrWr_P6LastIntFromIp,
5740 cpumMsrWr_P6LastIntToIp,
5741
5742 cpumMsrWr_AmdFam15hTscRate,
5743 cpumMsrWr_AmdFam15hLwpCfg,
5744 cpumMsrWr_AmdFam15hLwpCbAddr,
5745 cpumMsrWr_AmdFam10hMc4MiscN,
5746 cpumMsrWr_AmdK8PerfCtlN,
5747 cpumMsrWr_AmdK8PerfCtrN,
5748 cpumMsrWr_AmdK8SysCfg,
5749 cpumMsrWr_AmdK8HwCr,
5750 cpumMsrWr_AmdK8IorrBaseN,
5751 cpumMsrWr_AmdK8IorrMaskN,
5752 cpumMsrWr_AmdK8TopOfMemN,
5753 cpumMsrWr_AmdK8NbCfg1,
5754 cpumMsrWr_AmdK8McXcptRedir,
5755 cpumMsrWr_AmdK8CpuNameN,
5756 cpumMsrWr_AmdK8HwThermalCtrl,
5757 cpumMsrWr_AmdK8SwThermalCtrl,
5758 cpumMsrWr_AmdK8FidVidControl,
5759 cpumMsrWr_AmdK8McCtlMaskN,
5760 cpumMsrWr_AmdK8SmiOnIoTrapN,
5761 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5762 cpumMsrWr_AmdK8IntPendingMessage,
5763 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5764 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5765 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5766 cpumMsrWr_AmdFam10hPStateControl,
5767 cpumMsrWr_AmdFam10hPStateStatus,
5768 cpumMsrWr_AmdFam10hPStateN,
5769 cpumMsrWr_AmdFam10hCofVidControl,
5770 cpumMsrWr_AmdFam10hCofVidStatus,
5771 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5772 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5773 cpumMsrWr_AmdK8SmmBase,
5774 cpumMsrWr_AmdK8SmmAddr,
5775 cpumMsrWr_AmdK8SmmMask,
5776 cpumMsrWr_AmdK8VmCr,
5777 cpumMsrWr_AmdK8IgnNe,
5778 cpumMsrWr_AmdK8SmmCtl,
5779 cpumMsrWr_AmdK8VmHSavePa,
5780 cpumMsrWr_AmdFam10hVmLockKey,
5781 cpumMsrWr_AmdFam10hSmmLockKey,
5782 cpumMsrWr_AmdFam10hLocalSmiStatus,
5783 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5784 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5785 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5786 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5787 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5788 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5789 cpumMsrWr_AmdK7MicrocodeCtl,
5790 cpumMsrWr_AmdK7ClusterIdMaybe,
5791 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5792 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5793 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5794 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5795 cpumMsrWr_AmdK8PatchLoader,
5796 cpumMsrWr_AmdK7DebugStatusMaybe,
5797 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5798 cpumMsrWr_AmdK7BHTracePtrMaybe,
5799 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5800 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5801 cpumMsrWr_AmdK7FastFlushCountMaybe,
5802 cpumMsrWr_AmdK7NodeId,
5803 cpumMsrWr_AmdK7DrXAddrMaskN,
5804 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5805 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5806 cpumMsrWr_AmdK7LoadStoreCfg,
5807 cpumMsrWr_AmdK7InstrCacheCfg,
5808 cpumMsrWr_AmdK7DataCacheCfg,
5809 cpumMsrWr_AmdK7BusUnitCfg,
5810 cpumMsrWr_AmdK7DebugCtl2Maybe,
5811 cpumMsrWr_AmdFam15hFpuCfg,
5812 cpumMsrWr_AmdFam15hDecoderCfg,
5813 cpumMsrWr_AmdFam10hBusUnitCfg2,
5814 cpumMsrWr_AmdFam15hCombUnitCfg,
5815 cpumMsrWr_AmdFam15hCombUnitCfg2,
5816 cpumMsrWr_AmdFam15hCombUnitCfg3,
5817 cpumMsrWr_AmdFam15hExecUnitCfg,
5818 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5819 cpumMsrWr_AmdFam10hIbsFetchCtl,
5820 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5821 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5822 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5823 cpumMsrWr_AmdFam10hIbsOpRip,
5824 cpumMsrWr_AmdFam10hIbsOpData,
5825 cpumMsrWr_AmdFam10hIbsOpData2,
5826 cpumMsrWr_AmdFam10hIbsOpData3,
5827 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5828 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5829 cpumMsrWr_AmdFam10hIbsCtl,
5830 cpumMsrWr_AmdFam14hIbsBrTarget,
5831
5832 cpumMsrWr_Gim
5833};
5834
5835
5836/**
5837 * Looks up the range for the given MSR.
5838 *
5839 * @returns Pointer to the range if found, NULL if not.
5840 * @param pVM The cross context VM structure.
5841 * @param idMsr The MSR to look up.
5842 */
5843# ifndef IN_RING3
5844static
5845# endif
5846PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5847{
5848 /*
5849 * Binary lookup.
5850 */
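    /* The ranges are assumed to be sorted by uFirst and to be non-overlapping;
       entries whose read function is kCpumMsrRdFn_MsrAlias redirect the lookup
       to the MSR index stored in uValue, hence the recursive call further down. */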
5851 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5852 if (!cRanges)
5853 return NULL;
5854 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5855 for (;;)
5856 {
5857 uint32_t i = cRanges / 2;
5858 if (idMsr < paRanges[i].uFirst)
5859 {
5860 if (i == 0)
5861 break;
5862 cRanges = i;
5863 }
5864 else if (idMsr > paRanges[i].uLast)
5865 {
5866 i++;
5867 if (i >= cRanges)
5868 break;
5869 cRanges -= i;
5870 paRanges = &paRanges[i];
5871 }
5872 else
5873 {
5874 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5875 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5876 return &paRanges[i];
5877 }
5878 }
5879
5880# ifdef VBOX_STRICT
5881 /*
5882 * Linear lookup to verify the above binary search.
5883 */
5884 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5885 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5886 while (cLeft-- > 0)
5887 {
5888 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5889 {
5890 AssertFailed();
5891 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5892 return cpumLookupMsrRange(pVM, pCur->uValue);
5893 return pCur;
5894 }
5895 pCur++;
5896 }
5897# endif
5898 return NULL;
5899}
5900
5901
5902/**
5903 * Query a guest MSR.
5904 *
5905 * The caller is responsible for checking privilege if the call is the result of
5906 * a RDMSR instruction. We'll do the rest.
5907 *
5908 * @retval VINF_SUCCESS on success.
5909 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5910 * current context (raw-mode or ring-0).
5911 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5912 * expected to take the appropriate actions. @a *puValue is set to 0.
5913 * @param pVCpu The cross context virtual CPU structure.
5914 * @param idMsr The MSR.
5915 * @param puValue Where to return the value.
5916 *
5917 * @remarks This will always return the right values, even when we're in the
5918 * recompiler.
5919 */
5920VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5921{
5922 *puValue = 0;
5923
5924 VBOXSTRICTRC rcStrict;
5925 PVM pVM = pVCpu->CTX_SUFF(pVM);
5926 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5927 if (pRange)
5928 {
5929 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5930 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5931
5932 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5933 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5934
5935 STAM_COUNTER_INC(&pRange->cReads);
5936 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5937
5938 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5939 if (rcStrict == VINF_SUCCESS)
5940 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5941 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5942 {
5943 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5944 STAM_COUNTER_INC(&pRange->cGps);
5945 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5946 }
5947#ifndef IN_RING3
5948 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5949 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5950#endif
5951 else
5952 {
5953 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5954 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5955 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5956 Assert(rcStrict != VERR_EM_INTERPRETER);
5957 }
5958 }
5959 else
5960 {
5961 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5962 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5963 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5964 rcStrict = VERR_CPUM_RAISE_GP_0;
5965 }
5966 return rcStrict;
5967}
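
/*
 * Minimal usage sketch (illustration only; the helper name exampleEmulateRdMsr is
 * hypothetical, the real callers such as IEM live elsewhere): shows how a caller is
 * expected to consume the strict status codes documented above. Kept under #if 0 so
 * it is never built.
 */
#if 0
static VBOXSTRICTRC exampleEmulateRdMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
{
    VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, idMsr, puValue);
    if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {
        /* Unknown or invalid MSR: *puValue is 0 and the caller injects #GP(0). */
    }
    else if (rcStrict == VINF_CPUM_R3_MSR_READ)
    {
        /* Cannot be serviced in this context (raw-mode/ring-0); defer to ring-3. */
    }
    return rcStrict;
}
#endif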
5968
5969
5970/**
5971 * Writes to a guest MSR.
5972 *
5973 * The caller is responsible for checking privilege if the call is the result of
5974 * a WRMSR instruction. We'll do the rest.
5975 *
5976 * @retval VINF_SUCCESS on success.
5977 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
5978 * current context (raw-mode or ring-0).
5979 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
5980 * appropriate actions.
5981 *
5982 * @param pVCpu The cross context virtual CPU structure.
5983 * @param idMsr The MSR id.
5984 * @param uValue The value to set.
5985 *
5986 * @remarks Everyone changing MSR values, including the recompiler, shall do it
5987 * by calling this method. This makes sure we have current values and
5988 * that we trigger all the right actions when something changes.
5989 *
5990 * For performance reasons, this actually isn't entirely true for some
5991 * MSRs when in HM mode. The code here and in HM must be aware of
5992 * this.
5993 */
5994VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
5995{
5996 VBOXSTRICTRC rcStrict;
5997 PVM pVM = pVCpu->CTX_SUFF(pVM);
5998 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5999 if (pRange)
6000 {
6001 STAM_COUNTER_INC(&pRange->cWrites);
6002 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6003
6004 if (!(uValue & pRange->fWrGpMask))
6005 {
6006 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6007 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6008
6009 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6010 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6011
6012 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6013 if (uValueAdjusted != uValue)
6014 {
6015 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6016 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6017 }
6018
6019 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6020 if (rcStrict == VINF_SUCCESS)
6021 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6022 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6023 {
6024 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6025 STAM_COUNTER_INC(&pRange->cGps);
6026 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6027 }
6028#ifndef IN_RING3
6029 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6030 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6031#endif
6032 else
6033 {
6034 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6035 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6036 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6037 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6038 Assert(rcStrict != VERR_EM_INTERPRETER);
6039 }
6040 }
6041 else
6042 {
6043 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6044 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6045 STAM_COUNTER_INC(&pRange->cGps);
6046 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6047 rcStrict = VERR_CPUM_RAISE_GP_0;
6048 }
6049 }
6050 else
6051 {
6052 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6053 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6054 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6055 rcStrict = VERR_CPUM_RAISE_GP_0;
6056 }
6057 return rcStrict;
6058}
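
/*
 * Minimal usage sketch for the write path (illustration only; exampleEmulateWrMsr is
 * a hypothetical name): mirrors the read sketch above and shows which statuses a
 * WRMSR caller has to handle. Kept under #if 0 so it is never built.
 */
#if 0
static VBOXSTRICTRC exampleEmulateWrMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
{
    VBOXSTRICTRC rcStrict = CPUMSetGuestMsr(pVCpu, idMsr, uValue);
    if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {
        /* Unknown MSR, or bits from fWrGpMask were set: the caller injects #GP(0). */
    }
    else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
    {
        /* Cannot be serviced in this context (raw-mode/ring-0); defer to ring-3. */
    }
    return rcStrict;
}
#endif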
6059
6060
6061#if defined(VBOX_STRICT) && defined(IN_RING3)
6062/**
6063 * Performs some checks on the static data related to MSRs.
6064 *
6065 * @returns VINF_SUCCESS on success, error on failure.
6066 */
6067int cpumR3MsrStrictInitChecks(void)
6068{
6069#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6070 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6071#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6072 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6073
6074 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6075 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6076 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6077 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6078 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6079 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6080 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6081 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6082 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6083 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6084 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6085 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6086 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6087 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6088 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6089 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6090 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6091 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6092 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6093 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6094 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6095 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6096 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6097 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6098 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6099 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6100 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6101 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6102 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6103 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6104 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6105 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6106 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6107 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6108 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6109 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6110 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6111 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6112 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6113 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6114 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6115 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6116 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6117 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6141 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6142 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6143 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6144 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6145 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6146 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6147
6148 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6149 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6150 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6151 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6152 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6153 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6154 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6155 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6156 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6157
6158 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6159 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6160 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6161 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6162 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6163 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6164 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6165 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6166 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6167 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6168 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6169 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6170 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6171 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6172 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6173 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6174 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6175 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6176 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6177 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6178 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6179 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6180 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6181 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6182 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6183 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6184 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6185 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6186 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6187 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6188 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6189 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6190 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6191 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6192 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6193 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6194 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6195 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6196 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6197 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6198 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6199 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6200 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6201 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6202 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6203 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6204 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6205 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6206 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6207 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6208 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6209 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6210 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6211 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6212 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6213 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6214 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6218 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6219 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6220 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6221 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6222 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6223 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6224 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6225 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6226 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6227 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6228 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6229 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6230 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6231
6232 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6233 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6234 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6235 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6236
6237 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6238 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6239 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6240 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6241 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6242 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6243 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6244 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6245 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6246 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6247 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6248 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6249 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6250 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6251 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6252 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6253 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6254 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6255 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6256 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6257 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6258 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6259 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6260 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6261 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6262 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6263 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6264 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6265 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6266 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6267 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6268 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6269 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6270 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6271 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6272 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6273 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6274 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6275 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6276 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6277 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6278 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6279 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6280 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6281 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6282 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6283 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6284 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6285 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6292 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6293 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6294 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6295 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6296 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6297 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6298 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6299 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6300 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6301 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6302 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6303 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6304 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6305 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6306 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6307 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6308 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6311 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6312 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6313 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6314 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6315 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6316 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6317 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6318 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6319 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6320 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6321 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6322 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6323 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6324 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6325 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6326 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6327 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6328
6329 CPUM_ASSERT_RD_MSR_FN(Gim);

6330
6331 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6332 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6333 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6334 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6335 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6336 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6337 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6338 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6339 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6340 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6341 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6342 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6343 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6344 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6345 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6346 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6347 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6348 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6349 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6350 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6351 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6352 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6353 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6354 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6355 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6356 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6357 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6358 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6359 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6360 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6361 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6362 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6363 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6364 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6365 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6366 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6367 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6368 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6369 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6370 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6371 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6372 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6375 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6376 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6377 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6378 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6379 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6380 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6381
6382 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6383 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6384 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6385 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6386 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6387 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6388 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6389 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6390 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6391
6392 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6393 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6394 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6395 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6396 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6397 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6398 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6399 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6400 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6401 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6402 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6403 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6404 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6405 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6406 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6407 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6408 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6409 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6410 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6411 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6412 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6413 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6414 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6415 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6416 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6417 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6418 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6419 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6420 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6421 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6422 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6423 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6424 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6425 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6426 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6427 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6428 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6429 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6430 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6431 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6432 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6433 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6434 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6435 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6436 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6437 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6438 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6439 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6440 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6441 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6442 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6443
6444 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6445 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6446
6447 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6448 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6449 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6450 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6451 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6452 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6453 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6454 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6455 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6456 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6457 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6458 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6459 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6460 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6461 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6462 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6463 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6464 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6465 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6466 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6467 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6468 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6469 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6470 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6471 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6472 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6473 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6474 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6475 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6476 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6477 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6478 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6479 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6480 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6481 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6482 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6483 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6484 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6485 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6486 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6487 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6488 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6489 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6490 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6491 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6492 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6493 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6501 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6502 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6503 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6504 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6505 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6506 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6507 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6508 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6509 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6510 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6511 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6512 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6513 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6514 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6515 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6516 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6519 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6520 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6521 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6522 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6523 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6524 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6525 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6526 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6527 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6528 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6529 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6530 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6531 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6532 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6533 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6534 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6535 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6536
6537 CPUM_ASSERT_WR_MSR_FN(Gim);
6538
6539 return VINF_SUCCESS;
6540}
6541#endif /* VBOX_STRICT && IN_RING3 */
6542
6543
6544/**
6545 * Gets the scalable bus frequency.
6546 *
6547 * The bus frequency is used as a base in several MSRs that give the CPU and
6548 * other frequency ratios.
6549 *
6550 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6551 * @param pVM The cross context VM structure.
6552 */
6553VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6554{
6555 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6556 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6557 uFreq = CPUM_SBUSFREQ_100MHZ;
6558 return uFreq;
6559}
6560
6561
6562/**
6563 * Sets the guest EFER MSR without performing any additional checks.
6564 *
6565 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6566 * @param uOldEfer The previous EFER MSR value.
6567 * @param uValidEfer The new, validated EFER MSR value.
6568 *
6569 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate the
6570 * new value before calling this function to perform the EFER transition.
6571 */
6572VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6573{
6574 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6575
6576 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6577 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6578 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6579 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6580 {
6581 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6582 HMFlushTLB(pVCpu);
6583
6584 /* Notify PGM about NXE changes. */
6585 if ( (uOldEfer & MSR_K6_EFER_NXE)
6586 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6587 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6588 }
6589}
6590
6591
6592/**
6593 * Checks if a guest PAT MSR write is valid.
6594 *
6595 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6596 * @param uValue The PAT MSR value.
6597 */
6598VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6599{
6600 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6601 {
6602 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6603 uint8_t uType = (uint8_t)(uValue >> cShift);
6604 if ((uType >= 8) || (uType == 2) || (uType == 3))
6605 {
6606 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#llx)\n", cShift + 7, cShift, uValue, uType));
6607 return false;
6608 }
6609 }
6610 return true;
6611}
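
/*
 * Worked example (illustration only; examplePatChecks is a hypothetical helper): the
 * loop above accepts the memory types 0 (UC), 1 (WC), 4 (WT), 5 (WP), 6 (WB) and
 * 7 (UC-) in each byte and rejects 2, 3 and anything with reserved bits set. Kept
 * under #if 0 so it is never built.
 */
#if 0
static void examplePatChecks(void)
{
    Assert( CPUMIsPatMsrValid(UINT64_C(0x0007040600070406))); /* The power-on default PAT value. */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0000000000000002))); /* Type 2 is reserved. */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0000000000000008))); /* Reserved high bits of a byte set. */
}
#endif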
6612
6613
6614/**
6615 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6616 *
6617 * @returns VBox status code.
6618 * @param pVM The cross context VM structure.
6619 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6620 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6621 * @param uNewEfer The new EFER MSR value being written.
6622 * @param puValidEfer Where to store the validated EFER (only updated if
6623 * this function returns VINF_SUCCESS).
6624 */
6625VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6626{
6627 /* #GP(0) if anything outside the allowed bits is set. */
6628 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6629 if (uNewEfer & ~fMask)
6630 {
6631 Log(("CPUM: Setting disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6632 return VERR_CPUM_RAISE_GP_0;
6633 }
6634
6635 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6636 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6637 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6638 && (uCr0 & X86_CR0_PG))
6639 {
6640 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6641 return VERR_CPUM_RAISE_GP_0;
6642 }
6643
6644 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6645 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6646 | MSR_K6_EFER_LME
6647 | MSR_K6_EFER_LMA /* ignored anyway */
6648 | MSR_K6_EFER_SCE
6649 | MSR_K6_EFER_FFXSR
6650 | MSR_K6_EFER_SVME)),
6651 ("Unexpected value %#RX64\n", uNewEfer));
6652
6653 /* Ignore EFER.LMA, it's updated when setting CR0. */
6654 fMask &= ~MSR_K6_EFER_LMA;
6655
6656 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6657 return VINF_SUCCESS;
6658}
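
/*
 * Sketch of the validate-then-apply sequence described in the remarks above
 * (illustration only; exampleWriteEfer is a hypothetical name for what a WRMSR
 * handler would do). Kept under #if 0 so it is never built.
 */
#if 0
static VBOXSTRICTRC exampleWriteEfer(PVMCPU pVCpu, uint64_t uNewEfer)
{
    uint64_t const uOldEfer   = pVCpu->cpum.s.Guest.msrEFER;
    uint64_t       uValidEfer = 0;
    int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0,
                                          uOldEfer, uNewEfer, &uValidEfer);
    if (RT_FAILURE(rc))
        return rc;                                   /* VERR_CPUM_RAISE_GP_0: caller injects #GP(0). */
    CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
    return VINF_SUCCESS;
}
#endif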
6659
6660
6661/**
6662 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6663 *
6664 * @returns Mask of valid EFER bits.
6665 * @param pVM The cross context VM structure.
6666 *
6667 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6668 * rather a read-only bit.
6669 */
6670VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6671{
6672 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6673 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6674 : 0;
6675 uint64_t fMask = 0;
6676 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6677
6678 /* Gather the bits the guest is allowed to change; the read-only LMA bit is added separately via fIgnoreMask. */
6679 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6680 fMask |= MSR_K6_EFER_NXE;
6681 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6682 fMask |= MSR_K6_EFER_LME;
6683 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6684 fMask |= MSR_K6_EFER_SCE;
6685 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6686 fMask |= MSR_K6_EFER_FFXSR;
6687 if (pVM->cpum.s.GuestFeatures.fSvm)
6688 fMask |= MSR_K6_EFER_SVME;
6689
6690 return (fIgnoreMask | fMask);
6691}
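
/* Worked example, derived from the checks above: for a guest exposing NX, long mode,
   SYSCALL and FFXSR but not SVM, the function returns
   MSR_K6_EFER_LMA | MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_SCE | MSR_K6_EFER_FFXSR. */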
6692
6693
6694/**
6695 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6696 *
6697 * @returns The register value.
6698 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6699 * @thread EMT(pVCpu)
6700 */
6701VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6702{
6703 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6704 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6705}
6706
6707
6708/**
6709 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6710 *
6711 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6712 * @param uValue The new value.
6713 * @thread EMT(pVCpu)
6714 */
6715VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6716{
6717 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6718 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6719}
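
/* Note on the CPUMCTX_EXTRN_TSC_AUX flag: the getter above asserts the flag is clear,
   i.e. the value is expected to already be current in CPUM, while the setter clears it
   because the value it writes makes the CPUM copy current. */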
6720
6721
6722/**
6723 * Fast way for HM to access the IA32_SPEC_CTRL register.
6724 *
6725 * @returns The register value.
6726 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6727 * @thread EMT(pVCpu)
6728 */
6729VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6730{
6731 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6732}
6733
6734
6735/**
6736 * Fast way for HM to access the IA32_SPEC_CTRL register.
6737 *
6738 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6739 * @param uValue The new value.
6740 * @thread EMT(pVCpu)
6741 */
6742VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6743{
6744 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6745}
6746