VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp@ 75884

Last change on this file since 75884 was 75509, checked in by vboxsync, 6 years ago

VMM: Nested VMX: bugref:9180 Explicitly ignore attempts at microcode updates via the IA32_BIOS_UPDT_TRIG MSR while in VMX non-root mode.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 254.1 KB
Line 
1/* $Id: CPUMAllMsrs.cpp 75509 2018-11-16 08:34:08Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
28# include <VBox/vmm/iem.h>
29#endif
30#include <VBox/vmm/tm.h>
31#include <VBox/vmm/gim.h>
32#include "CPUMInternal.h"
33#include <VBox/vmm/vm.h>
34#include <VBox/err.h>
35
36
37/*********************************************************************************************************************************
38* Defined Constants And Macros *
39*********************************************************************************************************************************/
40/**
41 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
42 * pointing to it.
43 *
44 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
45 * correctly.
46 */
47#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
48 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
49 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
50 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
51 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
52 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
53 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
54
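/*
 * A minimal usage sketch of the macro above, assuming a hypothetical read
 * handler whose MSR is backed by a uint64_t member of CPUMCPU; the name
 * cpumMsrRdExampleBackedValue is made up for illustration, and
 * cpumMsrRd_Ia32MtrrFixed further down is a real user.
 */
#if 0
static VBOXSTRICTRC cpumMsrRdExampleBackedValue(PVMCPU pVCpu, PCCPUMMSRRANGE pRange, uint64_t *puValue)
{
    /* Validates pRange->offCpumCpu and declares pu64Backing pointing into pVCpu->cpum.s. */
    CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, pu64Backing);
    *puValue = *pu64Backing;
    return VINF_SUCCESS;
}
#endif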
55
56/*********************************************************************************************************************************
57* Structures and Typedefs *
58*********************************************************************************************************************************/
59
60/**
61 * Implements reading one or more MSRs.
62 *
63 * @returns VBox status code.
64 * @retval VINF_SUCCESS on success.
65 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
66 * current context (raw-mode or ring-0).
67 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
68 *
69 * @param pVCpu The cross context virtual CPU structure.
70 * @param idMsr The MSR we're reading.
71 * @param pRange The MSR range descriptor.
72 * @param puValue Where to return the value.
73 */
74typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
75/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
76typedef FNCPUMRDMSR *PFNCPUMRDMSR;
77
78
79/**
80 * Implements writing one or more MSRs.
81 *
82 * @retval VINF_SUCCESS on success.
83 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
84 * current context (raw-mode or ring-0).
85 * @retval VERR_CPUM_RAISE_GP_0 on failure.
86 *
87 * @param pVCpu The cross context virtual CPU structure.
88 * @param idMsr The MSR we're writing.
89 * @param pRange The MSR range descriptor.
90 * @param uValue The value to set, ignored bits masked.
91 * @param uRawValue The raw value with the ignored bits not masked.
92 */
93typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
94/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
95typedef FNCPUMWRMSR *PFNCPUMWRMSR;
96
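/*
 * An illustrative worker pair for these callback types, assuming a hypothetical
 * MSR that can only be serviced in ring-3 (cpumMsrRd_ExampleDeviceMsr and
 * cpumMsrWr_ExampleDeviceMsr are invented names). It demonstrates the deferral
 * status codes documented above; the real generic workers follow below.
 */
#if 0
/** @callback_method_impl{FNCPUMRDMSR} */
static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_ExampleDeviceMsr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
{
    RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
#ifndef IN_RING3
    RT_NOREF_PV(pVCpu); RT_NOREF_PV(puValue);
    return VINF_CPUM_R3_MSR_READ;   /* Cannot be serviced in this context; retry in ring-3. */
#else
    RT_NOREF_PV(pVCpu);
    *puValue = 0;                   /* Ring-3 would consult the emulated device here. */
    return VINF_SUCCESS;
#endif
}

/** @callback_method_impl{FNCPUMWRMSR} */
static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ExampleDeviceMsr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
{
    RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
#ifndef IN_RING3
    RT_NOREF_PV(pVCpu); RT_NOREF_PV(uValue);
    return VINF_CPUM_R3_MSR_WRITE;  /* Cannot be serviced in this context; retry in ring-3. */
#else
    RT_NOREF_PV(pVCpu); RT_NOREF_PV(uValue);
    return VINF_SUCCESS;            /* Ring-3 would forward the value to the device here. */
#endif
}
#endif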
97
98
99/*
100 * Generic functions.
101 * Generic functions.
102 * Generic functions.
103 */
104
105
106/** @callback_method_impl{FNCPUMRDMSR} */
107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
108{
109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
110 *puValue = pRange->uValue;
111 return VINF_SUCCESS;
112}
113
114
115/** @callback_method_impl{FNCPUMWRMSR} */
116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
117{
118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
119 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
120 return VINF_SUCCESS;
121}
122
123
124/** @callback_method_impl{FNCPUMRDMSR} */
125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
126{
127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
128 return VERR_CPUM_RAISE_GP_0;
129}
130
131
132/** @callback_method_impl{FNCPUMWRMSR} */
133static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
134{
135 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
136 Assert(pRange->fWrGpMask == UINT64_MAX);
137 return VERR_CPUM_RAISE_GP_0;
138}
139
140
141
142
143/*
144 * IA32
145 * IA32
146 * IA32
147 */
148
149/** @callback_method_impl{FNCPUMRDMSR} */
150static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
151{
152 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
153 *puValue = 0; /** @todo implement machine check injection. */
154 return VINF_SUCCESS;
155}
156
157
158/** @callback_method_impl{FNCPUMWRMSR} */
159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
160{
161 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
162 /** @todo implement machine check injection. */
163 return VINF_SUCCESS;
164}
165
166
167/** @callback_method_impl{FNCPUMRDMSR} */
168static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
169{
170 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
171 *puValue = 0; /** @todo implement machine check injection. */
172 return VINF_SUCCESS;
173}
174
175
176/** @callback_method_impl{FNCPUMWRMSR} */
177static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
178{
179 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
180 /** @todo implement machine check injection. */
181 return VINF_SUCCESS;
182}
183
184
185/** @callback_method_impl{FNCPUMRDMSR} */
186static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
187{
188 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
189 *puValue = TMCpuTickGet(pVCpu);
190#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
191 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
192#endif
193 return VINF_SUCCESS;
194}
195
196
197/** @callback_method_impl{FNCPUMWRMSR} */
198static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
199{
200 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
201 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
202 return VINF_SUCCESS;
203}
204
205
206/** @callback_method_impl{FNCPUMRDMSR} */
207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
208{
209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
210 uint64_t uValue = pRange->uValue;
211 if (uValue & 0x1f00)
212 {
213 /* Max allowed bus ratio present. */
214 /** @todo Implement scaled BUS frequency. */
215 }
216
217 *puValue = uValue;
218 return VINF_SUCCESS;
219}
220
221
222/** @callback_method_impl{FNCPUMRDMSR} */
223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
224{
225 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
226 return APICGetBaseMsr(pVCpu, puValue);
227}
228
229
230/** @callback_method_impl{FNCPUMWRMSR} */
231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
232{
233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
234 return APICSetBaseMsr(pVCpu, uValue);
235}
236
237
238/**
239 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
240 *
241 * @returns Fixed IA32_FEATURE_CONTROL value.
242 * @param pVCpu The cross context per CPU structure.
243 */
244VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
245{
246 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
247 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
248
249 /* Report VMX features. */
250 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
251 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
252
253 return fFeatCtl;
254}
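
/*
 * Illustrative decode of the value composed above, as a guest would check it
 * before attempting VMXON (outside SMX operation). The helper name is
 * hypothetical; the constants are the same ones used above.
 */
#if 0
static bool cpumExampleGuestMayExecVmxon(uint64_t fFeatCtl)
{
    /* The MSR must be locked and the VMXON (outside SMX) bit must be set. */
    return (fFeatCtl & MSR_IA32_FEATURE_CONTROL_LOCK)
        && (fFeatCtl & MSR_IA32_FEATURE_CONTROL_VMXON);
}
#endif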
255
256/** @callback_method_impl{FNCPUMRDMSR} */
257static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
258{
259 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
260 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
261 return VINF_SUCCESS;
262}
263
264
265/** @callback_method_impl{FNCPUMWRMSR} */
266static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
267{
268 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
269 return VERR_CPUM_RAISE_GP_0;
270}
271
272
273/** @callback_method_impl{FNCPUMRDMSR} */
274static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
275{
276 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
277 /** @todo fake microcode update. */
278 *puValue = pRange->uValue;
279 return VINF_SUCCESS;
280}
281
282
283/** @callback_method_impl{FNCPUMWRMSR} */
284static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
285{
286 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
287 /* Normally, zero is written to Ia32BiosSignId before reading it in order
288 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
289 of the database entry should take care of most illegal writes for now, so
290 just ignore all writes atm. */
291 return VINF_SUCCESS;
292}
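
/*
 * Sketch of the conventional guest-side sequence the comment above refers to,
 * per the Intel SDM: clear IA32_BIOS_SIGN_ID, execute CPUID(1), then read the
 * signature from bits 63:32. Assumes the IPRT ASMWrMsr/ASMRdMsr/ASMCpuId_EAX
 * helpers; the function name is hypothetical.
 */
#if 0
static uint32_t cpumExampleReadGuestMicrocodeRevision(void)
{
    ASMWrMsr(0x8b /* IA32_BIOS_SIGN_ID */, 0);   /* 1. Clear the signature MSR. */
    ASMCpuId_EAX(1);                             /* 2. CPUID(1) makes the CPU refresh it. */
    return (uint32_t)(ASMRdMsr(0x8b) >> 32);     /* 3. Microcode revision lives in bits 63:32. */
}
#endif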
293
294
295/** @callback_method_impl{FNCPUMWRMSR} */
296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
297{
298 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
299
300 /* Microcode updates cannot be loaded in VMX non-root mode. */
301 if (CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
302 return VINF_SUCCESS;
303
304 /** @todo Fake bios update trigger better. The value is the address to an
305 * update package, I think. We should probably GP if it's invalid. */
306 return VINF_SUCCESS;
307}
308
309
310/**
311 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
312 *
313 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
314 * @param pVCpu The cross context per CPU structure.
315 */
316VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
317{
318 /* We do not support dual-monitor treatment for SMI and SMM. */
319 /** @todo SMM. */
320 RT_NOREF(pVCpu);
321 return 0;
322}
323
324
325/** @callback_method_impl{FNCPUMRDMSR} */
326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
327{
328 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
329 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
330 return VINF_SUCCESS;
331}
332
333
334/** @callback_method_impl{FNCPUMWRMSR} */
335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
336{
337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
338 /** @todo SMM. */
339 return VINF_SUCCESS;
340}
341
342
343/** @callback_method_impl{FNCPUMRDMSR} */
344static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
345{
346 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
347 /** @todo check CPUID leaf 0ah. */
348 *puValue = 0;
349 return VINF_SUCCESS;
350}
351
352
353/** @callback_method_impl{FNCPUMWRMSR} */
354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
355{
356 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
357 /** @todo check CPUID leaf 0ah. */
358 return VINF_SUCCESS;
359}
360
361
362/** @callback_method_impl{FNCPUMRDMSR} */
363static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
364{
365 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
366 /** @todo return 0x1000 if we try emulate mwait 100% correctly. */
367 *puValue = 0x40; /** @todo Change to CPU cache line size. */
368 return VINF_SUCCESS;
369}
370
371
372/** @callback_method_impl{FNCPUMWRMSR} */
373static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
374{
375 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
376 /** @todo should remember writes, though it's supposedly something only a BIOS
377 * would write, so it's not extremely important. */
378 return VINF_SUCCESS;
379}
380
381/** @callback_method_impl{FNCPUMRDMSR} */
382static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
383{
384 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
385 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
386 * what we want? */
387 *puValue = TMCpuTickGet(pVCpu);
388#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
389 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
390#endif
391 return VINF_SUCCESS;
392}
393
394
395/** @callback_method_impl{FNCPUMWRMSR} */
396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
397{
398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
399 /** @todo Write MPERF: Calc adjustment. */
400 return VINF_SUCCESS;
401}
402
403
404/** @callback_method_impl{FNCPUMRDMSR} */
405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
406{
407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
408 /** @todo Read APERF: Adjust against previously written APERF value. Is TSC
409 * what we want? */
410 *puValue = TMCpuTickGet(pVCpu);
411#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
412 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
413#endif
414 return VINF_SUCCESS;
415}
416
417
418/** @callback_method_impl{FNCPUMWRMSR} */
419static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
420{
421 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
422 /** @todo Write APERF: Calc adjustment. */
423 return VINF_SUCCESS;
424}
425
426
427/**
428 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
429 *
430 * @returns Fixed IA32_MTRR_CAP value.
431 * @param pVCpu The cross context per CPU structure.
432 */
433VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
434{
435 RT_NOREF_PV(pVCpu);
436
437 /* This is currently a bit weird. :-) */
438 uint8_t const cVariableRangeRegs = 0;
439 bool const fSystemManagementRangeRegisters = false;
440 bool const fFixedRangeRegisters = false;
441 bool const fWriteCombiningType = false;
442 return cVariableRangeRegs
443 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
444 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
445 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
446}
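
/*
 * A worked decode of the IA32_MTRR_CAP layout composed above: VCNT in bits 7:0,
 * fixed-range support in bit 8, write-combining in bit 10 and SMRR in bit 11.
 * The helper name is hypothetical.
 */
#if 0
static void cpumExampleDecodeMtrrCap(uint64_t uMtrrCap)
{
    uint8_t const cVarRanges = (uint8_t)(uMtrrCap & 0xff);
    bool const    fFixed     = RT_BOOL(uMtrrCap & RT_BIT_64(8));
    bool const    fWriteComb = RT_BOOL(uMtrrCap & RT_BIT_64(10));
    bool const    fSmrr      = RT_BOOL(uMtrrCap & RT_BIT_64(11));
    Log(("MTRR_CAP: VCNT=%u FIX=%d WC=%d SMRR=%d\n", cVarRanges, fFixed, fWriteComb, fSmrr));
}
#endif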
447
448/** @callback_method_impl{FNCPUMRDMSR} */
449static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
450{
451 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
452 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
453 return VINF_SUCCESS;
454}
455
456
457/** @callback_method_impl{FNCPUMRDMSR} */
458static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
459{
460 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
461 /** @todo Implement variable MTRR storage. */
462 Assert(pRange->uValue == (idMsr - 0x200) / 2);
463 *puValue = 0;
464 return VINF_SUCCESS;
465}
466
467
468/** @callback_method_impl{FNCPUMWRMSR} */
469static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
470{
471 /*
472 * Validate the value.
473 */
474 Assert(pRange->uValue == (idMsr - 0x200) / 2);
475 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
476
477 uint8_t uType = uValue & 0xff;
478 if ((uType >= 7) || (uType == 2) || (uType == 3))
479 {
480 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
485 if (fInvPhysMask & uValue)
486 {
487 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
488 idMsr, uValue, uValue & fInvPhysMask));
489 return VERR_CPUM_RAISE_GP_0;
490 }
491
492 /*
493 * Store it.
494 */
495 /** @todo Implement variable MTRR storage. */
496 return VINF_SUCCESS;
497}
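
/*
 * The physical-address-width check above, worked for a hypothetical guest with
 * a 36-bit physical address width: ~(RT_BIT_64(36) - 1) = 0xfffffff000000000,
 * so any base or mask value with bits 63:36 set raises #GP. Helper name
 * hypothetical.
 */
#if 0
static bool cpumExampleIsMtrrPhysAddrValid36(uint64_t uValue)
{
    uint64_t const fInvPhysMask = ~(RT_BIT_64(36) - 1U);
    return !(uValue & fInvPhysMask);
}
#endif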
498
499
500/** @callback_method_impl{FNCPUMRDMSR} */
501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
502{
503 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
504 /** @todo Implement variable MTRR storage. */
505 Assert(pRange->uValue == (idMsr - 0x200) / 2);
506 *puValue = 0;
507 return VINF_SUCCESS;
508}
509
510
511/** @callback_method_impl{FNCPUMWRMSR} */
512static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
513{
514 /*
515 * Validate the value.
516 */
517 Assert(pRange->uValue == (idMsr - 0x200) / 2);
518 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
519
520 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
521 if (fInvPhysMask & uValue)
522 {
523 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
524 idMsr, uValue, uValue & fInvPhysMask));
525 return VERR_CPUM_RAISE_GP_0;
526 }
527
528 /*
529 * Store it.
530 */
531 /** @todo Implement variable MTRR storage. */
532 return VINF_SUCCESS;
533}
534
535
536/** @callback_method_impl{FNCPUMRDMSR} */
537static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
538{
539 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
540 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
541 *puValue = *puFixedMtrr;
542 return VINF_SUCCESS;
543}
544
545
546/** @callback_method_impl{FNCPUMWRMSR} */
547static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
548{
549 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
550 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
551
552 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
553 {
554 uint8_t uType = (uint8_t)(uValue >> cShift);
555 if ((uType >= 7) || (uType == 2) || (uType == 3))
556 {
557 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
558 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
559 return VERR_CPUM_RAISE_GP_0;
560 }
561 }
562 *puFixedMtrr = uValue;
563 return VINF_SUCCESS;
564}
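
/*
 * Illustration of the per-byte layout validated above: a fixed-range MTRR packs
 * eight memory types, one per byte, so marking the whole range write-back
 * (type 6) yields 0x0606060606060606 and passes the loop. Helper name
 * hypothetical.
 */
#if 0
static uint64_t cpumExampleAllWriteBackFixedMtrr(void)
{
    uint64_t uValue = 0;
    for (uint32_t cShift = 0; cShift < 63; cShift += 8)
        uValue |= UINT64_C(6) /* WB */ << cShift;
    return uValue;
}
#endif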
565
566
567/** @callback_method_impl{FNCPUMRDMSR} */
568static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
569{
570 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
571 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
572 return VINF_SUCCESS;
573}
574
575
576/** @callback_method_impl{FNCPUMWRMSR} */
577static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
578{
579 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
580
581 uint8_t uType = uValue & 0xff;
582 if ((uType >= 7) || (uType == 2) || (uType == 3))
583 {
584 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
585 return VERR_CPUM_RAISE_GP_0;
586 }
587
588 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
589 return VINF_SUCCESS;
590}
591
592
593/** @callback_method_impl{FNCPUMRDMSR} */
594static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
595{
596 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
597 *puValue = pVCpu->cpum.s.Guest.msrPAT;
598 return VINF_SUCCESS;
599}
600
601
602/** @callback_method_impl{FNCPUMWRMSR} */
603static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
604{
605 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
606 if (CPUMIsPatMsrValid(uValue))
607 {
608 pVCpu->cpum.s.Guest.msrPAT = uValue;
609 return VINF_SUCCESS;
610 }
611 return VERR_CPUM_RAISE_GP_0;
612}
613
614
615/** @callback_method_impl{FNCPUMRDMSR} */
616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
617{
618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
619 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
620 return VINF_SUCCESS;
621}
622
623
624/** @callback_method_impl{FNCPUMWRMSR} */
625static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
626{
627 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
628
629 /* Note! We used to mask this by 0xffff, but turns out real HW doesn't and
630 there are generally 32-bit working bits backing this register. */
631 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
632 return VINF_SUCCESS;
633}
634
635
636/** @callback_method_impl{FNCPUMRDMSR} */
637static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
638{
639 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
640 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
641 return VINF_SUCCESS;
642}
643
644
645/** @callback_method_impl{FNCPUMWRMSR} */
646static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
647{
648 if (X86_IS_CANONICAL(uValue))
649 {
650 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
651 return VINF_SUCCESS;
652 }
653 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
654 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
655 return VERR_CPUM_RAISE_GP_0;
656}
657
658
659/** @callback_method_impl{FNCPUMRDMSR} */
660static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
661{
662 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
663 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
664 return VINF_SUCCESS;
665}
666
667
668/** @callback_method_impl{FNCPUMWRMSR} */
669static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
670{
671 if (X86_IS_CANONICAL(uValue))
672 {
673 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
674 return VINF_SUCCESS;
675 }
676 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
677 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
678 return VERR_CPUM_RAISE_GP_0;
679}
680
681
682/** @callback_method_impl{FNCPUMRDMSR} */
683static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
684{
685#if 0 /** @todo implement machine checks. */
686 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
687#else
688 *puValue = 0;
689#endif
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 return VINF_SUCCESS;
692}
693
694
695/** @callback_method_impl{FNCPUMRDMSR} */
696static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
697{
698 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
699 /** @todo implement machine checks. */
700 *puValue = 0;
701 return VINF_SUCCESS;
702}
703
704
705/** @callback_method_impl{FNCPUMWRMSR} */
706static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
707{
708 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
709 /** @todo implement machine checks. */
710 return VINF_SUCCESS;
711}
712
713
714/** @callback_method_impl{FNCPUMRDMSR} */
715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
716{
717 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
718 /** @todo implement machine checks. */
719 *puValue = 0;
720 return VINF_SUCCESS;
721}
722
723
724/** @callback_method_impl{FNCPUMWRMSR} */
725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
726{
727 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
728 /** @todo implement machine checks. */
729 return VINF_SUCCESS;
730}
731
732
733/** @callback_method_impl{FNCPUMRDMSR} */
734static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
735{
736 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
737 /** @todo implement IA32_DEBUGCTL. */
738 *puValue = 0;
739 return VINF_SUCCESS;
740}
741
742
743/** @callback_method_impl{FNCPUMWRMSR} */
744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
745{
746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
747 /** @todo implement IA32_DEBUGCTL. */
748 return VINF_SUCCESS;
749}
750
751
752/** @callback_method_impl{FNCPUMRDMSR} */
753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
754{
755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
756 /** @todo implement intel SMM. */
757 *puValue = 0;
758 return VINF_SUCCESS;
759}
760
761
762/** @callback_method_impl{FNCPUMWRMSR} */
763static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
764{
765 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
766 /** @todo implement intel SMM. */
767 return VERR_CPUM_RAISE_GP_0;
768}
769
770
771/** @callback_method_impl{FNCPUMRDMSR} */
772static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
773{
774 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
775 /** @todo implement intel SMM. */
776 *puValue = 0;
777 return VINF_SUCCESS;
778}
779
780
781/** @callback_method_impl{FNCPUMWRMSR} */
782static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
783{
784 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
785 /** @todo implement intel SMM. */
786 return VERR_CPUM_RAISE_GP_0;
787}
788
789
790/** @callback_method_impl{FNCPUMRDMSR} */
791static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
792{
793 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
794 /** @todo implement intel direct cache access (DCA)?? */
795 *puValue = 0;
796 return VINF_SUCCESS;
797}
798
799
800/** @callback_method_impl{FNCPUMWRMSR} */
801static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
802{
803 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
804 /** @todo implement intel direct cache access (DCA)?? */
805 return VINF_SUCCESS;
806}
807
808
809/** @callback_method_impl{FNCPUMRDMSR} */
810static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
811{
812 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
813 /** @todo implement intel direct cache access (DCA)?? */
814 *puValue = 0;
815 return VINF_SUCCESS;
816}
817
818
819/** @callback_method_impl{FNCPUMRDMSR} */
820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
821{
822 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
823 /** @todo implement intel direct cache access (DCA)?? */
824 *puValue = 0;
825 return VINF_SUCCESS;
826}
827
828
829/** @callback_method_impl{FNCPUMWRMSR} */
830static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
831{
832 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
833 /** @todo implement intel direct cache access (DCA)?? */
834 return VINF_SUCCESS;
835}
836
837
838/** @callback_method_impl{FNCPUMRDMSR} */
839static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
840{
841 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
842 /** @todo implement IA32_PERFEVTSEL0+. */
843 *puValue = 0;
844 return VINF_SUCCESS;
845}
846
847
848/** @callback_method_impl{FNCPUMWRMSR} */
849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
850{
851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
852 /** @todo implement IA32_PERFEVTSEL0+. */
853 return VINF_SUCCESS;
854}
855
856
857/** @callback_method_impl{FNCPUMRDMSR} */
858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
859{
860 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
861 uint64_t uValue = pRange->uValue;
862
863 /* Always provide the max bus ratio for now. XNU expects it. */
864 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
865
866 PVM pVM = pVCpu->CTX_SUFF(pVM);
867 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
868 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
869 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
870 if (uTscRatio > 0x1f)
871 uTscRatio = 0x1f;
872 uValue |= (uint64_t)uTscRatio << 40;
873
874 *puValue = uValue;
875 return VINF_SUCCESS;
876}
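
/*
 * The bus-ratio calculation above, worked for a hypothetical 2.6 GHz TSC on a
 * 100 MHz scalable bus: (2600 + 50) / 100 MHz rounds to 26, which is placed in
 * bits 44:40 of IA32_PERF_STATUS. Helper name hypothetical.
 */
#if 0
static uint64_t cpumExamplePerfStatusMaxBusRatio(void)
{
    uint64_t const uScalableBusHz = UINT64_C(100000000);    /* 100 MHz */
    uint64_t const uTscHz         = UINT64_C(2600000000);   /* 2.6 GHz */
    uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz); /* = 26 */
    if (uTscRatio > 0x1f)
        uTscRatio = 0x1f;
    return (uint64_t)uTscRatio << 40;
}
#endif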
877
878
879/** @callback_method_impl{FNCPUMWRMSR} */
880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
881{
882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
883 /* Pentium4 allows writing, but all bits are ignored. */
884 return VINF_SUCCESS;
885}
886
887
888/** @callback_method_impl{FNCPUMRDMSR} */
889static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
890{
891 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
892 /** @todo implement IA32_PERFCTL. */
893 *puValue = 0;
894 return VINF_SUCCESS;
895}
896
897
898/** @callback_method_impl{FNCPUMWRMSR} */
899static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
900{
901 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
902 /** @todo implement IA32_PERFCTL. */
903 return VINF_SUCCESS;
904}
905
906
907/** @callback_method_impl{FNCPUMRDMSR} */
908static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
909{
910 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
911 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
912 *puValue = 0;
913 return VINF_SUCCESS;
914}
915
916
917/** @callback_method_impl{FNCPUMWRMSR} */
918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
919{
920 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
921 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
922 return VINF_SUCCESS;
923}
924
925
926/** @callback_method_impl{FNCPUMRDMSR} */
927static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
928{
929 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
930 /** @todo implement performance counters. */
931 *puValue = 0;
932 return VINF_SUCCESS;
933}
934
935
936/** @callback_method_impl{FNCPUMWRMSR} */
937static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
938{
939 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
940 /** @todo implement performance counters. */
941 return VINF_SUCCESS;
942}
943
944
945/** @callback_method_impl{FNCPUMRDMSR} */
946static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
947{
948 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
949 /** @todo implement performance counters. */
950 *puValue = 0;
951 return VINF_SUCCESS;
952}
953
954
955/** @callback_method_impl{FNCPUMWRMSR} */
956static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
957{
958 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
959 /** @todo implement performance counters. */
960 return VINF_SUCCESS;
961}
962
963
964/** @callback_method_impl{FNCPUMRDMSR} */
965static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
966{
967 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
968 /** @todo implement performance counters. */
969 *puValue = 0;
970 return VINF_SUCCESS;
971}
972
973
974/** @callback_method_impl{FNCPUMWRMSR} */
975static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
976{
977 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
978 /** @todo implement performance counters. */
979 return VINF_SUCCESS;
980}
981
982
983/** @callback_method_impl{FNCPUMRDMSR} */
984static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
985{
986 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
987 /** @todo implement performance counters. */
988 *puValue = 0;
989 return VINF_SUCCESS;
990}
991
992
993/** @callback_method_impl{FNCPUMWRMSR} */
994static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
995{
996 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
997 /** @todo implement performance counters. */
998 return VINF_SUCCESS;
999}
1000
1001
1002/** @callback_method_impl{FNCPUMRDMSR} */
1003static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1004{
1005 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1006 /** @todo implement performance counters. */
1007 *puValue = 0;
1008 return VINF_SUCCESS;
1009}
1010
1011
1012/** @callback_method_impl{FNCPUMWRMSR} */
1013static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1014{
1015 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1016 /** @todo implement performance counters. */
1017 return VINF_SUCCESS;
1018}
1019
1020
1021/** @callback_method_impl{FNCPUMRDMSR} */
1022static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1023{
1024 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1025 /** @todo implement performance counters. */
1026 *puValue = 0;
1027 return VINF_SUCCESS;
1028}
1029
1030
1031/** @callback_method_impl{FNCPUMWRMSR} */
1032static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1033{
1034 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1035 /** @todo implement performance counters. */
1036 return VINF_SUCCESS;
1037}
1038
1039
1040/** @callback_method_impl{FNCPUMRDMSR} */
1041static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1042{
1043 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1044 /** @todo implement IA32_CLOCK_MODULATION. */
1045 *puValue = 0;
1046 return VINF_SUCCESS;
1047}
1048
1049
1050/** @callback_method_impl{FNCPUMWRMSR} */
1051static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1052{
1053 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1054 /** @todo implement IA32_CLOCK_MODULATION. */
1055 return VINF_SUCCESS;
1056}
1057
1058
1059/** @callback_method_impl{FNCPUMRDMSR} */
1060static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1061{
1062 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1063 /** @todo implement IA32_THERM_INTERRUPT. */
1064 *puValue = 0;
1065 return VINF_SUCCESS;
1066}
1067
1068
1069/** @callback_method_impl{FNCPUMWRMSR} */
1070static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1071{
1072 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1073 /** @todo implement IA32_THERM_INTERRUPT. */
1074 return VINF_SUCCESS;
1075}
1076
1077
1078/** @callback_method_impl{FNCPUMRDMSR} */
1079static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1080{
1081 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1082 /** @todo implement IA32_THERM_STATUS. */
1083 *puValue = 0;
1084 return VINF_SUCCESS;
1085}
1086
1087
1088/** @callback_method_impl{FNCPUMWRMSR} */
1089static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1090{
1091 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1092 /** @todo implement IA32_THERM_STATUS. */
1093 return VINF_SUCCESS;
1094}
1095
1096
1097/** @callback_method_impl{FNCPUMRDMSR} */
1098static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1099{
1100 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1101 /** @todo implement IA32_THERM2_CTL. */
1102 *puValue = 0;
1103 return VINF_SUCCESS;
1104}
1105
1106
1107/** @callback_method_impl{FNCPUMWRMSR} */
1108static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1109{
1110 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1111 /** @todo implement IA32_THERM2_CTL. */
1112 return VINF_SUCCESS;
1113}
1114
1115
1116/** @callback_method_impl{FNCPUMRDMSR} */
1117static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1118{
1119 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1120 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1121 return VINF_SUCCESS;
1122}
1123
1124
1125/** @callback_method_impl{FNCPUMWRMSR} */
1126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1127{
1128 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1129#ifdef LOG_ENABLED
1130 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1131#endif
1132
1133 /* Unsupported bits are generally ignored and stripped by the MSR range
1134 entry that got us here. So, we just need to preserve fixed bits. */
1135 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1136 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1137 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1138
1139 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1140 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1141
1142 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1143 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1144 return VINF_SUCCESS;
1145}
1146
1147
1148/** @callback_method_impl{FNCPUMRDMSR} */
1149static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1150{
1151 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1152
1153 /** @todo Implement machine check exception injection. */
1154 switch (idMsr & 3)
1155 {
1156 case 0:
1157 case 1:
1158 *puValue = 0;
1159 break;
1160
1161 /* The ADDR and MISC registers aren't accessible since the
1162 corresponding STATUS bits are zero. */
1163 case 2:
1164 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1165 return VERR_CPUM_RAISE_GP_0;
1166 case 3:
1167 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1168 return VERR_CPUM_RAISE_GP_0;
1169 }
1170 return VINF_SUCCESS;
1171}
1172
1173
1174/** @callback_method_impl{FNCPUMWRMSR} */
1175static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1176{
1177 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1178 switch (idMsr & 3)
1179 {
1180 case 0:
1181 /* Ignore writes to the CTL register. */
1182 break;
1183
1184 case 1:
1185 /* According to specs, the STATUS register can only be written to
1186 with the value 0. VBoxCpuReport thinks different for a
1187 Pentium M Dothan, but implementing according to specs now. */
1188 if (uValue != 0)
1189 {
1190 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1191 return VERR_CPUM_RAISE_GP_0;
1192 }
1193 break;
1194
1195 /* The specs state that ADDR and MISC can be cleared by writing zeros.
1196 Writing 1s will GP. Need to figure out how this relates to the
1197 ADDRV and MISCV status flags. If writing is independent of those
1198 bits, we need to know whether the CPU really implements them since
1199 that is exposed by writing 0 to them.
1200 Implementing the solution with the fewest GPs for now. */
1201 case 2:
1202 if (uValue != 0)
1203 {
1204 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1205 return VERR_CPUM_RAISE_GP_0;
1206 }
1207 break;
1208 case 3:
1209 if (uValue != 0)
1210 {
1211 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1212 return VERR_CPUM_RAISE_GP_0;
1213 }
1214 break;
1215 }
1216 return VINF_SUCCESS;
1217}
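
/*
 * Illustration of the (idMsr & 3) dispatch used by the two handlers above: the
 * machine-check bank MSRs start at 0x400 with four registers per bank in the
 * order CTL, STATUS, ADDR, MISC. Helper name hypothetical.
 */
#if 0
static void cpumExampleDecodeMcBankMsr(uint32_t idMsr)
{
    static const char * const s_apszReg[4] = { "CTL", "STATUS", "ADDR", "MISC" };
    uint32_t const iBank = (idMsr - 0x400) / 4;
    Log(("MSR %#x = IA32_MC%u_%s\n", idMsr, iBank, s_apszReg[idMsr & 3]));
}
#endif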
1218
1219
1220/** @callback_method_impl{FNCPUMRDMSR} */
1221static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1222{
1223 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1224 /** @todo Implement machine check exception injection. */
1225 *puValue = 0;
1226 return VINF_SUCCESS;
1227}
1228
1229
1230/** @callback_method_impl{FNCPUMWRMSR} */
1231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1232{
1233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1234 /** @todo Implement machine check exception injection. */
1235 return VINF_SUCCESS;
1236}
1237
1238
1239/** @callback_method_impl{FNCPUMRDMSR} */
1240static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1241{
1242 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1243 /** @todo implement IA32_DS_AREA. */
1244 *puValue = 0;
1245 return VINF_SUCCESS;
1246}
1247
1248
1249/** @callback_method_impl{FNCPUMWRMSR} */
1250static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1251{
1252 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1253 /** @todo implement IA32_DS_AREA. */
1254 return VINF_SUCCESS;
1255}
1256
1257
1258/** @callback_method_impl{FNCPUMRDMSR} */
1259static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1260{
1261 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1262 /** @todo implement TSC deadline timer. */
1263 *puValue = 0;
1264 return VINF_SUCCESS;
1265}
1266
1267
1268/** @callback_method_impl{FNCPUMWRMSR} */
1269static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1270{
1271 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1272 /** @todo implement TSC deadline timer. */
1273 return VINF_SUCCESS;
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMRDMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1279{
1280 RT_NOREF_PV(pRange);
1281#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1282 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1283 && CPUMIsGuestVmxProcCtls2Set(pVCpu, &pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1284 {
1285 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, puValue, false /* fWrite */);
1286 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1287 return VINF_SUCCESS;
1288 if (rcStrict == VERR_OUT_OF_RANGE)
1289 return VERR_CPUM_RAISE_GP_0;
1290 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1291 }
1292#endif
1293 return APICReadMsr(pVCpu, idMsr, puValue);
1294}
1295
1296
1297/** @callback_method_impl{FNCPUMWRMSR} */
1298static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1299{
1300 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1301#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1302 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1303 && CPUMIsGuestVmxProcCtls2Set(pVCpu, &pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1304 {
1305 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, &uValue, true /* fWrite */);
1306 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1307 return VINF_SUCCESS;
1308 if (rcStrict == VERR_OUT_OF_RANGE)
1309 return VERR_CPUM_RAISE_GP_0;
1310 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1311 }
1312#endif
1313 return APICWriteMsr(pVCpu, idMsr, uValue);
1314}
1315
1316
1317/** @callback_method_impl{FNCPUMRDMSR} */
1318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1319{
1320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1321 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1322 *puValue = 0;
1323 return VINF_SUCCESS;
1324}
1325
1326
1327/** @callback_method_impl{FNCPUMWRMSR} */
1328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1329{
1330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1331 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1332 return VINF_SUCCESS;
1333}
1334
1335
1336/**
1337 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1338 *
1339 * @returns IA32_VMX_BASIC value.
1340 * @param pVCpu The cross context per CPU structure.
1341 */
1342VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1343{
1344 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1345 uint64_t uVmxMsr;
1346 if (pGuestFeatures->fVmx)
1347 {
1348 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1349 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1350 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, !pGuestFeatures->fLongMode )
1351 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1352 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1353 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1354 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1355 }
1356 else
1357 uVmxMsr = 0;
1358 return uVmxMsr;
1359}
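
/*
 * Sketch of pulling the individual fields back out of the IA32_VMX_BASIC value
 * composed above, using the same RT_BF_* field definitions. Helper name
 * hypothetical.
 */
#if 0
static void cpumExampleDecodeVmxBasic(uint64_t uVmxBasic)
{
    uint32_t const uVmcsId    = RT_BF_GET(uVmxBasic, VMX_BF_BASIC_VMCS_ID);
    uint32_t const cbVmcs     = RT_BF_GET(uVmxBasic, VMX_BF_BASIC_VMCS_SIZE);
    bool const     f32BitPhys = RT_BOOL(RT_BF_GET(uVmxBasic, VMX_BF_BASIC_PHYSADDR_WIDTH));
    bool const     fTrueCtls  = RT_BOOL(RT_BF_GET(uVmxBasic, VMX_BF_BASIC_TRUE_CTLS));
    Log(("VMX_BASIC: VMCS id=%#x size=%u 32-bit-phys-only=%d true-ctls=%d\n", uVmcsId, cbVmcs, f32BitPhys, fTrueCtls));
}
#endif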
1360
1361
1362/** @callback_method_impl{FNCPUMRDMSR} */
1363static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1364{
1365 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1366 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1367 return VINF_SUCCESS;
1368}
1369
1370
1371/**
1372 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1373 *
1374 * @returns IA32_VMX_PINBASED_CTLS value.
1375 * @param pVCpu The cross context per CPU structure.
1376 */
1377VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1378{
1379 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1380 uint64_t uVmxMsr;
1381 if (pGuestFeatures->fVmx)
1382 {
1383 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1384 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1385 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1386 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1387 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1388 uint32_t const fVal = VMX_PIN_CTLS_DEFAULT1;
1389 uint32_t const fZap = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1390 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1391 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1392 }
1393 else
1394 uVmxMsr = 0;
1395 return uVmxMsr;
1396}
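
/*
 * How a consumer would interpret the value composed above: the low 32 bits are
 * the allowed-0 settings (bits that must be 1) and the high 32 bits the
 * allowed-1 settings (bits that may be 1). Helper name hypothetical.
 */
#if 0
static bool cpumExampleCanSetPinCtl(uint64_t uPinCtlsMsr, uint32_t fCtl)
{
    uint32_t const fAllowed0 = RT_LO_U32(uPinCtlsMsr);  /* Must be 1. */
    uint32_t const fAllowed1 = RT_HI_U32(uPinCtlsMsr);  /* May be 1.  */
    Assert((fAllowed0 & fAllowed1) == fAllowed0);
    return RT_BOOL(fAllowed1 & fCtl);
}
#endif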
1397
1398
1399/** @callback_method_impl{FNCPUMRDMSR} */
1400static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1401{
1402 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1403 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1404 return VINF_SUCCESS;
1405}
1406
1407
1408/**
1409 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1410 *
1411 * @returns IA32_VMX_PROCBASED_CTLS value.
1412 * @param pVCpu The cross context per CPU structure.
1413 */
1414VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1415{
1416 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1417 uint64_t uVmxMsr;
1418 if (pGuestFeatures->fVmx)
1419 {
1420 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1421 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1422 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1423 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1424 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1425 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1426 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1427 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1428 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1429 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1430 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1431 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1432 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1433 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1434 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1435 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1436 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1437 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1438 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1439 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1440 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1441 uint32_t const fVal = VMX_PROC_CTLS_DEFAULT1;
1442 uint32_t const fZap = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1443 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1444 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1445 }
1446 else
1447 uVmxMsr = 0;
1448 return uVmxMsr;
1449}
1450
1451
1452/** @callback_method_impl{FNCPUMRDMSR} */
1453static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1454{
1455 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1456 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1457 return VINF_SUCCESS;
1458}
1459
1460
1461/**
1462 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxExitCtls.
1463 *
1464 * @returns IA32_VMX_EXIT_CTLS value.
1465 * @param pVCpu The cross context per CPU structure.
1466 */
1467VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1468{
1469 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1470 uint64_t uVmxMsr;
1471 if (pGuestFeatures->fVmx)
1472 {
1473 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1474 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1475 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1476 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1477 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1478 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1479 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1480 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1481 uint32_t const fVal = VMX_EXIT_CTLS_DEFAULT1;
1482 uint32_t const fZap = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1483 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1484 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1485 }
1486 else
1487 uVmxMsr = 0;
1488 return uVmxMsr;
1489}
1490
1491
1492/** @callback_method_impl{FNCPUMRDMSR} */
1493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1494{
1495 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1496 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1497 return VINF_SUCCESS;
1498}
1499
1500
1501/**
1502 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1503 *
1504 * @returns IA32_VMX_ENTRY_CTLS value.
1505 * @param pVCpu The cross context per CPU structure.
1506 */
1507VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1508{
1509 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1510 uint64_t uVmxMsr;
1511 if (pGuestFeatures->fVmx)
1512 {
1513 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1514 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1515 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1516 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1517 uint32_t const fDefault1 = VMX_ENTRY_CTLS_DEFAULT1;
1518 uint32_t const fVal = fDefault1;
1519 uint32_t const fZap = fFeatures | fDefault1;
1520 AssertMsg((fVal & fZap) == fVal, ("fVal=%#RX32 fZap=%#RX32 fFeatures=%#RX32\n", fVal, fZap, fFeatures));
1521 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1522 }
1523 else
1524 uVmxMsr = 0;
1525 return uVmxMsr;
1526}
1527
1528
1529/** @callback_method_impl{FNCPUMRDMSR} */
1530static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1531{
1532 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1533 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1534 return VINF_SUCCESS;
1535}
1536
1537
1538/**
1539 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1540 *
1541 * @returns IA32_VMX_MISC MSR.
1542 * @param pVCpu The cross context per CPU structure.
1543 */
1544VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1545{
1546 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1547 uint64_t uVmxMsr;
1548 if (pGuestFeatures->fVmx)
1549 {
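    /* The host-dependent fields are derived from the host's IA32_VMX_MISC value and
       clamped/masked to what the virtual CPU supports (auto-MSR count, activity states). */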
1550 uint64_t uHostMsr;
1551 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1552 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1553 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_AUTOMSR_COUNT_MAX);
1554 uint8_t const fActivityState = RT_BF_GET(uHostMsr, VMX_BF_MISC_ACTIVITY_STATES) & VMX_V_GUEST_ACTIVITY_STATE_MASK;
1555 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1556 | RT_BF_MAKE(VMX_BF_MISC_EXIT_SAVE_EFER_LMA, pGuestFeatures->fVmxExitSaveEferLma )
1557 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, fActivityState )
1558 | RT_BF_MAKE(VMX_BF_MISC_INTEL_PT, pGuestFeatures->fVmxIntelPt )
1559 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1560 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1561 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1562 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1563 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1564 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1565 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1566 }
1567 else
1568 uVmxMsr = 0;
1569 return uVmxMsr;
1570}
1571
1572
1573/** @callback_method_impl{FNCPUMRDMSR} */
1574static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1575{
1576 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1577 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1578 return VINF_SUCCESS;
1579}
1580
1581
1582/**
1583 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1584 *
1585 * @returns IA32_VMX_CR0_FIXED0 value.
1586 * @param pVCpu The cross context per CPU structure.
1587 */
1588VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1589{
1590 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1591 if (pGuestFeatures->fVmx)
1592 {
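    /* With unrestricted guest execution the guest may run with CR0.PE and CR0.PG clear,
       so the relaxed fixed-0 mask is reported in that case. */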
1593 uint64_t const uVmxMsr = pGuestFeatures->fVmxUnrestrictedGuest ? VMX_V_CR0_FIXED0_UX : VMX_V_CR0_FIXED0;
1594 return uVmxMsr;
1595 }
1596 return 0;
1597}
1598
1599
1600/** @callback_method_impl{FNCPUMRDMSR} */
1601static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1602{
1603 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1604 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1605 return VINF_SUCCESS;
1606}
1607
1608
1609/**
1610 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1611 *
1612 * @returns IA32_VMX_CR0_FIXED1 MSR.
1613 * @param pVCpu The cross context per CPU structure.
1614 */
1615VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1616{
1617 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1618 uint64_t uVmxMsr;
1619 if (pGuestFeatures->fVmx)
1620 {
1621 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1622 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1623 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1624 }
1625 else
1626 uVmxMsr = 0;
1627 return uVmxMsr;
1628}
1629
1630
1631/** @callback_method_impl{FNCPUMRDMSR} */
1632static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1633{
1634 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1635 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1636 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1637 return VINF_SUCCESS;
1638}
1639
1640
1641/**
1642 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1643 *
1644 * @returns IA32_VMX_CR4_FIXED0 value.
1645 * @param pVCpu The cross context per CPU structure.
1646 */
1647VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1648{
1649 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1650 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1651 return uVmxMsr;
1652}
1653
1654
1655/** @callback_method_impl{FNCPUMRDMSR} */
1656static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1657{
1658 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1659 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1660 return VINF_SUCCESS;
1661}
1662
1663
1664/**
1665 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1666 *
1667 * @returns IA32_VMX_CR4_FIXED1 MSR.
1668 * @param pVCpu The cross context per CPU structure.
1669 */
1670VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1671{
1672 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1673 uint64_t uVmxMsr;
1674 if (pGuestFeatures->fVmx)
1675 {
1676 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1677 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1678 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1679 }
1680 else
1681 uVmxMsr = 0;
1682 return uVmxMsr;
1683}
1684
1685
1686/** @callback_method_impl{FNCPUMRDMSR} */
1687static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1688{
1689 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1690 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1691 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1692 return VINF_SUCCESS;
1693}
1694
1695
1696/**
1697 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1698 *
1699 * @returns IA32_VMX_VMCS_ENUM value.
1700 * @param pVCpu The cross context per CPU structure.
1701 */
1702VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1703{
1704 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1705 uint64_t uVmxMsr;
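    /* Only the highest VMCS field index is advertised; no other enumeration info is exposed. */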
1706 if (pGuestFeatures->fVmx)
1707 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1708 else
1709 uVmxMsr = 0;
1710 return uVmxMsr;
1711}
1712
1713
1714/** @callback_method_impl{FNCPUMRDMSR} */
1715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1716{
1717 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1718 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1719 return VINF_SUCCESS;
1720}
1721
1722
1723/**
1724 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1725 *
1726 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1727 * @param pVCpu The cross context per CPU structure.
1728 */
1729VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1730{
1731 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1732 uint64_t uVmxMsr;
1733 if ( pGuestFeatures->fVmx
1734 && pGuestFeatures->fVmxSecondaryExecCtls)
1735 {
1736 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1737 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1738 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1739 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1740 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1741 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1742 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1743 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1744 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1745 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1746 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1747 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1748 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1749 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1750 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1751 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1752 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1753 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1754 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1755 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
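        /* Secondary processor-based controls have no default1 class, so no bits are reported
           as must-be-one (the low dword stays zero). */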
1756 uint32_t const fVal = 0;
1757 uint32_t const fZap = fFeatures;
1758 uVmxMsr = RT_MAKE_U64(fVal, fZap);
1759 }
1760 else
1761 uVmxMsr = 0;
1762 return uVmxMsr;
1763}
1764
1765
1766/** @callback_method_impl{FNCPUMRDMSR} */
1767static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1768{
1769 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1770 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1771 return VINF_SUCCESS;
1772}
1773
1774
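/*
 * The following VMX capability MSRs are not implemented for the guest yet; the stubs below
 * (IA32_VMX_EPT_VPID_CAP and the IA32_VMX_TRUE_xxx_CTLS MSRs) simply read as zero.
 */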
1775/** @callback_method_impl{FNCPUMRDMSR} */
1776static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1777{
1778 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1779 *puValue = 0;
1780 return VINF_SUCCESS;
1781}
1782
1783
1784/** @callback_method_impl{FNCPUMRDMSR} */
1785static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1786{
1787 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1788 *puValue = 0;
1789 return VINF_SUCCESS;
1790}
1791
1792
1793/** @callback_method_impl{FNCPUMRDMSR} */
1794static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1795{
1796 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1797 *puValue = 0;
1798 return VINF_SUCCESS;
1799}
1800
1801
1802/** @callback_method_impl{FNCPUMRDMSR} */
1803static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1804{
1805 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1806 *puValue = 0;
1807 return VINF_SUCCESS;
1808}
1809
1810
1811/** @callback_method_impl{FNCPUMRDMSR} */
1812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1813{
1814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1815 *puValue = 0;
1816 return VINF_SUCCESS;
1817}
1818
1819
1820/**
1821 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1822 *
1823 * @returns IA32_VMX_VMFUNC value.
1824 * @param pVCpu The cross context per CPU structure.
1825 */
1826VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1827{
1828 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1829 uint64_t uVmxMsr;
1830 if ( pGuestFeatures->fVmx
1831 && pGuestFeatures->fVmxVmFunc)
1832 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1833 else
1834 uVmxMsr = 0;
1835 return uVmxMsr;
1836}
1837
1838
1839/** @callback_method_impl{FNCPUMRDMSR} */
1840static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1841{
1842 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1843 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1844 return VINF_SUCCESS;
1845}
1846
1847
1848/** @callback_method_impl{FNCPUMRDMSR} */
1849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1850{
1851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1852 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1853 return VINF_SUCCESS;
1854}
1855
1856
1857/** @callback_method_impl{FNCPUMWRMSR} */
1858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1859{
1860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1861
1862 /* NB: The STIBP bit can be set even when IBRS is present, regardless of whether STIBP is actually implemented. */
1863 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1864 {
1865 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1866 return VERR_CPUM_RAISE_GP_0;
1867 }
1868
1869 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1870 return VINF_SUCCESS;
1871}
1872
1873
1874/** @callback_method_impl{FNCPUMWRMSR} */
1875static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1876{
1877 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
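    /* IA32_PRED_CMD is a write-only command MSR (bit 0 = IBPB); the command has no effect
       on the virtual CPU state here, so the write is simply accepted. */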
1878 return VINF_SUCCESS;
1879}
1880
1881
1882/** @callback_method_impl{FNCPUMRDMSR} */
1883static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1884{
1885 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1886 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1887 return VINF_SUCCESS;
1888}
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901/*
1902 * AMD64
1903 * AMD64
1904 * AMD64
1905 */
1906
1907
1908/** @callback_method_impl{FNCPUMRDMSR} */
1909static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1910{
1911 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1912 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1913 return VINF_SUCCESS;
1914}
1915
1916
1917/** @callback_method_impl{FNCPUMWRMSR} */
1918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1919{
1920 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
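    /* Validate the new EFER value against the guest's CPUID features and current CR0;
       raise #GP(0) on failure, otherwise commit it without further checks. */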
1921 uint64_t uValidatedEfer;
1922 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1923 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1924 if (RT_FAILURE(rc))
1925 return VERR_CPUM_RAISE_GP_0;
1926
1927 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1928 return VINF_SUCCESS;
1929}
1930
1931
1932/** @callback_method_impl{FNCPUMRDMSR} */
1933static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1934{
1935 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1936 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1937 return VINF_SUCCESS;
1938}
1939
1940
1941/** @callback_method_impl{FNCPUMWRMSR} */
1942static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1943{
1944 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1945 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1946 return VINF_SUCCESS;
1947}
1948
1949
1950/** @callback_method_impl{FNCPUMRDMSR} */
1951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1952{
1953 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1954 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1955 return VINF_SUCCESS;
1956}
1957
1958
1959/** @callback_method_impl{FNCPUMWRMSR} */
1960static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1961{
1962 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1963 if (!X86_IS_CANONICAL(uValue))
1964 {
1965 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1966 return VERR_CPUM_RAISE_GP_0;
1967 }
1968 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1969 return VINF_SUCCESS;
1970}
1971
1972
1973/** @callback_method_impl{FNCPUMRDMSR} */
1974static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1975{
1976 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1977 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1978 return VINF_SUCCESS;
1979}
1980
1981
1982/** @callback_method_impl{FNCPUMWRMSR} */
1983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1984{
1985 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1986 if (!X86_IS_CANONICAL(uValue))
1987 {
1988 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1989 return VERR_CPUM_RAISE_GP_0;
1990 }
1991 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1992 return VINF_SUCCESS;
1993}
1994
1995
1996/** @callback_method_impl{FNCPUMRDMSR} */
1997static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1998{
1999 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2000 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
2001 return VINF_SUCCESS;
2002}
2003
2004
2005/** @callback_method_impl{FNCPUMWRMSR} */
2006static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2007{
2008 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2009 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
2010 return VINF_SUCCESS;
2011}
2012
2013
2014/** @callback_method_impl{FNCPUMRDMSR} */
2015static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2016{
2017 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2018 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
2019 return VINF_SUCCESS;
2020}
2021
2022
2023/** @callback_method_impl{FNCPUMWRMSR} */
2024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2025{
2026 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2027 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
2028 return VINF_SUCCESS;
2029}
2030
2031
2032/** @callback_method_impl{FNCPUMRDMSR} */
2033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2034{
2035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2036 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
2037 return VINF_SUCCESS;
2038}
2039
2040/** @callback_method_impl{FNCPUMWRMSR} */
2041static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2042{
2043 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2044 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2045 return VINF_SUCCESS;
2046}
2047
2048
2049
2050/** @callback_method_impl{FNCPUMRDMSR} */
2051static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2052{
2053 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2054 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2055 return VINF_SUCCESS;
2056}
2057
2058/** @callback_method_impl{FNCPUMWRMSR} */
2059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2060{
2061 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2062 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2063 return VINF_SUCCESS;
2064}
2065
2066
2067/** @callback_method_impl{FNCPUMRDMSR} */
2068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2069{
2070 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2071 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2072 return VINF_SUCCESS;
2073}
2074
2075/** @callback_method_impl{FNCPUMWRMSR} */
2076static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2077{
2078 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2079 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2080 return VINF_SUCCESS;
2081}
2082
2083
2084/*
2085 * Intel specific
2086 * Intel specific
2087 * Intel specific
2088 */
2089
2090/** @callback_method_impl{FNCPUMRDMSR} */
2091static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2092{
2093 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2094 /** @todo recalc clock frequency ratio? */
2095 *puValue = pRange->uValue;
2096 return VINF_SUCCESS;
2097}
2098
2099
2100/** @callback_method_impl{FNCPUMWRMSR} */
2101static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2102{
2103 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2104 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2105 return VINF_SUCCESS;
2106}
2107
2108
2109/** @callback_method_impl{FNCPUMRDMSR} */
2110static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2111{
2112 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2113
2114    /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2115       have a 4-bit core count. */
2116 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2117 uint16_t cThreads = cCores; /** @todo hyper-threading. */
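    /* Thread count goes in the low word, core count in the high word (RT_MAKE_U32 takes (low, high)). */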
2118 *puValue = RT_MAKE_U32(cThreads, cCores);
2119 return VINF_SUCCESS;
2120}
2121
2122
2123/** @callback_method_impl{FNCPUMRDMSR} */
2124static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2125{
2126 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2127 /** @todo P4 hard power on config */
2128 *puValue = pRange->uValue;
2129 return VINF_SUCCESS;
2130}
2131
2132
2133/** @callback_method_impl{FNCPUMWRMSR} */
2134static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2135{
2136 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2137 /** @todo P4 hard power on config */
2138 return VINF_SUCCESS;
2139}
2140
2141
2142/** @callback_method_impl{FNCPUMRDMSR} */
2143static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2144{
2145 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2146 /** @todo P4 soft power on config */
2147 *puValue = pRange->uValue;
2148 return VINF_SUCCESS;
2149}
2150
2151
2152/** @callback_method_impl{FNCPUMWRMSR} */
2153static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2154{
2155 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2156 /** @todo P4 soft power on config */
2157 return VINF_SUCCESS;
2158}
2159
2160
2161/** @callback_method_impl{FNCPUMRDMSR} */
2162static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2163{
2164 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2165
2166 uint64_t uValue;
2167 PVM pVM = pVCpu->CTX_SUFF(pVM);
2168 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2169 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2170 {
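        /* Map the scalable bus frequency onto the frequency ID encoding (placed in bits 16..19
           below) and derive the bus-to-core ratio reported in bits 24..31. */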
2171 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2172 {
2173 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2174 uValue = 0;
2175 }
2176 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2177 {
2178 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2179 uValue = 1;
2180 }
2181 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2182 {
2183 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2184 uValue = 3;
2185 }
2186 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2187 {
2188 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2189 uValue = 2;
2190 }
2191 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2192 {
2193 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2194 uValue = 0;
2195 }
2196 else
2197 {
2198 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2199 uValue = 6;
2200 }
2201 uValue <<= 16;
2202
2203 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
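        /* TSC-to-bus ratio, rounded to the nearest integer. */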
2204 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2205 uValue |= (uint32_t)uTscRatio << 24;
2206
2207 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2208 }
2209 else
2210 {
2211 /* Probably more stuff here, but intel doesn't want to tell us. */
2212 uValue = pRange->uValue;
2213        uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23));  /* 100 MHz is the only documented value. */
2214 }
2215
2216 *puValue = uValue;
2217 return VINF_SUCCESS;
2218}
2219
2220
2221/** @callback_method_impl{FNCPUMWRMSR} */
2222static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2223{
2224 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2225 /** @todo P4 bus frequency config */
2226 return VINF_SUCCESS;
2227}
2228
2229
2230/** @callback_method_impl{FNCPUMRDMSR} */
2231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2232{
2233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2234
2235 /* Convert the scalable bus frequency to the encoding in the intel manual (for core+). */
2236 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2237 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2238 *puValue = 5;
2239 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2240 *puValue = 1;
2241 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2242 *puValue = 3;
2243 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2244 *puValue = 2;
2245 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2246 *puValue = 0;
2247 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2248 *puValue = 4;
2249 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2250 *puValue = 6;
2251
2252 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2253
2254 return VINF_SUCCESS;
2255}
2256
2257
2258/** @callback_method_impl{FNCPUMRDMSR} */
2259static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2260{
2261 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2262
2263 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2264 PVM pVM = pVCpu->CTX_SUFF(pVM);
2265 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2266 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2267 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2268 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2269 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2270
2271 /* Ivy bridge has a minimum operating ratio as well. */
2272 if (true) /** @todo detect sandy bridge. */
2273 uValue |= (uint64_t)uTscRatio << 48;
2274
2275 *puValue = uValue;
2276 return VINF_SUCCESS;
2277}
2278
2279
2280/** @callback_method_impl{FNCPUMRDMSR} */
2281static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2282{
2283 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2284
2285 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2286
2287 PVM pVM = pVCpu->CTX_SUFF(pVM);
2288 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2289 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2290 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2291 uValue |= (uint32_t)uTscRatio << 8;
2292
2293 *puValue = uValue;
2294 return VINF_SUCCESS;
2295}
2296
2297
2298/** @callback_method_impl{FNCPUMWRMSR} */
2299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2300{
2301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2302 /** @todo implement writing MSR_FLEX_RATIO. */
2303 return VINF_SUCCESS;
2304}
2305
2306
2307/** @callback_method_impl{FNCPUMRDMSR} */
2308static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2309{
2310 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2311 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2312 return VINF_SUCCESS;
2313}
2314
2315
2316/** @callback_method_impl{FNCPUMWRMSR} */
2317static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2318{
2319 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2320
2321 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2322 {
2323 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2324 return VERR_CPUM_RAISE_GP_0;
2325 }
2326#if 0 /** @todo check what real (old) hardware does. */
2327 if ((uValue & 7) >= 5)
2328 {
2329 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2330 return VERR_CPUM_RAISE_GP_0;
2331 }
2332#endif
2333 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2334 return VINF_SUCCESS;
2335}
2336
2337
2338/** @callback_method_impl{FNCPUMRDMSR} */
2339static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2340{
2341 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2342 /** @todo implement I/O mwait wakeup. */
2343 *puValue = 0;
2344 return VINF_SUCCESS;
2345}
2346
2347
2348/** @callback_method_impl{FNCPUMWRMSR} */
2349static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2350{
2351 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2352 /** @todo implement I/O mwait wakeup. */
2353 return VINF_SUCCESS;
2354}
2355
2356
2357/** @callback_method_impl{FNCPUMRDMSR} */
2358static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2359{
2360 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2361 /** @todo implement last branch records. */
2362 *puValue = 0;
2363 return VINF_SUCCESS;
2364}
2365
2366
2367/** @callback_method_impl{FNCPUMWRMSR} */
2368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2369{
2370 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2371 /** @todo implement last branch records. */
2372 return VINF_SUCCESS;
2373}
2374
2375
2376/** @callback_method_impl{FNCPUMRDMSR} */
2377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2378{
2379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2380 /** @todo implement last branch records. */
2381 *puValue = 0;
2382 return VINF_SUCCESS;
2383}
2384
2385
2386/** @callback_method_impl{FNCPUMWRMSR} */
2387static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2388{
2389 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2390 /** @todo implement last branch records. */
2391 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2392 * if the rest of the bits are zero. Automatic sign extending?
2393 * Investigate! */
2394 if (!X86_IS_CANONICAL(uValue))
2395 {
2396 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2397 return VERR_CPUM_RAISE_GP_0;
2398 }
2399 return VINF_SUCCESS;
2400}
2401
2402
2403/** @callback_method_impl{FNCPUMRDMSR} */
2404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2405{
2406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2407 /** @todo implement last branch records. */
2408 *puValue = 0;
2409 return VINF_SUCCESS;
2410}
2411
2412
2413/** @callback_method_impl{FNCPUMWRMSR} */
2414static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2415{
2416 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2417 /** @todo implement last branch records. */
2418 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2419 * if the rest of the bits are zero. Automatic sign extending?
2420 * Investigate! */
2421 if (!X86_IS_CANONICAL(uValue))
2422 {
2423 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2424 return VERR_CPUM_RAISE_GP_0;
2425 }
2426 return VINF_SUCCESS;
2427}
2428
2429
2430/** @callback_method_impl{FNCPUMRDMSR} */
2431static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2432{
2433 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2434 /** @todo implement last branch records. */
2435 *puValue = 0;
2436 return VINF_SUCCESS;
2437}
2438
2439
2440/** @callback_method_impl{FNCPUMWRMSR} */
2441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2442{
2443 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2444 /** @todo implement last branch records. */
2445 return VINF_SUCCESS;
2446}
2447
2448
2449/** @callback_method_impl{FNCPUMRDMSR} */
2450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2451{
2452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2453 *puValue = pRange->uValue;
2454 return VINF_SUCCESS;
2455}
2456
2457
2458/** @callback_method_impl{FNCPUMWRMSR} */
2459static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2460{
2461 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2462 return VINF_SUCCESS;
2463}
2464
2465
2466/** @callback_method_impl{FNCPUMRDMSR} */
2467static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2468{
2469 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2470 *puValue = pRange->uValue;
2471 return VINF_SUCCESS;
2472}
2473
2474
2475/** @callback_method_impl{FNCPUMWRMSR} */
2476static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2477{
2478 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2479 return VINF_SUCCESS;
2480}
2481
2482
2483/** @callback_method_impl{FNCPUMRDMSR} */
2484static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2485{
2486 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2487 *puValue = pRange->uValue;
2488 return VINF_SUCCESS;
2489}
2490
2491
2492/** @callback_method_impl{FNCPUMWRMSR} */
2493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2494{
2495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2496 return VINF_SUCCESS;
2497}
2498
2499
2500/** @callback_method_impl{FNCPUMRDMSR} */
2501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2502{
2503 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2504 /** @todo machine check. */
2505 *puValue = pRange->uValue;
2506 return VINF_SUCCESS;
2507}
2508
2509
2510/** @callback_method_impl{FNCPUMWRMSR} */
2511static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2512{
2513 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2514 /** @todo machine check. */
2515 return VINF_SUCCESS;
2516}
2517
2518
2519/** @callback_method_impl{FNCPUMRDMSR} */
2520static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2521{
2522 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2523 *puValue = 0;
2524 return VINF_SUCCESS;
2525}
2526
2527
2528/** @callback_method_impl{FNCPUMWRMSR} */
2529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2530{
2531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2532 return VINF_SUCCESS;
2533}
2534
2535
2536/** @callback_method_impl{FNCPUMRDMSR} */
2537static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2538{
2539 RT_NOREF_PV(idMsr);
2540 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2541 AssertRC(rc);
2542 return VINF_SUCCESS;
2543}
2544
2545
2546/** @callback_method_impl{FNCPUMWRMSR} */
2547static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2548{
2549 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2550    /* This CRx interface differs from the MOV CRx, GReg interface in that
2551       #GP(0) isn't raised when unsupported bits are written.  Instead they
2552       are simply ignored and masked off. (Pentium M Dothan) */
2553 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2554 * any, gain. */
2555 return VINF_SUCCESS;
2556}
2557
2558
2559/** @callback_method_impl{FNCPUMRDMSR} */
2560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2561{
2562 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2563 /** @todo implement CPUID masking. */
2564 *puValue = UINT64_MAX;
2565 return VINF_SUCCESS;
2566}
2567
2568
2569/** @callback_method_impl{FNCPUMWRMSR} */
2570static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2571{
2572 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2573 /** @todo implement CPUID masking. */
2574 return VINF_SUCCESS;
2575}
2576
2577
2578/** @callback_method_impl{FNCPUMRDMSR} */
2579static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2580{
2581 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2582 /** @todo implement CPUID masking. */
2583 *puValue = 0;
2584 return VINF_SUCCESS;
2585}
2586
2587
2588/** @callback_method_impl{FNCPUMWRMSR} */
2589static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2590{
2591 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2592 /** @todo implement CPUID masking. */
2593 return VINF_SUCCESS;
2594}
2595
2596
2597
2598/** @callback_method_impl{FNCPUMRDMSR} */
2599static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2600{
2601 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2602 /** @todo implement CPUID masking. */
2603 *puValue = UINT64_MAX;
2604 return VINF_SUCCESS;
2605}
2606
2607
2608/** @callback_method_impl{FNCPUMWRMSR} */
2609static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2610{
2611 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2612 /** @todo implement CPUID masking. */
2613 return VINF_SUCCESS;
2614}
2615
2616
2617
2618/** @callback_method_impl{FNCPUMRDMSR} */
2619static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2620{
2621 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2622 /** @todo implement AES-NI. */
2623 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2624 return VINF_SUCCESS;
2625}
2626
2627
2628/** @callback_method_impl{FNCPUMWRMSR} */
2629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2630{
2631 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2632 /** @todo implement AES-NI. */
2633 return VERR_CPUM_RAISE_GP_0;
2634}
2635
2636
2637/** @callback_method_impl{FNCPUMRDMSR} */
2638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2639{
2640 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2641 /** @todo implement intel C states. */
2642 *puValue = pRange->uValue;
2643 return VINF_SUCCESS;
2644}
2645
2646
2647/** @callback_method_impl{FNCPUMWRMSR} */
2648static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2649{
2650 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2651 /** @todo implement intel C states. */
2652 return VINF_SUCCESS;
2653}
2654
2655
2656/** @callback_method_impl{FNCPUMRDMSR} */
2657static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2658{
2659 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2660 /** @todo implement last-branch-records. */
2661 *puValue = 0;
2662 return VINF_SUCCESS;
2663}
2664
2665
2666/** @callback_method_impl{FNCPUMWRMSR} */
2667static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2668{
2669 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2670 /** @todo implement last-branch-records. */
2671 return VINF_SUCCESS;
2672}
2673
2674
2675/** @callback_method_impl{FNCPUMRDMSR} */
2676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2677{
2678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2679 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2680 *puValue = 0;
2681 return VINF_SUCCESS;
2682}
2683
2684
2685/** @callback_method_impl{FNCPUMWRMSR} */
2686static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2687{
2688 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2689 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2690 return VINF_SUCCESS;
2691}
2692
2693
2694/** @callback_method_impl{FNCPUMRDMSR} */
2695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2696{
2697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2698    /** @todo implement virtual legacy wire (VLW)? */
2699 *puValue = pRange->uValue;
2700 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2701 AAJ49/AAK51/????, which documents the inversion of this bit. The
2702 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2703       that corrects things. Some guesses at the other bits:
2704 bit 2 = INTR
2705 bit 4 = SMI
2706 bit 5 = INIT
2707 bit 6 = NMI */
2708 return VINF_SUCCESS;
2709}
2710
2711
2712/** @callback_method_impl{FNCPUMRDMSR} */
2713static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2714{
2715 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2716 /** @todo intel power management */
2717 *puValue = 0;
2718 return VINF_SUCCESS;
2719}
2720
2721
2722/** @callback_method_impl{FNCPUMWRMSR} */
2723static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2724{
2725 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2726 /** @todo intel power management */
2727 return VINF_SUCCESS;
2728}
2729
2730
2731/** @callback_method_impl{FNCPUMRDMSR} */
2732static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2733{
2734 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2735 /** @todo intel performance counters. */
2736 *puValue = 0;
2737 return VINF_SUCCESS;
2738}
2739
2740
2741/** @callback_method_impl{FNCPUMWRMSR} */
2742static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2743{
2744 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2745 /** @todo intel performance counters. */
2746 return VINF_SUCCESS;
2747}
2748
2749
2750/** @callback_method_impl{FNCPUMRDMSR} */
2751static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2752{
2753 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2754 /** @todo intel performance counters. */
2755 *puValue = 0;
2756 return VINF_SUCCESS;
2757}
2758
2759
2760/** @callback_method_impl{FNCPUMWRMSR} */
2761static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2762{
2763 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2764 /** @todo intel performance counters. */
2765 return VINF_SUCCESS;
2766}
2767
2768
2769/** @callback_method_impl{FNCPUMRDMSR} */
2770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2771{
2772 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2773 /** @todo intel power management. */
2774 *puValue = 0;
2775 return VINF_SUCCESS;
2776}
2777
2778
2779/** @callback_method_impl{FNCPUMRDMSR} */
2780static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2781{
2782 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2783 /** @todo intel power management. */
2784 *puValue = 0;
2785 return VINF_SUCCESS;
2786}
2787
2788
2789/** @callback_method_impl{FNCPUMRDMSR} */
2790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2791{
2792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2793 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2794 *puValue = 0;
2795 return VINF_SUCCESS;
2796}
2797
2798
2799/** @callback_method_impl{FNCPUMWRMSR} */
2800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2801{
2802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2803 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2804 return VINF_SUCCESS;
2805}
2806
2807
2808/** @callback_method_impl{FNCPUMRDMSR} */
2809static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2810{
2811 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2812 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2813 *puValue = 0;
2814 return VINF_SUCCESS;
2815}
2816
2817
2818/** @callback_method_impl{FNCPUMWRMSR} */
2819static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2820{
2821 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2822 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2823 return VINF_SUCCESS;
2824}
2825
2826
2827/** @callback_method_impl{FNCPUMRDMSR} */
2828static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2829{
2830 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2831 /** @todo intel RAPL. */
2832 *puValue = pRange->uValue;
2833 return VINF_SUCCESS;
2834}
2835
2836
2837/** @callback_method_impl{FNCPUMWRMSR} */
2838static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2839{
2840 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2841    /* Note! This MSR is documented as read-only and, except for a Silvermont sample, has
2842       always been classified as read-only.  This handler is only here to make it compile. */
2843 return VINF_SUCCESS;
2844}
2845
2846
2847/** @callback_method_impl{FNCPUMRDMSR} */
2848static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2849{
2850 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2851 /** @todo intel power management. */
2852 *puValue = 0;
2853 return VINF_SUCCESS;
2854}
2855
2856
2857/** @callback_method_impl{FNCPUMWRMSR} */
2858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2859{
2860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2861 /** @todo intel power management. */
2862 return VINF_SUCCESS;
2863}
2864
2865
2866/** @callback_method_impl{FNCPUMRDMSR} */
2867static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2868{
2869 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2870 /** @todo intel power management. */
2871 *puValue = 0;
2872 return VINF_SUCCESS;
2873}
2874
2875
2876/** @callback_method_impl{FNCPUMWRMSR} */
2877static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2878{
2879 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2880 /* Note! This is documented as read-only and, except for one Silvermont sample, has
2881 always been classified as read-only. This is just here to make it compile. */
2882 return VINF_SUCCESS;
2883}
2884
2885
2886/** @callback_method_impl{FNCPUMRDMSR} */
2887static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2888{
2889 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2890 /** @todo intel RAPL. */
2891 *puValue = 0;
2892 return VINF_SUCCESS;
2893}
2894
2895
2896/** @callback_method_impl{FNCPUMWRMSR} */
2897static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2898{
2899 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2900 /** @todo intel RAPL. */
2901 return VINF_SUCCESS;
2902}
2903
2904
2905/** @callback_method_impl{FNCPUMRDMSR} */
2906static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2907{
2908 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2909 /** @todo intel power management. */
2910 *puValue = 0;
2911 return VINF_SUCCESS;
2912}
2913
2914
2915/** @callback_method_impl{FNCPUMRDMSR} */
2916static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2917{
2918 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2919 /** @todo intel power management. */
2920 *puValue = 0;
2921 return VINF_SUCCESS;
2922}
2923
2924
2925/** @callback_method_impl{FNCPUMRDMSR} */
2926static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2927{
2928 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2929 /** @todo intel power management. */
2930 *puValue = 0;
2931 return VINF_SUCCESS;
2932}
2933
2934
2935/** @callback_method_impl{FNCPUMRDMSR} */
2936static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2937{
2938 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2939 /** @todo intel RAPL. */
2940 *puValue = 0;
2941 return VINF_SUCCESS;
2942}
2943
2944
2945/** @callback_method_impl{FNCPUMWRMSR} */
2946static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2947{
2948 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2949 /** @todo intel RAPL. */
2950 return VINF_SUCCESS;
2951}
2952
2953
2954/** @callback_method_impl{FNCPUMRDMSR} */
2955static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2956{
2957 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2958 /** @todo intel power management. */
2959 *puValue = 0;
2960 return VINF_SUCCESS;
2961}
2962
2963
2964/** @callback_method_impl{FNCPUMRDMSR} */
2965static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2966{
2967 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2968 /** @todo intel power management. */
2969 *puValue = 0;
2970 return VINF_SUCCESS;
2971}
2972
2973
2974/** @callback_method_impl{FNCPUMRDMSR} */
2975static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2976{
2977 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2978 /** @todo intel power management. */
2979 *puValue = 0;
2980 return VINF_SUCCESS;
2981}
2982
2983
2984/** @callback_method_impl{FNCPUMRDMSR} */
2985static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2986{
2987 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2988 /** @todo intel RAPL. */
2989 *puValue = 0;
2990 return VINF_SUCCESS;
2991}
2992
2993
2994/** @callback_method_impl{FNCPUMWRMSR} */
2995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2996{
2997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2998 /** @todo intel RAPL. */
2999 return VINF_SUCCESS;
3000}
3001
3002
3003/** @callback_method_impl{FNCPUMRDMSR} */
3004static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3005{
3006 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3007 /** @todo intel power management. */
3008 *puValue = 0;
3009 return VINF_SUCCESS;
3010}
3011
3012
3013/** @callback_method_impl{FNCPUMRDMSR} */
3014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3015{
3016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3017 /** @todo intel RAPL. */
3018 *puValue = 0;
3019 return VINF_SUCCESS;
3020}
3021
3022
3023/** @callback_method_impl{FNCPUMWRMSR} */
3024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3025{
3026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3027 /** @todo intel RAPL. */
3028 return VINF_SUCCESS;
3029}
3030
3031
3032/** @callback_method_impl{FNCPUMRDMSR} */
3033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3034{
3035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3036 /** @todo intel power management. */
3037 *puValue = 0;
3038 return VINF_SUCCESS;
3039}
3040
3041
3042/** @callback_method_impl{FNCPUMRDMSR} */
3043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3044{
3045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3046 /** @todo intel RAPL. */
3047 *puValue = 0;
3048 return VINF_SUCCESS;
3049}
3050
3051
3052/** @callback_method_impl{FNCPUMWRMSR} */
3053static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3054{
3055 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3056 /** @todo intel RAPL. */
3057 return VINF_SUCCESS;
3058}
3059
3060
3061/** @callback_method_impl{FNCPUMRDMSR} */
3062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3063{
3064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3065 /** @todo intel power management. */
3066 *puValue = 0;
3067 return VINF_SUCCESS;
3068}
3069
3070
3071/** @callback_method_impl{FNCPUMRDMSR} */
3072static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3073{
3074 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3075 /** @todo intel RAPL. */
3076 *puValue = 0;
3077 return VINF_SUCCESS;
3078}
3079
3080
3081/** @callback_method_impl{FNCPUMWRMSR} */
3082static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3083{
3084 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3085 /** @todo intel RAPL. */
3086 return VINF_SUCCESS;
3087}
3088
3089
3090/** @callback_method_impl{FNCPUMRDMSR} */
3091static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3092{
3093 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3094 /** @todo intel power management. */
3095 *puValue = pRange->uValue;
3096 return VINF_SUCCESS;
3097}
3098
3099
3100/** @callback_method_impl{FNCPUMRDMSR} */
3101static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3102{
3103 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3104 /** @todo intel power management. */
3105 *puValue = pRange->uValue;
3106 return VINF_SUCCESS;
3107}
3108
3109
3110/** @callback_method_impl{FNCPUMRDMSR} */
3111static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3112{
3113 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3114 /** @todo intel power management. */
3115 *puValue = pRange->uValue;
3116 return VINF_SUCCESS;
3117}
3118
3119
3120/** @callback_method_impl{FNCPUMRDMSR} */
3121static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3122{
3123 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3124 /** @todo intel power management. */
3125 *puValue = 0;
3126 return VINF_SUCCESS;
3127}
3128
3129
3130/** @callback_method_impl{FNCPUMWRMSR} */
3131static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3132{
3133 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3134 /** @todo intel power management. */
3135 return VINF_SUCCESS;
3136}
3137
3138
3139/** @callback_method_impl{FNCPUMRDMSR} */
3140static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3141{
3142 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3143 /** @todo intel power management. */
3144 *puValue = 0;
3145 return VINF_SUCCESS;
3146}
3147
3148
3149/** @callback_method_impl{FNCPUMWRMSR} */
3150static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3151{
3152 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3153 /** @todo intel power management. */
3154 return VINF_SUCCESS;
3155}
3156
3157
3158/** @callback_method_impl{FNCPUMRDMSR} */
3159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3160{
3161 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3162 /** @todo uncore msrs. */
3163 *puValue = 0;
3164 return VINF_SUCCESS;
3165}
3166
3167
3168/** @callback_method_impl{FNCPUMWRMSR} */
3169static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3170{
3171 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3172 /** @todo uncore msrs. */
3173 return VINF_SUCCESS;
3174}
3175
3176
3177/** @callback_method_impl{FNCPUMRDMSR} */
3178static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3179{
3180 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3181 /** @todo uncore msrs. */
3182 *puValue = 0;
3183 return VINF_SUCCESS;
3184}
3185
3186
3187/** @callback_method_impl{FNCPUMWRMSR} */
3188static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3189{
3190 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3191 /** @todo uncore msrs. */
3192 return VINF_SUCCESS;
3193}
3194
3195
3196/** @callback_method_impl{FNCPUMRDMSR} */
3197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3198{
3199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3200 /** @todo uncore msrs. */
3201 *puValue = 0;
3202 return VINF_SUCCESS;
3203}
3204
3205
3206/** @callback_method_impl{FNCPUMWRMSR} */
3207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3208{
3209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3210 /** @todo uncore msrs. */
3211 return VINF_SUCCESS;
3212}
3213
3214
3215/** @callback_method_impl{FNCPUMRDMSR} */
3216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3217{
3218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3219 /** @todo uncore msrs. */
3220 *puValue = 0;
3221 return VINF_SUCCESS;
3222}
3223
3224
3225/** @callback_method_impl{FNCPUMWRMSR} */
3226static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3227{
3228 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3229 /** @todo uncore msrs. */
3230 return VINF_SUCCESS;
3231}
3232
3233
3234/** @callback_method_impl{FNCPUMRDMSR} */
3235static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3236{
3237 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3238 /** @todo uncore msrs. */
3239 *puValue = 0;
3240 return VINF_SUCCESS;
3241}
3242
3243
3244/** @callback_method_impl{FNCPUMWRMSR} */
3245static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3246{
3247 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3248 /** @todo uncore msrs. */
3249 return VINF_SUCCESS;
3250}
3251
3252
3253/** @callback_method_impl{FNCPUMRDMSR} */
3254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3255{
3256 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3257 /** @todo uncore msrs. */
3258 *puValue = 0;
3259 return VINF_SUCCESS;
3260}
3261
3262
3263/** @callback_method_impl{FNCPUMRDMSR} */
3264static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3265{
3266 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3267 /** @todo uncore msrs. */
3268 *puValue = 0;
3269 return VINF_SUCCESS;
3270}
3271
3272
3273/** @callback_method_impl{FNCPUMWRMSR} */
3274static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3275{
3276 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3277 /** @todo uncore msrs. */
3278 return VINF_SUCCESS;
3279}
3280
3281
3282/** @callback_method_impl{FNCPUMRDMSR} */
3283static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3284{
3285 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3286 /** @todo uncore msrs. */
3287 *puValue = 0;
3288 return VINF_SUCCESS;
3289}
3290
3291
3292/** @callback_method_impl{FNCPUMWRMSR} */
3293static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3294{
3295 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3296 /** @todo uncore msrs. */
3297 return VINF_SUCCESS;
3298}
3299
3300
3301/** @callback_method_impl{FNCPUMRDMSR} */
3302static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3303{
3304 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3305
3306 /*
3307 * 31:0 is SMI count (read only), 63:32 reserved.
3308 * Since we don't do SMI, the count is always zero.
3309 */
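     /* Purely illustrative sketch should SMIs ever be emulated (cSmis would be a
        hypothetical per-VCPU counter, not an existing field):
        *puValue = pVCpu->cpum.s.cSmis & UINT32_MAX; */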
3310 *puValue = 0;
3311 return VINF_SUCCESS;
3312}
3313
3314
3315/** @callback_method_impl{FNCPUMRDMSR} */
3316static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3317{
3318 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3319 /** @todo implement enhanced multi-threaded thermal monitoring? */
3320 *puValue = pRange->uValue;
3321 return VINF_SUCCESS;
3322}
3323
3324
3325/** @callback_method_impl{FNCPUMWRMSR} */
3326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3327{
3328 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3329 /** @todo implement enhanced multi-threaded thermal monitoring? */
3330 return VINF_SUCCESS;
3331}
3332
3333
3334/** @callback_method_impl{FNCPUMRDMSR} */
3335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3336{
3337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3338 /** @todo SMM & C-states? */
3339 *puValue = 0;
3340 return VINF_SUCCESS;
3341}
3342
3343
3344/** @callback_method_impl{FNCPUMWRMSR} */
3345static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3346{
3347 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3348 /** @todo SMM & C-states? */
3349 return VINF_SUCCESS;
3350}
3351
3352
3353/** @callback_method_impl{FNCPUMRDMSR} */
3354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3355{
3356 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3357 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3358 *puValue = 0;
3359 return VINF_SUCCESS;
3360}
3361
3362
3363/** @callback_method_impl{FNCPUMWRMSR} */
3364static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3365{
3366 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3367 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3368 return VINF_SUCCESS;
3369}
3370
3371
3372/** @callback_method_impl{FNCPUMRDMSR} */
3373static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3374{
3375 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3376 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3377 *puValue = 0;
3378 return VINF_SUCCESS;
3379}
3380
3381
3382/** @callback_method_impl{FNCPUMWRMSR} */
3383static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3384{
3385 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3386 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3387 return VINF_SUCCESS;
3388}
3389
3390
3391/** @callback_method_impl{FNCPUMRDMSR} */
3392static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3393{
3394 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3395 /** @todo Core2+ platform environment control interface control register? */
3396 *puValue = 0;
3397 return VINF_SUCCESS;
3398}
3399
3400
3401/** @callback_method_impl{FNCPUMWRMSR} */
3402static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3403{
3404 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3405 /** @todo Core2+ platform environment control interface control register? */
3406 return VINF_SUCCESS;
3407}
3408
3409
3410/** @callback_method_impl{FNCPUMRDMSR} */
3411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3412{
3413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3414 *puValue = 0;
3415 return VINF_SUCCESS;
3416}
3417
3418
3419/*
3420 * Multiple vendor P6 MSRs.
3421 * Multiple vendor P6 MSRs.
3422 * Multiple vendor P6 MSRs.
3423 *
3424 * These MSRs were introduced with the P6 but not elevated to architectural
3425 * MSRs, despite other vendors implementing them.
3426 */
3427
3428
3429/** @callback_method_impl{FNCPUMRDMSR} */
3430static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3431{
3432 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3433 /* AMD seems to record just RIP, while Intel claims to record RIP+CS.BASE
3434 if I read the docs correctly, thus the need for separate functions. */
3435 /** @todo implement last branch records. */
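     /* Rough sketch of the difference once LBRs are implemented: the Intel flavour
        would presumably return a linear address along the lines of
        pVCpu->cpum.s.Guest.rip + pVCpu->cpum.s.Guest.cs.u64Base, while the AMD one
        would return RIP as-is (field usage assumed, not verified). */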
3436 *puValue = 0;
3437 return VINF_SUCCESS;
3438}
3439
3440
3441/** @callback_method_impl{FNCPUMRDMSR} */
3442static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3443{
3444 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3445 /** @todo implement last branch records. */
3446 *puValue = 0;
3447 return VINF_SUCCESS;
3448}
3449
3450
3451/** @callback_method_impl{FNCPUMRDMSR} */
3452static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3453{
3454 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3455 /** @todo implement last exception records. */
3456 *puValue = 0;
3457 return VINF_SUCCESS;
3458}
3459
3460
3461/** @callback_method_impl{FNCPUMWRMSR} */
3462static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3463{
3464 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3465 /** @todo implement last exception records. */
3466 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3467 a non-canonical address. */
3468 return VINF_SUCCESS;
3469}
3470
3471
3472/** @callback_method_impl{FNCPUMRDMSR} */
3473static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3474{
3475 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3476 /** @todo implement last exception records. */
3477 *puValue = 0;
3478 return VINF_SUCCESS;
3479}
3480
3481
3482/** @callback_method_impl{FNCPUMWRMSR} */
3483static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3484{
3485 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3486 /** @todo implement last exception records. */
3487 return VINF_SUCCESS;
3488}
3489
3490
3491
3492/*
3493 * AMD specific
3494 * AMD specific
3495 * AMD specific
3496 */
3497
3498
3499/** @callback_method_impl{FNCPUMRDMSR} */
3500static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3501{
3502 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3503 /** @todo Implement TscRateMsr */
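     /* The TSC ratio is a fixed-point value with the fraction in bits 31:0 and the
        integer part above it (per the BKDG, as far as it is understood), so a high
        dword of 1 with a zero fraction encodes the 1.0 reset ratio returned below. */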
3504 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3505 return VINF_SUCCESS;
3506}
3507
3508
3509/** @callback_method_impl{FNCPUMWRMSR} */
3510static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3511{
3512 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3513 /** @todo Implement TscRateMsr */
3514 return VINF_SUCCESS;
3515}
3516
3517
3518/** @callback_method_impl{FNCPUMRDMSR} */
3519static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3520{
3521 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3522 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3523 /* Note: Only listed in the BKDG for Family 15h. */
3524 *puValue = 0;
3525 return VINF_SUCCESS;
3526}
3527
3528
3529/** @callback_method_impl{FNCPUMWRMSR} */
3530static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3531{
3532 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3533 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3534 return VINF_SUCCESS;
3535}
3536
3537
3538/** @callback_method_impl{FNCPUMRDMSR} */
3539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3540{
3541 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3542 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3543 /* Note: Only listed in the BKDG for Family 15h. */
3544 *puValue = 0;
3545 return VINF_SUCCESS;
3546}
3547
3548
3549/** @callback_method_impl{FNCPUMWRMSR} */
3550static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3551{
3552 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3553 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3554 return VINF_SUCCESS;
3555}
3556
3557
3558/** @callback_method_impl{FNCPUMRDMSR} */
3559static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3560{
3561 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3562 /** @todo machine check. */
3563 *puValue = 0;
3564 return VINF_SUCCESS;
3565}
3566
3567
3568/** @callback_method_impl{FNCPUMWRMSR} */
3569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3570{
3571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3572 /** @todo machine check. */
3573 return VINF_SUCCESS;
3574}
3575
3576
3577/** @callback_method_impl{FNCPUMRDMSR} */
3578static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3579{
3580 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3581 /** @todo AMD performance events. */
3582 *puValue = 0;
3583 return VINF_SUCCESS;
3584}
3585
3586
3587/** @callback_method_impl{FNCPUMWRMSR} */
3588static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3589{
3590 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3591 /** @todo AMD performance events. */
3592 return VINF_SUCCESS;
3593}
3594
3595
3596/** @callback_method_impl{FNCPUMRDMSR} */
3597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3598{
3599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3600 /** @todo AMD performance events. */
3601 *puValue = 0;
3602 return VINF_SUCCESS;
3603}
3604
3605
3606/** @callback_method_impl{FNCPUMWRMSR} */
3607static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3608{
3609 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3610 /** @todo AMD performance events. */
3611 return VINF_SUCCESS;
3612}
3613
3614
3615/** @callback_method_impl{FNCPUMRDMSR} */
3616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3617{
3618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3619 /** @todo AMD SYS_CFG */
3620 *puValue = pRange->uValue;
3621 return VINF_SUCCESS;
3622}
3623
3624
3625/** @callback_method_impl{FNCPUMWRMSR} */
3626static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3627{
3628 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3629 /** @todo AMD SYS_CFG */
3630 return VINF_SUCCESS;
3631}
3632
3633
3634/** @callback_method_impl{FNCPUMRDMSR} */
3635static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3636{
3637 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3638 /** @todo AMD HW_CFG */
3639 *puValue = 0;
3640 return VINF_SUCCESS;
3641}
3642
3643
3644/** @callback_method_impl{FNCPUMWRMSR} */
3645static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3646{
3647 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3648 /** @todo AMD HW_CFG */
3649 return VINF_SUCCESS;
3650}
3651
3652
3653/** @callback_method_impl{FNCPUMRDMSR} */
3654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3655{
3656 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3657 /** @todo AMD IorrMask/IorrBase */
3658 *puValue = 0;
3659 return VINF_SUCCESS;
3660}
3661
3662
3663/** @callback_method_impl{FNCPUMWRMSR} */
3664static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3665{
3666 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3667 /** @todo AMD IorrMask/IorrBase */
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/** @callback_method_impl{FNCPUMRDMSR} */
3673static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3674{
3675 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3676 /** @todo AMD IorrMask/IorrBase */
3677 *puValue = 0;
3678 return VINF_SUCCESS;
3679}
3680
3681
3682/** @callback_method_impl{FNCPUMWRMSR} */
3683static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3684{
3685 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3686 /** @todo AMD IorrMask/IorrBase */
3687 return VINF_SUCCESS;
3688}
3689
3690
3691/** @callback_method_impl{FNCPUMRDMSR} */
3692static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3693{
3694 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3695 *puValue = 0;
3696 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3697 * for TOPMEM2. */
3698 //if (pRange->uValue == 0)
3699 // *puValue = _4G - RamHoleSize;
3700 return VINF_SUCCESS;
3701}
3702
3703
3704/** @callback_method_impl{FNCPUMWRMSR} */
3705static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3706{
3707 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3708 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3709 return VINF_SUCCESS;
3710}
3711
3712
3713/** @callback_method_impl{FNCPUMRDMSR} */
3714static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3715{
3716 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3717 /** @todo AMD NB_CFG1 */
3718 *puValue = 0;
3719 return VINF_SUCCESS;
3720}
3721
3722
3723/** @callback_method_impl{FNCPUMWRMSR} */
3724static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3725{
3726 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3727 /** @todo AMD NB_CFG1 */
3728 return VINF_SUCCESS;
3729}
3730
3731
3732/** @callback_method_impl{FNCPUMRDMSR} */
3733static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3734{
3735 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3736 /** @todo machine check. */
3737 *puValue = 0;
3738 return VINF_SUCCESS;
3739}
3740
3741
3742/** @callback_method_impl{FNCPUMWRMSR} */
3743static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3744{
3745 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3746 /** @todo machine check. */
3747 return VINF_SUCCESS;
3748}
3749
3750
3751/** @callback_method_impl{FNCPUMRDMSR} */
3752static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3753{
3754 RT_NOREF_PV(idMsr);
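     /* Each CPU name MSR supplies 8 bytes of the 48-byte processor name string:
        pRange->uValue / 2 picks CPUID leaf 0x80000002..0x80000004, even MSRs return
        EAX:EBX and odd ones ECX:EDX of that leaf. */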
3755 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3756 if (pLeaf)
3757 {
3758 if (!(pRange->uValue & 1))
3759 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3760 else
3761 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3762 }
3763 else
3764 *puValue = 0;
3765 return VINF_SUCCESS;
3766}
3767
3768
3769/** @callback_method_impl{FNCPUMWRMSR} */
3770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3771{
3772 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3773 /** @todo Remember guest programmed CPU name. */
3774 return VINF_SUCCESS;
3775}
3776
3777
3778/** @callback_method_impl{FNCPUMRDMSR} */
3779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3780{
3781 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3782 /** @todo AMD HTC. */
3783 *puValue = pRange->uValue;
3784 return VINF_SUCCESS;
3785}
3786
3787
3788/** @callback_method_impl{FNCPUMWRMSR} */
3789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3790{
3791 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3792 /** @todo AMD HTC. */
3793 return VINF_SUCCESS;
3794}
3795
3796
3797/** @callback_method_impl{FNCPUMRDMSR} */
3798static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3799{
3800 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3801 /** @todo AMD STC. */
3802 *puValue = 0;
3803 return VINF_SUCCESS;
3804}
3805
3806
3807/** @callback_method_impl{FNCPUMWRMSR} */
3808static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3809{
3810 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3811 /** @todo AMD STC. */
3812 return VINF_SUCCESS;
3813}
3814
3815
3816/** @callback_method_impl{FNCPUMRDMSR} */
3817static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3818{
3819 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3820 /** @todo AMD FIDVID_CTL. */
3821 *puValue = pRange->uValue;
3822 return VINF_SUCCESS;
3823}
3824
3825
3826/** @callback_method_impl{FNCPUMWRMSR} */
3827static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3828{
3829 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3830 /** @todo AMD FIDVID_CTL. */
3831 return VINF_SUCCESS;
3832}
3833
3834
3835/** @callback_method_impl{FNCPUMRDMSR} */
3836static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3837{
3838 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3839 /** @todo AMD FIDVID_STATUS. */
3840 *puValue = pRange->uValue;
3841 return VINF_SUCCESS;
3842}
3843
3844
3845/** @callback_method_impl{FNCPUMRDMSR} */
3846static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3847{
3848 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3849 /** @todo AMD MC. */
3850 *puValue = 0;
3851 return VINF_SUCCESS;
3852}
3853
3854
3855/** @callback_method_impl{FNCPUMWRMSR} */
3856static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3857{
3858 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3859 /** @todo AMD MC. */
3860 return VINF_SUCCESS;
3861}
3862
3863
3864/** @callback_method_impl{FNCPUMRDMSR} */
3865static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3866{
3867 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3868 /** @todo AMD SMM/SMI and I/O trap. */
3869 *puValue = 0;
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** @callback_method_impl{FNCPUMWRMSR} */
3875static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3876{
3877 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3878 /** @todo AMD SMM/SMI and I/O trap. */
3879 return VINF_SUCCESS;
3880}
3881
3882
3883/** @callback_method_impl{FNCPUMRDMSR} */
3884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3885{
3886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3887 /** @todo AMD SMM/SMI and I/O trap. */
3888 *puValue = 0;
3889 return VINF_SUCCESS;
3890}
3891
3892
3893/** @callback_method_impl{FNCPUMWRMSR} */
3894static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3895{
3896 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3897 /** @todo AMD SMM/SMI and I/O trap. */
3898 return VINF_SUCCESS;
3899}
3900
3901
3902/** @callback_method_impl{FNCPUMRDMSR} */
3903static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3904{
3905 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3906 /** @todo Interrupt pending message. */
3907 *puValue = 0;
3908 return VINF_SUCCESS;
3909}
3910
3911
3912/** @callback_method_impl{FNCPUMWRMSR} */
3913static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3914{
3915 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3916 /** @todo Interrupt pending message. */
3917 return VINF_SUCCESS;
3918}
3919
3920
3921/** @callback_method_impl{FNCPUMRDMSR} */
3922static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3923{
3924 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3925 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3926 *puValue = 0;
3927 return VINF_SUCCESS;
3928}
3929
3930
3931/** @callback_method_impl{FNCPUMWRMSR} */
3932static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3933{
3934 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3935 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3936 return VINF_SUCCESS;
3937}
3938
3939
3940/** @callback_method_impl{FNCPUMRDMSR} */
3941static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3942{
3943 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3944 /** @todo AMD MMIO Configuration base address. */
3945 *puValue = 0;
3946 return VINF_SUCCESS;
3947}
3948
3949
3950/** @callback_method_impl{FNCPUMWRMSR} */
3951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3952{
3953 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3954 /** @todo AMD MMIO Configuration base address. */
3955 return VINF_SUCCESS;
3956}
3957
3958
3959/** @callback_method_impl{FNCPUMRDMSR} */
3960static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3961{
3962 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3963 /** @todo AMD 0xc0010059. */
3964 *puValue = 0;
3965 return VINF_SUCCESS;
3966}
3967
3968
3969/** @callback_method_impl{FNCPUMWRMSR} */
3970static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3971{
3972 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3973 /** @todo AMD 0xc0010059. */
3974 return VINF_SUCCESS;
3975}
3976
3977
3978/** @callback_method_impl{FNCPUMRDMSR} */
3979static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3980{
3981 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3982 /** @todo AMD P-states. */
3983 *puValue = pRange->uValue;
3984 return VINF_SUCCESS;
3985}
3986
3987
3988/** @callback_method_impl{FNCPUMRDMSR} */
3989static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3990{
3991 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3992 /** @todo AMD P-states. */
3993 *puValue = pRange->uValue;
3994 return VINF_SUCCESS;
3995}
3996
3997
3998/** @callback_method_impl{FNCPUMWRMSR} */
3999static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4000{
4001 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4002 /** @todo AMD P-states. */
4003 return VINF_SUCCESS;
4004}
4005
4006
4007/** @callback_method_impl{FNCPUMRDMSR} */
4008static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4009{
4010 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4011 /** @todo AMD P-states. */
4012 *puValue = pRange->uValue;
4013 return VINF_SUCCESS;
4014}
4015
4016
4017/** @callback_method_impl{FNCPUMWRMSR} */
4018static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4019{
4020 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4021 /** @todo AMD P-states. */
4022 return VINF_SUCCESS;
4023}
4024
4025
4026/** @callback_method_impl{FNCPUMRDMSR} */
4027static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4028{
4029 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4030 /** @todo AMD P-states. */
4031 *puValue = pRange->uValue;
4032 return VINF_SUCCESS;
4033}
4034
4035
4036/** @callback_method_impl{FNCPUMWRMSR} */
4037static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4038{
4039 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4040 /** @todo AMD P-states. */
4041 return VINF_SUCCESS;
4042}
4043
4044
4045/** @callback_method_impl{FNCPUMRDMSR} */
4046static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4047{
4048 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4049 /** @todo AMD P-states. */
4050 *puValue = pRange->uValue;
4051 return VINF_SUCCESS;
4052}
4053
4054
4055/** @callback_method_impl{FNCPUMWRMSR} */
4056static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4057{
4058 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4059 /** @todo AMD P-states. */
4060 return VINF_SUCCESS;
4061}
4062
4063
4064/** @callback_method_impl{FNCPUMRDMSR} */
4065static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4066{
4067 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4068 /** @todo AMD P-states. */
4069 *puValue = pRange->uValue;
4070 return VINF_SUCCESS;
4071}
4072
4073
4074/** @callback_method_impl{FNCPUMWRMSR} */
4075static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4076{
4077 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4078 /* Note! Writing 0 does not seem to #GP; not sure if it does anything to the value... */
4079 /** @todo AMD P-states. */
4080 return VINF_SUCCESS;
4081}
4082
4083
4084/** @callback_method_impl{FNCPUMRDMSR} */
4085static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4086{
4087 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4088 /** @todo AMD C-states. */
4089 *puValue = 0;
4090 return VINF_SUCCESS;
4091}
4092
4093
4094/** @callback_method_impl{FNCPUMWRMSR} */
4095static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4096{
4097 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4098 /** @todo AMD C-states. */
4099 return VINF_SUCCESS;
4100}
4101
4102
4103/** @callback_method_impl{FNCPUMRDMSR} */
4104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4105{
4106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4107 /** @todo AMD machine checks. */
4108 *puValue = 0;
4109 return VINF_SUCCESS;
4110}
4111
4112
4113/** @callback_method_impl{FNCPUMWRMSR} */
4114static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4115{
4116 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4117 /** @todo AMD machine checks. */
4118 return VINF_SUCCESS;
4119}
4120
4121
4122/** @callback_method_impl{FNCPUMRDMSR} */
4123static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4124{
4125 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4126 /** @todo AMD SMM. */
4127 *puValue = 0;
4128 return VINF_SUCCESS;
4129}
4130
4131
4132/** @callback_method_impl{FNCPUMWRMSR} */
4133static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4134{
4135 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4136 /** @todo AMD SMM. */
4137 return VINF_SUCCESS;
4138}
4139
4140
4141/** @callback_method_impl{FNCPUMRDMSR} */
4142static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4143{
4144 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4145 /** @todo AMD SMM. */
4146 *puValue = 0;
4147 return VINF_SUCCESS;
4148}
4149
4150
4151/** @callback_method_impl{FNCPUMWRMSR} */
4152static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4153{
4154 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4155 /** @todo AMD SMM. */
4156 return VINF_SUCCESS;
4157}
4158
4159
4160
4161/** @callback_method_impl{FNCPUMRDMSR} */
4162static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4163{
4164 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4165 /** @todo AMD SMM. */
4166 *puValue = 0;
4167 return VINF_SUCCESS;
4168}
4169
4170
4171/** @callback_method_impl{FNCPUMWRMSR} */
4172static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4173{
4174 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4175 /** @todo AMD SMM. */
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** @callback_method_impl{FNCPUMRDMSR} */
4181static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4182{
4183 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4184 PVM pVM = pVCpu->CTX_SUFF(pVM);
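     /* When SVM is exposed to the guest we report VM_CR with just the LOCK bit set
        (SVMDIS clear), i.e. SVM enabled and locked; see cpumMsrWr_AmdK8VmCr. */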
4185 if (pVM->cpum.s.GuestFeatures.fSvm)
4186 *puValue = MSR_K8_VM_CR_LOCK;
4187 else
4188 *puValue = 0;
4189 return VINF_SUCCESS;
4190}
4191
4192
4193/** @callback_method_impl{FNCPUMWRMSR} */
4194static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4195{
4196 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4197 PVM pVM = pVCpu->CTX_SUFF(pVM);
4198 if (pVM->cpum.s.GuestFeatures.fSvm)
4199 {
4200 /* Silently ignore writes to LOCK and SVM_DISABLE bit when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
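     /* Attempts to set the DPD, R_INIT or DIS_A20M bits are not implemented and
        raise #GP below. */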
4201 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4202 return VERR_CPUM_RAISE_GP_0;
4203 return VINF_SUCCESS;
4204 }
4205 return VERR_CPUM_RAISE_GP_0;
4206}
4207
4208
4209/** @callback_method_impl{FNCPUMRDMSR} */
4210static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4211{
4212 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4213 /** @todo AMD IGNNE\# control. */
4214 *puValue = 0;
4215 return VINF_SUCCESS;
4216}
4217
4218
4219/** @callback_method_impl{FNCPUMWRMSR} */
4220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4221{
4222 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4223 /** @todo AMD IGNNE\# control. */
4224 return VINF_SUCCESS;
4225}
4226
4227
4228/** @callback_method_impl{FNCPUMRDMSR} */
4229static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4230{
4231 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4232 /** @todo AMD SMM. */
4233 *puValue = 0;
4234 return VINF_SUCCESS;
4235}
4236
4237
4238/** @callback_method_impl{FNCPUMWRMSR} */
4239static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4240{
4241 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4242 /** @todo AMD SMM. */
4243 return VINF_SUCCESS;
4244}
4245
4246
4247/** @callback_method_impl{FNCPUMRDMSR} */
4248static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4249{
4250 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4251 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4252 return VINF_SUCCESS;
4253}
4254
4255
4256/** @callback_method_impl{FNCPUMWRMSR} */
4257static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4258{
4259 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
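     /* The host state save area must be 4KB aligned and must not set physical
        address bits beyond the guest's maximum physical address width. */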
4260 if (uValue & UINT64_C(0xfff))
4261 {
4262 Log(("CPUM: Invalid setting of low 12 bits set writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4263 return VERR_CPUM_RAISE_GP_0;
4264 }
4265
4266 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4267 if (fInvPhysMask & uValue)
4268 {
4269 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4270 idMsr, uValue, uValue & fInvPhysMask));
4271 return VERR_CPUM_RAISE_GP_0;
4272 }
4273
4274 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4275 return VINF_SUCCESS;
4276}
4277
4278
4279/** @callback_method_impl{FNCPUMRDMSR} */
4280static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4281{
4282 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4283 /** @todo AMD SVM. */
4284 *puValue = 0; /* RAZ */
4285 return VINF_SUCCESS;
4286}
4287
4288
4289/** @callback_method_impl{FNCPUMWRMSR} */
4290static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4291{
4292 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4293 /** @todo AMD SVM. */
4294 return VINF_SUCCESS;
4295}
4296
4297
4298/** @callback_method_impl{FNCPUMRDMSR} */
4299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4300{
4301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4302 /** @todo AMD SMM. */
4303 *puValue = 0; /* RAZ */
4304 return VINF_SUCCESS;
4305}
4306
4307
4308/** @callback_method_impl{FNCPUMWRMSR} */
4309static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4310{
4311 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4312 /** @todo AMD SMM. */
4313 return VINF_SUCCESS;
4314}
4315
4316
4317/** @callback_method_impl{FNCPUMRDMSR} */
4318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4319{
4320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4321 /** @todo AMD SMM/SMI. */
4322 *puValue = 0;
4323 return VINF_SUCCESS;
4324}
4325
4326
4327/** @callback_method_impl{FNCPUMWRMSR} */
4328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4329{
4330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4331 /** @todo AMD SMM/SMI. */
4332 return VINF_SUCCESS;
4333}
4334
4335
4336/** @callback_method_impl{FNCPUMRDMSR} */
4337static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4338{
4339 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4340 /** @todo AMD OS visible workaround. */
4341 *puValue = pRange->uValue;
4342 return VINF_SUCCESS;
4343}
4344
4345
4346/** @callback_method_impl{FNCPUMWRMSR} */
4347static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4348{
4349 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4350 /** @todo AMD OS visible workaround. */
4351 return VINF_SUCCESS;
4352}
4353
4354
4355/** @callback_method_impl{FNCPUMRDMSR} */
4356static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4357{
4358 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4359 /** @todo AMD OS visible workaround. */
4360 *puValue = 0;
4361 return VINF_SUCCESS;
4362}
4363
4364
4365/** @callback_method_impl{FNCPUMWRMSR} */
4366static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4367{
4368 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4369 /** @todo AMD OS visible workaround. */
4370 return VINF_SUCCESS;
4371}
4372
4373
4374/** @callback_method_impl{FNCPUMRDMSR} */
4375static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4376{
4377 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4378 /** @todo AMD L2I performance counters. */
4379 *puValue = 0;
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** @callback_method_impl{FNCPUMWRMSR} */
4385static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4386{
4387 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4388 /** @todo AMD L2I performance counters. */
4389 return VINF_SUCCESS;
4390}
4391
4392
4393/** @callback_method_impl{FNCPUMRDMSR} */
4394static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4395{
4396 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4397 /** @todo AMD L2I performance counters. */
4398 *puValue = 0;
4399 return VINF_SUCCESS;
4400}
4401
4402
4403/** @callback_method_impl{FNCPUMWRMSR} */
4404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4405{
4406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4407 /** @todo AMD L2I performance counters. */
4408 return VINF_SUCCESS;
4409}
4410
4411
4412/** @callback_method_impl{FNCPUMRDMSR} */
4413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4414{
4415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4416 /** @todo AMD Northbridge performance counters. */
4417 *puValue = 0;
4418 return VINF_SUCCESS;
4419}
4420
4421
4422/** @callback_method_impl{FNCPUMWRMSR} */
4423static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4424{
4425 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4426 /** @todo AMD Northbridge performance counters. */
4427 return VINF_SUCCESS;
4428}
4429
4430
4431/** @callback_method_impl{FNCPUMRDMSR} */
4432static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4433{
4434 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4435 /** @todo AMD Northbridge performance counters. */
4436 *puValue = 0;
4437 return VINF_SUCCESS;
4438}
4439
4440
4441/** @callback_method_impl{FNCPUMWRMSR} */
4442static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4443{
4444 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4445 /** @todo AMD Northbridge performance counters. */
4446 return VINF_SUCCESS;
4447}
4448
4449
4450/** @callback_method_impl{FNCPUMRDMSR} */
4451static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4452{
4453 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4454 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4455 * CPUs. Needs to be explored and K7 presence verified. */
4456 /** @todo Undocumented register, only mentioned in fam15h erratum \#608. */
4457 *puValue = pRange->uValue;
4458 return VINF_SUCCESS;
4459}
4460
4461
4462/** @callback_method_impl{FNCPUMWRMSR} */
4463static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4464{
4465 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4466 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4467 * CPUs. Needs to be explored and K7 presence verified. */
4468 /** @todo Undocumented register, only mentioned in fam15h erratum \#608. */
4469 return VINF_SUCCESS;
4470}
4471
4472
4473/** @callback_method_impl{FNCPUMRDMSR} */
4474static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4475{
4476 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4477 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4478 * CPUs. Needs to be explored and K7 presence verified. */
4479 /** @todo Undocumented register, only mentioned in fam16h BKDG r3.00 when
4480 * describing EBL_CR_POWERON. */
4481 *puValue = pRange->uValue;
4482 return VINF_SUCCESS;
4483}
4484
4485
4486/** @callback_method_impl{FNCPUMWRMSR} */
4487static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4488{
4489 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4490 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4491 * CPUs. Needs to be explored and K7 presence verified. */
4492 /** @todo Undocumented register, only mentioned in fam16h BKDG r3.00 when
4493 * describing EBL_CR_POWERON. */
4494 return VINF_SUCCESS;
4495}
4496
4497
4498/** @callback_method_impl{FNCPUMRDMSR} */
4499static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4500{
4501 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
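    /* This CPUID-control MSR mirrors CPUID leaf 0x00000007 sub-leaf 0; the EBX and
       EAX words are combined into a single 64-bit value below. */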
4502 bool fIgnored;
4503 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4504 if (pLeaf)
4505 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4506 else
4507 *puValue = 0;
4508 return VINF_SUCCESS;
4509}
4510
4511
4512/** @callback_method_impl{FNCPUMWRMSR} */
4513static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4514{
4515 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4516 /** @todo Changing CPUID leaf 7/0. */
4517 return VINF_SUCCESS;
4518}
4519
4520
4521/** @callback_method_impl{FNCPUMRDMSR} */
4522static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4523{
4524 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4525 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4526 if (pLeaf)
4527 *puValue = pLeaf->uEcx;
4528 else
4529 *puValue = 0;
4530 return VINF_SUCCESS;
4531}
4532
4533
4534/** @callback_method_impl{FNCPUMWRMSR} */
4535static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4536{
4537 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4538 /** @todo Changing CPUID leaf 6. */
4539 return VINF_SUCCESS;
4540}
4541
4542
4543/** @callback_method_impl{FNCPUMRDMSR} */
4544static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4545{
4546 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4547 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4548 if (pLeaf)
4549 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4550 else
4551 *puValue = 0;
4552 return VINF_SUCCESS;
4553}
4554
4555
4556/** @callback_method_impl{FNCPUMWRMSR} */
4557static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4558{
4559 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4560 /** @todo Changing CPUID leaf 1 (0x00000001). */
4561 return VINF_SUCCESS;
4562}
4563
4564
4565/** @callback_method_impl{FNCPUMRDMSR} */
4566static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4567{
4568 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
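    /* Mirrors CPUID leaf 0x80000001; EDX and ECX are combined into one 64-bit value below. */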
4569 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4570 if (pLeaf)
4571 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4572 else
4573 *puValue = 0;
4574 return VINF_SUCCESS;
4575}
4576
4577
4578/** @callback_method_impl{FNCPUMWRMSR} */
4579static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4580{
4581 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4582 /** @todo Changing CPUID leaf 0x80000001. */
4583 return VINF_SUCCESS;
4584}
4585
4586
4587/** @callback_method_impl{FNCPUMRDMSR} */
4588static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4589{
4590 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4591 /** @todo Fake AMD microcode patching. */
4592 *puValue = pRange->uValue;
4593 return VINF_SUCCESS;
4594}
4595
4596
4597/** @callback_method_impl{FNCPUMWRMSR} */
4598static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4599{
4600 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4601 /** @todo Fake AMD microcode patching. */
4602 return VINF_SUCCESS;
4603}
4604
4605
4606/** @callback_method_impl{FNCPUMRDMSR} */
4607static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4608{
4609 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4610 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4611 * CPUs. Needs to be explored and K7 presence verified. */
4612 /** @todo undocumented */
4613 *puValue = 0;
4614 return VINF_SUCCESS;
4615}
4616
4617
4618/** @callback_method_impl{FNCPUMWRMSR} */
4619static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4620{
4621 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4622 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4623 * CPUs. Needs to be explored and K7 presence verified. */
4624 /** @todo undocumented */
4625 return VINF_SUCCESS;
4626}
4627
4628
4629/** @callback_method_impl{FNCPUMRDMSR} */
4630static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4631{
4632 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4633 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4634 * CPUs. Needs to be explored and K7 presence verified. */
4635 /** @todo undocumented */
4636 *puValue = 0;
4637 return VINF_SUCCESS;
4638}
4639
4640
4641/** @callback_method_impl{FNCPUMWRMSR} */
4642static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4643{
4644 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4645 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4646 * CPUs. Needs to be explored and K7 presence verified. */
4647 /** @todo undocumented */
4648 return VINF_SUCCESS;
4649}
4650
4651
4652/** @callback_method_impl{FNCPUMRDMSR} */
4653static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4654{
4655 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4656 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4657 * CPUs. Needs to be explored and K7 presence verified. */
4658 /** @todo undocumented */
4659 *puValue = 0;
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** @callback_method_impl{FNCPUMWRMSR} */
4665static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4666{
4667 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4668 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4669 * CPUs. Needs to be explored and K7 presence verified. */
4670 /** @todo undocumented */
4671 return VINF_SUCCESS;
4672}
4673
4674
4675/** @callback_method_impl{FNCPUMRDMSR} */
4676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4677{
4678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4679 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4680 * CPUs. Needs to be explored and K7 presence verified. */
4681 /** @todo undocumented */
4682 *puValue = 0;
4683 return VINF_SUCCESS;
4684}
4685
4686
4687/** @callback_method_impl{FNCPUMWRMSR} */
4688static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4689{
4690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4691 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4692 * CPUs. Needs to be explored and K7 presence verified. */
4693 /** @todo undocumented */
4694 return VINF_SUCCESS;
4695}
4696
4697
4698/** @callback_method_impl{FNCPUMRDMSR} */
4699static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4700{
4701 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4702 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4703 * CPUs. Needs to be explored and K7 presence verified. */
4704 /** @todo undocumented */
4705 *puValue = 0;
4706 return VINF_SUCCESS;
4707}
4708
4709
4710/** @callback_method_impl{FNCPUMWRMSR} */
4711static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4712{
4713 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4714 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4715 * CPUs. Needs to be explored and K7 presence verified. */
4716 /** @todo undocumented */
4717 return VINF_SUCCESS;
4718}
4719
4720
4721/** @callback_method_impl{FNCPUMRDMSR} */
4722static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4723{
4724 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4725 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4726 * CPUs. Needs to be explored and K7 presence verified. */
4727 /** @todo undocumented */
4728 *puValue = 0;
4729 return VINF_SUCCESS;
4730}
4731
4732
4733/** @callback_method_impl{FNCPUMWRMSR} */
4734static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4735{
4736 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4737 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4738 * CPUs. Needs to be explored and K7 presence verified. */
4739 /** @todo undocumented */
4740 return VINF_SUCCESS;
4741}
4742
4743
4744/** @callback_method_impl{FNCPUMRDMSR} */
4745static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4746{
4747 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4748 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4749 * CPUs. Needs to be explored and K7 presence verified. */
4750 /** @todo AMD node ID and BIOS scratch. */
4751 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4752 return VINF_SUCCESS;
4753}
4754
4755
4756/** @callback_method_impl{FNCPUMWRMSR} */
4757static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4758{
4759 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4760 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4761 * CPUs. Needs to be explored and K7 presence verified. */
4762 /** @todo AMD node ID and BIOS scratch. */
4763 return VINF_SUCCESS;
4764}
4765
4766
4767/** @callback_method_impl{FNCPUMRDMSR} */
4768static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4769{
4770 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4771 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4772 * CPUs. Needs to be explored and K7 presence verified. */
4773 /** @todo AMD DRx address masking (range breakpoints). */
4774 *puValue = 0;
4775 return VINF_SUCCESS;
4776}
4777
4778
4779/** @callback_method_impl{FNCPUMWRMSR} */
4780static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4781{
4782 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4783 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4784 * CPUs. Needs to be explored and K7 presence verified. */
4785 /** @todo AMD DRx address masking (range breakpoints). */
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** @callback_method_impl{FNCPUMRDMSR} */
4791static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4792{
4793 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4794 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4795 * CPUs. Needs to be explored and K7 presence verified. */
4796 /** @todo AMD undocumented debugging features. */
4797 *puValue = 0;
4798 return VINF_SUCCESS;
4799}
4800
4801
4802/** @callback_method_impl{FNCPUMWRMSR} */
4803static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4804{
4805 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4806 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4807 * CPUs. Needs to be explored and K7 presence verified. */
4808 /** @todo AMD undocumented debugging features. */
4809 return VINF_SUCCESS;
4810}
4811
4812
4813/** @callback_method_impl{FNCPUMRDMSR} */
4814static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4815{
4816 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4817 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4818 * CPUs. Needs to be explored and K7 presence verified. */
4819 /** @todo AMD undocumented debugging features. */
4820 *puValue = 0;
4821 return VINF_SUCCESS;
4822}
4823
4824
4825/** @callback_method_impl{FNCPUMWRMSR} */
4826static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4827{
4828 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4829 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4830 * CPUs. Needs to be explored and K7 presence verified. */
4831 /** @todo AMD undocumented debugging features. */
4832 return VINF_SUCCESS;
4833}
4834
4835
4836/** @callback_method_impl{FNCPUMRDMSR} */
4837static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4838{
4839 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4840 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4841 * CPUs. Needs to be explored and K7 presence verified. */
4842 /** @todo AMD load-store config. */
4843 *puValue = 0;
4844 return VINF_SUCCESS;
4845}
4846
4847
4848/** @callback_method_impl{FNCPUMWRMSR} */
4849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4850{
4851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4852 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4853 * CPUs. Needs to be explored and K7 presence verified. */
4854 /** @todo AMD load-store config. */
4855 return VINF_SUCCESS;
4856}
4857
4858
4859/** @callback_method_impl{FNCPUMRDMSR} */
4860static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4861{
4862 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4863 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4864 * CPUs. Needs to be explored and K7 presence verified. */
4865 /** @todo AMD instruction cache config. */
4866 *puValue = 0;
4867 return VINF_SUCCESS;
4868}
4869
4870
4871/** @callback_method_impl{FNCPUMWRMSR} */
4872static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4873{
4874 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4875 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4876 * CPUs. Needs to be explored and K7 presence verified. */
4877 /** @todo AMD instruction cache config. */
4878 return VINF_SUCCESS;
4879}
4880
4881
4882/** @callback_method_impl{FNCPUMRDMSR} */
4883static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4884{
4885 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4886 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4887 * CPUs. Needs to be explored and K7 presence verified. */
4888 /** @todo AMD data cache config. */
4889 *puValue = 0;
4890 return VINF_SUCCESS;
4891}
4892
4893
4894/** @callback_method_impl{FNCPUMWRMSR} */
4895static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4896{
4897 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4898 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4899 * CPUs. Needs to be explored and K7 presence verified. */
4900 /** @todo AMD data cache config. */
4901 return VINF_SUCCESS;
4902}
4903
4904
4905/** @callback_method_impl{FNCPUMRDMSR} */
4906static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4907{
4908 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4909 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4910 * CPUs. Needs to be explored and K7 presence verified. */
4911 /** @todo AMD bus unit config. */
4912 *puValue = 0;
4913 return VINF_SUCCESS;
4914}
4915
4916
4917/** @callback_method_impl{FNCPUMWRMSR} */
4918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4919{
4920 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4921 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4922 * CPUs. Needs to be explored and K7 presence verified. */
4923 /** @todo AMD bus unit config. */
4924 return VINF_SUCCESS;
4925}
4926
4927
4928/** @callback_method_impl{FNCPUMRDMSR} */
4929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4930{
4931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4932 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4933 * CPUs. Needs to be explored and K7 presence verified. */
4934 /** @todo Undocumented AMD debug control register \#2. */
4935 *puValue = 0;
4936 return VINF_SUCCESS;
4937}
4938
4939
4940/** @callback_method_impl{FNCPUMWRMSR} */
4941static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4942{
4943 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4944 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4945 * CPUs. Needs to be explored and K7 presence verified. */
4946 /** @todo Undocumented AMD debug control register \#2. */
4947 return VINF_SUCCESS;
4948}
4949
4950
4951/** @callback_method_impl{FNCPUMRDMSR} */
4952static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4953{
4954 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4955 /** @todo AMD FPU config. */
4956 *puValue = 0;
4957 return VINF_SUCCESS;
4958}
4959
4960
4961/** @callback_method_impl{FNCPUMWRMSR} */
4962static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4963{
4964 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4965 /** @todo AMD FPU config. */
4966 return VINF_SUCCESS;
4967}
4968
4969
4970/** @callback_method_impl{FNCPUMRDMSR} */
4971static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4972{
4973 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4974 /** @todo AMD decoder config. */
4975 *puValue = 0;
4976 return VINF_SUCCESS;
4977}
4978
4979
4980/** @callback_method_impl{FNCPUMWRMSR} */
4981static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4982{
4983 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4984 /** @todo AMD decoder config. */
4985 return VINF_SUCCESS;
4986}
4987
4988
4989/** @callback_method_impl{FNCPUMRDMSR} */
4990static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4991{
4992 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4993 /* Note! 10h and 16h */
4994 /** @todo AMD bus unit config. */
4995 *puValue = 0;
4996 return VINF_SUCCESS;
4997}
4998
4999
5000/** @callback_method_impl{FNCPUMWRMSR} */
5001static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5002{
5003 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5004 /* Note! 10h and 16h */
5005 /** @todo AMD bus unit config. */
5006 return VINF_SUCCESS;
5007}
5008
5009
5010/** @callback_method_impl{FNCPUMRDMSR} */
5011static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5012{
5013 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5014 /** @todo AMD unit config. */
5015 *puValue = 0;
5016 return VINF_SUCCESS;
5017}
5018
5019
5020/** @callback_method_impl{FNCPUMWRMSR} */
5021static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5022{
5023 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5024 /** @todo AMD unit config. */
5025 return VINF_SUCCESS;
5026}
5027
5028
5029/** @callback_method_impl{FNCPUMRDMSR} */
5030static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5031{
5032 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5033 /** @todo AMD unit config 2. */
5034 *puValue = 0;
5035 return VINF_SUCCESS;
5036}
5037
5038
5039/** @callback_method_impl{FNCPUMWRMSR} */
5040static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5041{
5042 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5043 /** @todo AMD unit config 2. */
5044 return VINF_SUCCESS;
5045}
5046
5047
5048/** @callback_method_impl{FNCPUMRDMSR} */
5049static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5050{
5051 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5052 /** @todo AMD combined unit config 3. */
5053 *puValue = 0;
5054 return VINF_SUCCESS;
5055}
5056
5057
5058/** @callback_method_impl{FNCPUMWRMSR} */
5059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5060{
5061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5062 /** @todo AMD combined unit config 3. */
5063 return VINF_SUCCESS;
5064}
5065
5066
5067/** @callback_method_impl{FNCPUMRDMSR} */
5068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5069{
5070 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5071 /** @todo AMD execution unit config. */
5072 *puValue = 0;
5073 return VINF_SUCCESS;
5074}
5075
5076
5077/** @callback_method_impl{FNCPUMWRMSR} */
5078static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5079{
5080 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5081 /** @todo AMD execution unit config. */
5082 return VINF_SUCCESS;
5083}
5084
5085
5086/** @callback_method_impl{FNCPUMRDMSR} */
5087static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5088{
5089 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5090 /** @todo AMD load-store config 2. */
5091 *puValue = 0;
5092 return VINF_SUCCESS;
5093}
5094
5095
5096/** @callback_method_impl{FNCPUMWRMSR} */
5097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5098{
5099 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5100 /** @todo AMD load-store config 2. */
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** @callback_method_impl{FNCPUMRDMSR} */
5106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5107{
5108 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5109 /** @todo AMD IBS. */
5110 *puValue = 0;
5111 return VINF_SUCCESS;
5112}
5113
5114
5115/** @callback_method_impl{FNCPUMWRMSR} */
5116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5117{
5118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5119 /** @todo AMD IBS. */
5120 return VINF_SUCCESS;
5121}
5122
5123
5124/** @callback_method_impl{FNCPUMRDMSR} */
5125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5126{
5127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5128 /** @todo AMD IBS. */
5129 *puValue = 0;
5130 return VINF_SUCCESS;
5131}
5132
5133
5134/** @callback_method_impl{FNCPUMWRMSR} */
5135static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5136{
5137 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5138 /** @todo AMD IBS. */
5139 return VINF_SUCCESS;
5140}
5141
5142
5143/** @callback_method_impl{FNCPUMRDMSR} */
5144static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5145{
5146 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5147 /** @todo AMD IBS. */
5148 *puValue = 0;
5149 return VINF_SUCCESS;
5150}
5151
5152
5153/** @callback_method_impl{FNCPUMWRMSR} */
5154static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5155{
5156 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5157 /** @todo AMD IBS. */
5158 return VINF_SUCCESS;
5159}
5160
5161
5162/** @callback_method_impl{FNCPUMRDMSR} */
5163static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5164{
5165 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5166 /** @todo AMD IBS. */
5167 *puValue = 0;
5168 return VINF_SUCCESS;
5169}
5170
5171
5172/** @callback_method_impl{FNCPUMWRMSR} */
5173static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5174{
5175 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5176 /** @todo AMD IBS. */
5177 return VINF_SUCCESS;
5178}
5179
5180
5181/** @callback_method_impl{FNCPUMRDMSR} */
5182static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5183{
5184 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5185 /** @todo AMD IBS. */
5186 *puValue = 0;
5187 return VINF_SUCCESS;
5188}
5189
5190
5191/** @callback_method_impl{FNCPUMWRMSR} */
5192static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5193{
5194 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5195 /** @todo AMD IBS. */
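    /* This register holds a linear (RIP) address, so only canonical values are
       accepted; anything else raises #GP(0) below. */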
5196 if (!X86_IS_CANONICAL(uValue))
5197 {
5198 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5199 return VERR_CPUM_RAISE_GP_0;
5200 }
5201 return VINF_SUCCESS;
5202}
5203
5204
5205/** @callback_method_impl{FNCPUMRDMSR} */
5206static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5207{
5208 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5209 /** @todo AMD IBS. */
5210 *puValue = 0;
5211 return VINF_SUCCESS;
5212}
5213
5214
5215/** @callback_method_impl{FNCPUMWRMSR} */
5216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5217{
5218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5219 /** @todo AMD IBS. */
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** @callback_method_impl{FNCPUMRDMSR} */
5225static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5226{
5227 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5228 /** @todo AMD IBS. */
5229 *puValue = 0;
5230 return VINF_SUCCESS;
5231}
5232
5233
5234/** @callback_method_impl{FNCPUMWRMSR} */
5235static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5236{
5237 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5238 /** @todo AMD IBS. */
5239 return VINF_SUCCESS;
5240}
5241
5242
5243/** @callback_method_impl{FNCPUMRDMSR} */
5244static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5245{
5246 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5247 /** @todo AMD IBS. */
5248 *puValue = 0;
5249 return VINF_SUCCESS;
5250}
5251
5252
5253/** @callback_method_impl{FNCPUMWRMSR} */
5254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5255{
5256 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5257 /** @todo AMD IBS. */
5258 return VINF_SUCCESS;
5259}
5260
5261
5262/** @callback_method_impl{FNCPUMRDMSR} */
5263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5264{
5265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5266 /** @todo AMD IBS. */
5267 *puValue = 0;
5268 return VINF_SUCCESS;
5269}
5270
5271
5272/** @callback_method_impl{FNCPUMWRMSR} */
5273static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5274{
5275 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5276 /** @todo AMD IBS. */
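    /* Linear address register: only canonical values are accepted, see the check below. */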
5277 if (!X86_IS_CANONICAL(uValue))
5278 {
5279 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5280 return VERR_CPUM_RAISE_GP_0;
5281 }
5282 return VINF_SUCCESS;
5283}
5284
5285
5286/** @callback_method_impl{FNCPUMRDMSR} */
5287static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5288{
5289 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5290 /** @todo AMD IBS. */
5291 *puValue = 0;
5292 return VINF_SUCCESS;
5293}
5294
5295
5296/** @callback_method_impl{FNCPUMWRMSR} */
5297static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5298{
5299 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5300 /** @todo AMD IBS. */
5301 return VINF_SUCCESS;
5302}
5303
5304
5305/** @callback_method_impl{FNCPUMRDMSR} */
5306static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5307{
5308 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5309 /** @todo AMD IBS. */
5310 *puValue = 0;
5311 return VINF_SUCCESS;
5312}
5313
5314
5315/** @callback_method_impl{FNCPUMWRMSR} */
5316static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5317{
5318 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5319 /** @todo AMD IBS. */
5320 return VINF_SUCCESS;
5321}
5322
5323
5324/** @callback_method_impl{FNCPUMRDMSR} */
5325static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5326{
5327 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5328 /** @todo AMD IBS. */
5329 *puValue = 0;
5330 return VINF_SUCCESS;
5331}
5332
5333
5334/** @callback_method_impl{FNCPUMWRMSR} */
5335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5336{
5337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5338 /** @todo AMD IBS. */
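    /* The branch target is a linear (RIP) address, so only canonical values are
       accepted; anything else raises #GP(0) below. */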
5339 if (!X86_IS_CANONICAL(uValue))
5340 {
5341 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5342 return VERR_CPUM_RAISE_GP_0;
5343 }
5344 return VINF_SUCCESS;
5345}
5346
5347
5348
5349/*
5350 * GIM MSRs.
5351 * GIM MSRs.
5352 * GIM MSRs.
5353 */
5354
5355
5356/** @callback_method_impl{FNCPUMRDMSR} */
5357static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5358{
5359#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5360 /* Raise #GP(0) like a physical CPU would, since the nested hypervisor hasn't intercepted these MSRs. */
5361 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5362 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5363 return VERR_CPUM_RAISE_GP_0;
5364#endif
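    /* Forward the read to the active GIM provider (e.g. the Hyper-V or KVM interface). */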
5365 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5366}
5367
5368
5369/** @callback_method_impl{FNCPUMWRMSR} */
5370static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5371{
5372#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5373 /* Raise #GP(0) like a physical CPU would, since the nested hypervisor hasn't intercepted these MSRs. */
5374 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5375 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5376 return VERR_CPUM_RAISE_GP_0;
5377#endif
5378 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5379}
5380
5381
5382/**
5383 * MSR read function table.
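 *
 * The table is indexed by the MSR range's read-function value (kCpumMsrRdFn_*),
 * so the entries must stay in that enumeration's order.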
5384 */
5385static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5386{
5387 NULL, /* Invalid */
5388 cpumMsrRd_FixedValue,
5389 NULL, /* Alias */
5390 cpumMsrRd_WriteOnly,
5391 cpumMsrRd_Ia32P5McAddr,
5392 cpumMsrRd_Ia32P5McType,
5393 cpumMsrRd_Ia32TimestampCounter,
5394 cpumMsrRd_Ia32PlatformId,
5395 cpumMsrRd_Ia32ApicBase,
5396 cpumMsrRd_Ia32FeatureControl,
5397 cpumMsrRd_Ia32BiosSignId,
5398 cpumMsrRd_Ia32SmmMonitorCtl,
5399 cpumMsrRd_Ia32PmcN,
5400 cpumMsrRd_Ia32MonitorFilterLineSize,
5401 cpumMsrRd_Ia32MPerf,
5402 cpumMsrRd_Ia32APerf,
5403 cpumMsrRd_Ia32MtrrCap,
5404 cpumMsrRd_Ia32MtrrPhysBaseN,
5405 cpumMsrRd_Ia32MtrrPhysMaskN,
5406 cpumMsrRd_Ia32MtrrFixed,
5407 cpumMsrRd_Ia32MtrrDefType,
5408 cpumMsrRd_Ia32Pat,
5409 cpumMsrRd_Ia32SysEnterCs,
5410 cpumMsrRd_Ia32SysEnterEsp,
5411 cpumMsrRd_Ia32SysEnterEip,
5412 cpumMsrRd_Ia32McgCap,
5413 cpumMsrRd_Ia32McgStatus,
5414 cpumMsrRd_Ia32McgCtl,
5415 cpumMsrRd_Ia32DebugCtl,
5416 cpumMsrRd_Ia32SmrrPhysBase,
5417 cpumMsrRd_Ia32SmrrPhysMask,
5418 cpumMsrRd_Ia32PlatformDcaCap,
5419 cpumMsrRd_Ia32CpuDcaCap,
5420 cpumMsrRd_Ia32Dca0Cap,
5421 cpumMsrRd_Ia32PerfEvtSelN,
5422 cpumMsrRd_Ia32PerfStatus,
5423 cpumMsrRd_Ia32PerfCtl,
5424 cpumMsrRd_Ia32FixedCtrN,
5425 cpumMsrRd_Ia32PerfCapabilities,
5426 cpumMsrRd_Ia32FixedCtrCtrl,
5427 cpumMsrRd_Ia32PerfGlobalStatus,
5428 cpumMsrRd_Ia32PerfGlobalCtrl,
5429 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5430 cpumMsrRd_Ia32PebsEnable,
5431 cpumMsrRd_Ia32ClockModulation,
5432 cpumMsrRd_Ia32ThermInterrupt,
5433 cpumMsrRd_Ia32ThermStatus,
5434 cpumMsrRd_Ia32Therm2Ctl,
5435 cpumMsrRd_Ia32MiscEnable,
5436 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5437 cpumMsrRd_Ia32McNCtl2,
5438 cpumMsrRd_Ia32DsArea,
5439 cpumMsrRd_Ia32TscDeadline,
5440 cpumMsrRd_Ia32X2ApicN,
5441 cpumMsrRd_Ia32DebugInterface,
5442 cpumMsrRd_Ia32VmxBasic,
5443 cpumMsrRd_Ia32VmxPinbasedCtls,
5444 cpumMsrRd_Ia32VmxProcbasedCtls,
5445 cpumMsrRd_Ia32VmxExitCtls,
5446 cpumMsrRd_Ia32VmxEntryCtls,
5447 cpumMsrRd_Ia32VmxMisc,
5448 cpumMsrRd_Ia32VmxCr0Fixed0,
5449 cpumMsrRd_Ia32VmxCr0Fixed1,
5450 cpumMsrRd_Ia32VmxCr4Fixed0,
5451 cpumMsrRd_Ia32VmxCr4Fixed1,
5452 cpumMsrRd_Ia32VmxVmcsEnum,
5453 cpumMsrRd_Ia32VmxProcBasedCtls2,
5454 cpumMsrRd_Ia32VmxEptVpidCap,
5455 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5456 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5457 cpumMsrRd_Ia32VmxTrueExitCtls,
5458 cpumMsrRd_Ia32VmxTrueEntryCtls,
5459 cpumMsrRd_Ia32VmxVmFunc,
5460 cpumMsrRd_Ia32SpecCtrl,
5461 cpumMsrRd_Ia32ArchCapabilities,
5462
5463 cpumMsrRd_Amd64Efer,
5464 cpumMsrRd_Amd64SyscallTarget,
5465 cpumMsrRd_Amd64LongSyscallTarget,
5466 cpumMsrRd_Amd64CompSyscallTarget,
5467 cpumMsrRd_Amd64SyscallFlagMask,
5468 cpumMsrRd_Amd64FsBase,
5469 cpumMsrRd_Amd64GsBase,
5470 cpumMsrRd_Amd64KernelGsBase,
5471 cpumMsrRd_Amd64TscAux,
5472
5473 cpumMsrRd_IntelEblCrPowerOn,
5474 cpumMsrRd_IntelI7CoreThreadCount,
5475 cpumMsrRd_IntelP4EbcHardPowerOn,
5476 cpumMsrRd_IntelP4EbcSoftPowerOn,
5477 cpumMsrRd_IntelP4EbcFrequencyId,
5478 cpumMsrRd_IntelP6FsbFrequency,
5479 cpumMsrRd_IntelPlatformInfo,
5480 cpumMsrRd_IntelFlexRatio,
5481 cpumMsrRd_IntelPkgCStConfigControl,
5482 cpumMsrRd_IntelPmgIoCaptureBase,
5483 cpumMsrRd_IntelLastBranchFromToN,
5484 cpumMsrRd_IntelLastBranchFromN,
5485 cpumMsrRd_IntelLastBranchToN,
5486 cpumMsrRd_IntelLastBranchTos,
5487 cpumMsrRd_IntelBblCrCtl,
5488 cpumMsrRd_IntelBblCrCtl3,
5489 cpumMsrRd_IntelI7TemperatureTarget,
5490 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5491 cpumMsrRd_IntelI7MiscPwrMgmt,
5492 cpumMsrRd_IntelP6CrN,
5493 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5494 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5495 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5496 cpumMsrRd_IntelI7SandyAesNiCtl,
5497 cpumMsrRd_IntelI7TurboRatioLimit,
5498 cpumMsrRd_IntelI7LbrSelect,
5499 cpumMsrRd_IntelI7SandyErrorControl,
5500 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5501 cpumMsrRd_IntelI7PowerCtl,
5502 cpumMsrRd_IntelI7SandyPebsNumAlt,
5503 cpumMsrRd_IntelI7PebsLdLat,
5504 cpumMsrRd_IntelI7PkgCnResidencyN,
5505 cpumMsrRd_IntelI7CoreCnResidencyN,
5506 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5507 cpumMsrRd_IntelI7SandyVrMiscConfig,
5508 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5509 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5510 cpumMsrRd_IntelI7SandyPkgC2Residency,
5511 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5512 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5513 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5514 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5515 cpumMsrRd_IntelI7RaplDramPowerLimit,
5516 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5517 cpumMsrRd_IntelI7RaplDramPerfStatus,
5518 cpumMsrRd_IntelI7RaplDramPowerInfo,
5519 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5520 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5521 cpumMsrRd_IntelI7RaplPp0Policy,
5522 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5523 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5524 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5525 cpumMsrRd_IntelI7RaplPp1Policy,
5526 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5527 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5528 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5529 cpumMsrRd_IntelI7IvyConfigTdpControl,
5530 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5531 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5532 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5533 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5534 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5535 cpumMsrRd_IntelI7UncPerfFixedCtr,
5536 cpumMsrRd_IntelI7UncCBoxConfig,
5537 cpumMsrRd_IntelI7UncArbPerfCtrN,
5538 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5539 cpumMsrRd_IntelI7SmiCount,
5540 cpumMsrRd_IntelCore2EmttmCrTablesN,
5541 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5542 cpumMsrRd_IntelCore1ExtConfig,
5543 cpumMsrRd_IntelCore1DtsCalControl,
5544 cpumMsrRd_IntelCore2PeciControl,
5545 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5546
5547 cpumMsrRd_P6LastBranchFromIp,
5548 cpumMsrRd_P6LastBranchToIp,
5549 cpumMsrRd_P6LastIntFromIp,
5550 cpumMsrRd_P6LastIntToIp,
5551
5552 cpumMsrRd_AmdFam15hTscRate,
5553 cpumMsrRd_AmdFam15hLwpCfg,
5554 cpumMsrRd_AmdFam15hLwpCbAddr,
5555 cpumMsrRd_AmdFam10hMc4MiscN,
5556 cpumMsrRd_AmdK8PerfCtlN,
5557 cpumMsrRd_AmdK8PerfCtrN,
5558 cpumMsrRd_AmdK8SysCfg,
5559 cpumMsrRd_AmdK8HwCr,
5560 cpumMsrRd_AmdK8IorrBaseN,
5561 cpumMsrRd_AmdK8IorrMaskN,
5562 cpumMsrRd_AmdK8TopOfMemN,
5563 cpumMsrRd_AmdK8NbCfg1,
5564 cpumMsrRd_AmdK8McXcptRedir,
5565 cpumMsrRd_AmdK8CpuNameN,
5566 cpumMsrRd_AmdK8HwThermalCtrl,
5567 cpumMsrRd_AmdK8SwThermalCtrl,
5568 cpumMsrRd_AmdK8FidVidControl,
5569 cpumMsrRd_AmdK8FidVidStatus,
5570 cpumMsrRd_AmdK8McCtlMaskN,
5571 cpumMsrRd_AmdK8SmiOnIoTrapN,
5572 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5573 cpumMsrRd_AmdK8IntPendingMessage,
5574 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5575 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5576 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5577 cpumMsrRd_AmdFam10hPStateCurLimit,
5578 cpumMsrRd_AmdFam10hPStateControl,
5579 cpumMsrRd_AmdFam10hPStateStatus,
5580 cpumMsrRd_AmdFam10hPStateN,
5581 cpumMsrRd_AmdFam10hCofVidControl,
5582 cpumMsrRd_AmdFam10hCofVidStatus,
5583 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5584 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5585 cpumMsrRd_AmdK8SmmBase,
5586 cpumMsrRd_AmdK8SmmAddr,
5587 cpumMsrRd_AmdK8SmmMask,
5588 cpumMsrRd_AmdK8VmCr,
5589 cpumMsrRd_AmdK8IgnNe,
5590 cpumMsrRd_AmdK8SmmCtl,
5591 cpumMsrRd_AmdK8VmHSavePa,
5592 cpumMsrRd_AmdFam10hVmLockKey,
5593 cpumMsrRd_AmdFam10hSmmLockKey,
5594 cpumMsrRd_AmdFam10hLocalSmiStatus,
5595 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5596 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5597 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5598 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5599 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5600 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5601 cpumMsrRd_AmdK7MicrocodeCtl,
5602 cpumMsrRd_AmdK7ClusterIdMaybe,
5603 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5604 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5605 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5606 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5607 cpumMsrRd_AmdK8PatchLevel,
5608 cpumMsrRd_AmdK7DebugStatusMaybe,
5609 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5610 cpumMsrRd_AmdK7BHTracePtrMaybe,
5611 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5612 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5613 cpumMsrRd_AmdK7FastFlushCountMaybe,
5614 cpumMsrRd_AmdK7NodeId,
5615 cpumMsrRd_AmdK7DrXAddrMaskN,
5616 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5617 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5618 cpumMsrRd_AmdK7LoadStoreCfg,
5619 cpumMsrRd_AmdK7InstrCacheCfg,
5620 cpumMsrRd_AmdK7DataCacheCfg,
5621 cpumMsrRd_AmdK7BusUnitCfg,
5622 cpumMsrRd_AmdK7DebugCtl2Maybe,
5623 cpumMsrRd_AmdFam15hFpuCfg,
5624 cpumMsrRd_AmdFam15hDecoderCfg,
5625 cpumMsrRd_AmdFam10hBusUnitCfg2,
5626 cpumMsrRd_AmdFam15hCombUnitCfg,
5627 cpumMsrRd_AmdFam15hCombUnitCfg2,
5628 cpumMsrRd_AmdFam15hCombUnitCfg3,
5629 cpumMsrRd_AmdFam15hExecUnitCfg,
5630 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5631 cpumMsrRd_AmdFam10hIbsFetchCtl,
5632 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5633 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5634 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5635 cpumMsrRd_AmdFam10hIbsOpRip,
5636 cpumMsrRd_AmdFam10hIbsOpData,
5637 cpumMsrRd_AmdFam10hIbsOpData2,
5638 cpumMsrRd_AmdFam10hIbsOpData3,
5639 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5640 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5641 cpumMsrRd_AmdFam10hIbsCtl,
5642 cpumMsrRd_AmdFam14hIbsBrTarget,
5643
5644 cpumMsrRd_Gim
5645};
5646
5647
5648/**
5649 * MSR write function table.
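 *
 * Indexed by the MSR range's write-function value (kCpumMsrWrFn_*); entry order
 * must match that enumeration.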
5650 */
5651static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5652{
5653 NULL, /* Invalid */
5654 cpumMsrWr_IgnoreWrite,
5655 cpumMsrWr_ReadOnly,
5656 NULL, /* Alias */
5657 cpumMsrWr_Ia32P5McAddr,
5658 cpumMsrWr_Ia32P5McType,
5659 cpumMsrWr_Ia32TimestampCounter,
5660 cpumMsrWr_Ia32ApicBase,
5661 cpumMsrWr_Ia32FeatureControl,
5662 cpumMsrWr_Ia32BiosSignId,
5663 cpumMsrWr_Ia32BiosUpdateTrigger,
5664 cpumMsrWr_Ia32SmmMonitorCtl,
5665 cpumMsrWr_Ia32PmcN,
5666 cpumMsrWr_Ia32MonitorFilterLineSize,
5667 cpumMsrWr_Ia32MPerf,
5668 cpumMsrWr_Ia32APerf,
5669 cpumMsrWr_Ia32MtrrPhysBaseN,
5670 cpumMsrWr_Ia32MtrrPhysMaskN,
5671 cpumMsrWr_Ia32MtrrFixed,
5672 cpumMsrWr_Ia32MtrrDefType,
5673 cpumMsrWr_Ia32Pat,
5674 cpumMsrWr_Ia32SysEnterCs,
5675 cpumMsrWr_Ia32SysEnterEsp,
5676 cpumMsrWr_Ia32SysEnterEip,
5677 cpumMsrWr_Ia32McgStatus,
5678 cpumMsrWr_Ia32McgCtl,
5679 cpumMsrWr_Ia32DebugCtl,
5680 cpumMsrWr_Ia32SmrrPhysBase,
5681 cpumMsrWr_Ia32SmrrPhysMask,
5682 cpumMsrWr_Ia32PlatformDcaCap,
5683 cpumMsrWr_Ia32Dca0Cap,
5684 cpumMsrWr_Ia32PerfEvtSelN,
5685 cpumMsrWr_Ia32PerfStatus,
5686 cpumMsrWr_Ia32PerfCtl,
5687 cpumMsrWr_Ia32FixedCtrN,
5688 cpumMsrWr_Ia32PerfCapabilities,
5689 cpumMsrWr_Ia32FixedCtrCtrl,
5690 cpumMsrWr_Ia32PerfGlobalStatus,
5691 cpumMsrWr_Ia32PerfGlobalCtrl,
5692 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5693 cpumMsrWr_Ia32PebsEnable,
5694 cpumMsrWr_Ia32ClockModulation,
5695 cpumMsrWr_Ia32ThermInterrupt,
5696 cpumMsrWr_Ia32ThermStatus,
5697 cpumMsrWr_Ia32Therm2Ctl,
5698 cpumMsrWr_Ia32MiscEnable,
5699 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5700 cpumMsrWr_Ia32McNCtl2,
5701 cpumMsrWr_Ia32DsArea,
5702 cpumMsrWr_Ia32TscDeadline,
5703 cpumMsrWr_Ia32X2ApicN,
5704 cpumMsrWr_Ia32DebugInterface,
5705 cpumMsrWr_Ia32SpecCtrl,
5706 cpumMsrWr_Ia32PredCmd,
5707
5708 cpumMsrWr_Amd64Efer,
5709 cpumMsrWr_Amd64SyscallTarget,
5710 cpumMsrWr_Amd64LongSyscallTarget,
5711 cpumMsrWr_Amd64CompSyscallTarget,
5712 cpumMsrWr_Amd64SyscallFlagMask,
5713 cpumMsrWr_Amd64FsBase,
5714 cpumMsrWr_Amd64GsBase,
5715 cpumMsrWr_Amd64KernelGsBase,
5716 cpumMsrWr_Amd64TscAux,
5717
5718 cpumMsrWr_IntelEblCrPowerOn,
5719 cpumMsrWr_IntelP4EbcHardPowerOn,
5720 cpumMsrWr_IntelP4EbcSoftPowerOn,
5721 cpumMsrWr_IntelP4EbcFrequencyId,
5722 cpumMsrWr_IntelFlexRatio,
5723 cpumMsrWr_IntelPkgCStConfigControl,
5724 cpumMsrWr_IntelPmgIoCaptureBase,
5725 cpumMsrWr_IntelLastBranchFromToN,
5726 cpumMsrWr_IntelLastBranchFromN,
5727 cpumMsrWr_IntelLastBranchToN,
5728 cpumMsrWr_IntelLastBranchTos,
5729 cpumMsrWr_IntelBblCrCtl,
5730 cpumMsrWr_IntelBblCrCtl3,
5731 cpumMsrWr_IntelI7TemperatureTarget,
5732 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5733 cpumMsrWr_IntelI7MiscPwrMgmt,
5734 cpumMsrWr_IntelP6CrN,
5735 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5736 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5737 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5738 cpumMsrWr_IntelI7SandyAesNiCtl,
5739 cpumMsrWr_IntelI7TurboRatioLimit,
5740 cpumMsrWr_IntelI7LbrSelect,
5741 cpumMsrWr_IntelI7SandyErrorControl,
5742 cpumMsrWr_IntelI7PowerCtl,
5743 cpumMsrWr_IntelI7SandyPebsNumAlt,
5744 cpumMsrWr_IntelI7PebsLdLat,
5745 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5746 cpumMsrWr_IntelI7SandyVrMiscConfig,
5747 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5748 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5749 cpumMsrWr_IntelI7SandyPkgC2Residency,
5750 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5751 cpumMsrWr_IntelI7RaplDramPowerLimit,
5752 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5753 cpumMsrWr_IntelI7RaplPp0Policy,
5754 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5755 cpumMsrWr_IntelI7RaplPp1Policy,
5756 cpumMsrWr_IntelI7IvyConfigTdpControl,
5757 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5758 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5759 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5760 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5761 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5762 cpumMsrWr_IntelI7UncPerfFixedCtr,
5763 cpumMsrWr_IntelI7UncArbPerfCtrN,
5764 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5765 cpumMsrWr_IntelCore2EmttmCrTablesN,
5766 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5767 cpumMsrWr_IntelCore1ExtConfig,
5768 cpumMsrWr_IntelCore1DtsCalControl,
5769 cpumMsrWr_IntelCore2PeciControl,
5770
5771 cpumMsrWr_P6LastIntFromIp,
5772 cpumMsrWr_P6LastIntToIp,
5773
5774 cpumMsrWr_AmdFam15hTscRate,
5775 cpumMsrWr_AmdFam15hLwpCfg,
5776 cpumMsrWr_AmdFam15hLwpCbAddr,
5777 cpumMsrWr_AmdFam10hMc4MiscN,
5778 cpumMsrWr_AmdK8PerfCtlN,
5779 cpumMsrWr_AmdK8PerfCtrN,
5780 cpumMsrWr_AmdK8SysCfg,
5781 cpumMsrWr_AmdK8HwCr,
5782 cpumMsrWr_AmdK8IorrBaseN,
5783 cpumMsrWr_AmdK8IorrMaskN,
5784 cpumMsrWr_AmdK8TopOfMemN,
5785 cpumMsrWr_AmdK8NbCfg1,
5786 cpumMsrWr_AmdK8McXcptRedir,
5787 cpumMsrWr_AmdK8CpuNameN,
5788 cpumMsrWr_AmdK8HwThermalCtrl,
5789 cpumMsrWr_AmdK8SwThermalCtrl,
5790 cpumMsrWr_AmdK8FidVidControl,
5791 cpumMsrWr_AmdK8McCtlMaskN,
5792 cpumMsrWr_AmdK8SmiOnIoTrapN,
5793 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5794 cpumMsrWr_AmdK8IntPendingMessage,
5795 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5796 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5797 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5798 cpumMsrWr_AmdFam10hPStateControl,
5799 cpumMsrWr_AmdFam10hPStateStatus,
5800 cpumMsrWr_AmdFam10hPStateN,
5801 cpumMsrWr_AmdFam10hCofVidControl,
5802 cpumMsrWr_AmdFam10hCofVidStatus,
5803 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5804 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5805 cpumMsrWr_AmdK8SmmBase,
5806 cpumMsrWr_AmdK8SmmAddr,
5807 cpumMsrWr_AmdK8SmmMask,
5808 cpumMsrWr_AmdK8VmCr,
5809 cpumMsrWr_AmdK8IgnNe,
5810 cpumMsrWr_AmdK8SmmCtl,
5811 cpumMsrWr_AmdK8VmHSavePa,
5812 cpumMsrWr_AmdFam10hVmLockKey,
5813 cpumMsrWr_AmdFam10hSmmLockKey,
5814 cpumMsrWr_AmdFam10hLocalSmiStatus,
5815 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5816 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5817 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5818 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5819 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5820 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5821 cpumMsrWr_AmdK7MicrocodeCtl,
5822 cpumMsrWr_AmdK7ClusterIdMaybe,
5823 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5824 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5825 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5826 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5827 cpumMsrWr_AmdK8PatchLoader,
5828 cpumMsrWr_AmdK7DebugStatusMaybe,
5829 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5830 cpumMsrWr_AmdK7BHTracePtrMaybe,
5831 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5832 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5833 cpumMsrWr_AmdK7FastFlushCountMaybe,
5834 cpumMsrWr_AmdK7NodeId,
5835 cpumMsrWr_AmdK7DrXAddrMaskN,
5836 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5837 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5838 cpumMsrWr_AmdK7LoadStoreCfg,
5839 cpumMsrWr_AmdK7InstrCacheCfg,
5840 cpumMsrWr_AmdK7DataCacheCfg,
5841 cpumMsrWr_AmdK7BusUnitCfg,
5842 cpumMsrWr_AmdK7DebugCtl2Maybe,
5843 cpumMsrWr_AmdFam15hFpuCfg,
5844 cpumMsrWr_AmdFam15hDecoderCfg,
5845 cpumMsrWr_AmdFam10hBusUnitCfg2,
5846 cpumMsrWr_AmdFam15hCombUnitCfg,
5847 cpumMsrWr_AmdFam15hCombUnitCfg2,
5848 cpumMsrWr_AmdFam15hCombUnitCfg3,
5849 cpumMsrWr_AmdFam15hExecUnitCfg,
5850 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5851 cpumMsrWr_AmdFam10hIbsFetchCtl,
5852 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5853 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5854 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5855 cpumMsrWr_AmdFam10hIbsOpRip,
5856 cpumMsrWr_AmdFam10hIbsOpData,
5857 cpumMsrWr_AmdFam10hIbsOpData2,
5858 cpumMsrWr_AmdFam10hIbsOpData3,
5859 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5860 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5861 cpumMsrWr_AmdFam10hIbsCtl,
5862 cpumMsrWr_AmdFam14hIbsBrTarget,
5863
5864 cpumMsrWr_Gim
5865};
5866
5867
5868/**
5869 * Looks up the range for the given MSR.
5870 *
5871 * @returns Pointer to the range if found, NULL if not.
5872 * @param pVM The cross context VM structure.
5873 * @param idMsr The MSR to look up.
5874 */
5875# ifndef IN_RING3
5876static
5877# endif
5878PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5879{
5880 /*
5881 * Binary lookup.
5882 */
5883 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5884 if (!cRanges)
5885 return NULL;
5886 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5887 for (;;)
5888 {
5889 uint32_t i = cRanges / 2;
5890 if (idMsr < paRanges[i].uFirst)
5891 {
5892 if (i == 0)
5893 break;
5894 cRanges = i;
5895 }
5896 else if (idMsr > paRanges[i].uLast)
5897 {
5898 i++;
5899 if (i >= cRanges)
5900 break;
5901 cRanges -= i;
5902 paRanges = &paRanges[i];
5903 }
5904 else
5905 {
5906 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5907 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5908 return &paRanges[i];
5909 }
5910 }
5911
5912# ifdef VBOX_STRICT
5913 /*
5914 * Linear lookup to verify the above binary search.
5915 */
5916 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5917 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5918 while (cLeft-- > 0)
5919 {
5920 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5921 {
5922 AssertFailed();
5923 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5924 return cpumLookupMsrRange(pVM, pCur->uValue);
5925 return pCur;
5926 }
5927 pCur++;
5928 }
5929# endif
5930 return NULL;
5931}
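
/*
 * A minimal, illustrative sketch of using the range lookup (the chosen MSR and
 * the logging are purely illustrative and not part of the VMM proper). Note
 * that MsrAlias ranges are followed recursively, so a non-NULL return always
 * carries real read/write handler indices:
 *
 *     PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, 0xc0000080); // MSR_K6_EFER
 *     if (pRange)
 *         Log(("EFER range: %s covers %#x..%#x\n", pRange->szName, pRange->uFirst, pRange->uLast));
 */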
5932
5933
5934/**
5935 * Query a guest MSR.
5936 *
5937 * The caller is responsible for checking privilege if the call is the result of
5938 * an RDMSR instruction. We'll do the rest.
5939 *
5940 * @retval VINF_SUCCESS on success.
5941 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5942 * current context (raw-mode or ring-0).
5943 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5944 * expected to take the appropriate actions. @a *puValue is set to 0.
5945 * @param pVCpu The cross context virtual CPU structure.
5946 * @param idMsr The MSR.
5947 * @param puValue Where to return the value.
5948 *
5949 * @remarks This will always return the right values, even when we're in the
5950 * recompiler.
5951 */
5952VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5953{
5954 *puValue = 0;
5955
5956 VBOXSTRICTRC rcStrict;
5957 PVM pVM = pVCpu->CTX_SUFF(pVM);
5958 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5959 if (pRange)
5960 {
5961 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5962 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5963
5964 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5965 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5966
5967 STAM_COUNTER_INC(&pRange->cReads);
5968 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5969
5970 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5971 if (rcStrict == VINF_SUCCESS)
5972 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5973 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5974 {
5975 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5976 STAM_COUNTER_INC(&pRange->cGps);
5977 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5978 }
5979#ifndef IN_RING3
5980 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5981 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5982#endif
5983 else
5984 {
5985 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5986 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5987 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5988 Assert(rcStrict != VERR_EM_INTERPRETER);
5989 }
5990 }
5991 else
5992 {
5993 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5994 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5995 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5996 rcStrict = VERR_CPUM_RAISE_GP_0;
5997 }
5998 return rcStrict;
5999}
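
/*
 * A minimal usage sketch for the query API above (illustrative only; a real
 * RDMSR exit handler also advances RIP and handles nested-guest intercepts):
 *
 *     uint64_t uValue = 0;
 *     VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, idMsr, &uValue);
 *     if (rcStrict == VINF_SUCCESS)
 *     {
 *         // Store uValue into the guest's EDX:EAX.
 *     }
 *     else if (rcStrict == VERR_CPUM_RAISE_GP_0)
 *     {
 *         // Inject #GP(0) into the guest.
 *     }
 *     else
 *     {
 *         // VINF_CPUM_R3_MSR_READ and other statuses are passed up the call chain.
 *     }
 */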
6000
6001
6002/**
6003 * Writes to a guest MSR.
6004 *
6005 * The caller is responsible for checking privilege if the call is the result of
6006 * a WRMSR instruction. We'll do the rest.
6007 *
6008 * @retval VINF_SUCCESS on success.
6009 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
6010 * current context (raw-mode or ring-0).
6011 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
6012 * appropriate actions.
6013 *
6014 * @param pVCpu The cross context virtual CPU structure.
6015 * @param idMsr The MSR id.
6016 * @param uValue The value to set.
6017 *
6018 * @remarks Everyone changing MSR values, including the recompiler, shall do it
6019 * by calling this method. This makes sure we have current values and
6020 * that we trigger all the right actions when something changes.
6021 *
6022 * For performance reasons, this actually isn't entirely true for some
6023 * MSRs when in HM mode. The code here and in HM must be aware of
6024 * this.
6025 */
6026VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
6027{
6028 VBOXSTRICTRC rcStrict;
6029 PVM pVM = pVCpu->CTX_SUFF(pVM);
6030 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
6031 if (pRange)
6032 {
6033 STAM_COUNTER_INC(&pRange->cWrites);
6034 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6035
6036 if (!(uValue & pRange->fWrGpMask))
6037 {
6038 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6039 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6040
6041 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6042 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6043
6044 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6045 if (uValueAdjusted != uValue)
6046 {
6047 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6048 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6049 }
6050
6051 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6052 if (rcStrict == VINF_SUCCESS)
6053 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6054 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6055 {
6056 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6057 STAM_COUNTER_INC(&pRange->cGps);
6058 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6059 }
6060#ifndef IN_RING3
6061 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6062 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6063#endif
6064 else
6065 {
6066 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6067 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6068 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6069 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6070 Assert(rcStrict != VERR_EM_INTERPRETER);
6071 }
6072 }
6073 else
6074 {
6075 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6076 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6077 STAM_COUNTER_INC(&pRange->cGps);
6078 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6079 rcStrict = VERR_CPUM_RAISE_GP_0;
6080 }
6081 }
6082 else
6083 {
6084 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6085 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6086 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6087 rcStrict = VERR_CPUM_RAISE_GP_0;
6088 }
6089 return rcStrict;
6090}
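
/*
 * A minimal usage sketch for the write API above (illustrative only; the
 * EDX:EAX composition mirrors what a WRMSR exit handler would do):
 *
 *     uint64_t uValue = ((uint64_t)uEdx << 32) | uEax;
 *     VBOXSTRICTRC rcStrict = CPUMSetGuestMsr(pVCpu, idMsr, uValue);
 *     if (rcStrict == VERR_CPUM_RAISE_GP_0)
 *     {
 *         // Inject #GP(0) into the guest.
 *     }
 *     else if (rcStrict != VINF_SUCCESS)
 *     {
 *         // VINF_CPUM_R3_MSR_WRITE and other statuses are passed up the call chain.
 *     }
 */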
6091
6092
6093#if defined(VBOX_STRICT) && defined(IN_RING3)
6094/**
6095 * Performs some checks on the static data related to MSRs.
6096 *
6097 * @returns VINF_SUCCESS on success, error on failure.
6098 */
6099int cpumR3MsrStrictInitChecks(void)
6100{
6101#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6102 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6103#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6104 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6105
6106 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6107 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6108 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6109 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6110 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6111 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6112 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6113 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6114 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6115 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6116 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6117 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6141 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6142 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6143 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6144 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6145 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6146 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6147 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6148 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6149 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6150 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6151 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6152 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6153 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6154 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6155 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6156 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6157 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6158 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6159 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6160 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6161 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6162 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6163 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6164 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6165 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6166 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6167 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6168 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6169 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6170 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6171 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6172 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6173 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6174 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6175 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6176 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6177 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6178 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6179
6180 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6181 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6182 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6183 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6184 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6185 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6186 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6187 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6188 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6189
6190 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6191 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6192 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6193 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6194 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6195 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6196 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6197 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6198 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6199 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6200 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6201 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6202 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6203 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6204 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6205 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6206 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6207 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6208 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6209 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6210 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6211 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6212 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6213 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6214 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6218 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6219 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6220 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6221 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6222 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6223 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6224 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6225 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6226 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6227 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6228 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6229 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6230 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6231 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6232 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6233 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6234 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6235 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6236 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6237 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6238 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6239 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6240 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6241 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6242 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6243 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6244 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6245 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6246 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6247 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6248 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6249 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6250 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6251 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6252 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6253 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6254 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6255 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6256 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6257 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6258 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6259 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6260 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6261 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6262 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6263
6264 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6265 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6266 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6267 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6268
6269 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6270 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6271 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6272 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6273 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6274 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6275 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6276 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6277 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6278 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6279 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6280 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6281 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6282 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6283 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6284 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6285 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6292 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6293 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6294 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6295 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6296 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6297 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6298 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6299 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6300 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6301 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6302 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6303 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6304 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6305 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6306 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6307 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6308 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6311 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6312 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6313 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6314 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6315 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6316 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6317 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6318 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6319 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6320 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6321 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6322 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6323 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6324 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6325 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6326 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6327 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6328 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6329 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6330 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6331 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6332 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6333 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6334 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6335 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6336 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6337 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6338 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6339 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6340 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6341 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6342 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6343 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6344 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6345 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6346 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6347 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6348 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6349 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6350 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6351 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6352 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6353 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6354 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6355 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6356 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6357 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6358 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6359 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6360
6361 CPUM_ASSERT_RD_MSR_FN(Gim)
6362
6363 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6364 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6365 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6366 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6367 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6368 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6369 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6370 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6371 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6372 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6375 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6376 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6377 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6378 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6379 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6380 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6381 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6382 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6383 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6384 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6385 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6386 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6387 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6388 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6389 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6390 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6391 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6392 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6393 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6394 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6395 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6396 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6397 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6398 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6399 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6400 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6401 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6402 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6403 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6404 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6405 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6406 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6407 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6408 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6409 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6410 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6411 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6412 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6413
6414 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6415 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6416 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6417 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6418 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6419 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6420 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6421 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6422 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6423
6424 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6425 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6426 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6427 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6428 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6429 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6430 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6431 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6432 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6433 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6434 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6435 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6436 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6437 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6438 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6439 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6440 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6441 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6442 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6443 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6444 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6445 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6446 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6447 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6448 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6449 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6450 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6451 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6452 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6453 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6454 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6455 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6456 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6457 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6458 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6459 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6460 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6461 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6462 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6463 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6464 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6465 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6466 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6467 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6468 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6469 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6470 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6471 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6472 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6473 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6474 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6475
6476 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6477 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6478
6479 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6480 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6481 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6482 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6483 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6484 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6485 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6486 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6487 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6488 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6489 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6490 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6491 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6492 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6493 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6501 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6502 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6503 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6504 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6505 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6506 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6507 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6508 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6509 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6510 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6511 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6512 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6513 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6514 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6515 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6516 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6519 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6520 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6521 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6522 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6523 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6524 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6525 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6526 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6527 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6528 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6529 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6530 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6531 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6532 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6533 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6534 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6535 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6536 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6537 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6538 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6539 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6540 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6541 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6542 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6543 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6544 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6545 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6546 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6547 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6548 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6549 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6550 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6551 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6552 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6553 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6554 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6555 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6556 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6557 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6558 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6559 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6560 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6561 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6562 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6563 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6564 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6565 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6566 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6567 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6568
6569 CPUM_ASSERT_WR_MSR_FN(Gim);
6570
6571 return VINF_SUCCESS;
6572}
6573#endif /* VBOX_STRICT && IN_RING3 */
6574
6575
6576/**
6577 * Gets the scalable bus frequency.
6578 *
6579 * The bus frequency is used as a base in several MSRs that give the CPU and
6580 * other frequency ratios.
6581 *
6582 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6583 * @param pVM The cross context VM structure.
6584 */
6585VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6586{
6587 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6588 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6589 uFreq = CPUM_SBUSFREQ_100MHZ;
6590 return uFreq;
6591}
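
/*
 * Illustrative sketch of how this base clock is typically combined with a
 * ratio MSR; the IA32_PLATFORM_INFO value and field layout below are an
 * assumption for the example, not something this function reads itself:
 *
 *     uint64_t uBusHz = CPUMGetGuestScalableBusFrequency(pVM);  // e.g. 100 MHz
 *     uint8_t  uRatio = (uint8_t)(uPlatformInfo >> 8);          // IA32_PLATFORM_INFO[15:8], max non-turbo ratio
 *     uint64_t uCpuHz = uBusHz * uRatio;                        // approximate base CPU frequency
 */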
6592
6593
6594/**
6595 * Sets the guest EFER MSR without performing any additional checks.
6596 *
6597 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6598 * @param uOldEfer The previous EFER MSR value.
6599 * @param uValidEfer The new, validated EFER MSR value.
6600 *
6601 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate
6602 * the new value before calling this function to perform the EFER transition.
6603 */
6604VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6605{
6606 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6607
6608 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6609 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6610 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6611 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6612 {
6613 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6614 HMFlushTLB(pVCpu);
6615
6616 /* Notify PGM about NXE changes. */
6617 if ( (uOldEfer & MSR_K6_EFER_NXE)
6618 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6619 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6620 }
6621}
6622
6623
6624/**
6625 * Checks if a guest PAT MSR write is valid.
6626 *
6627 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6628 * @param uValue The PAT MSR value.
6629 */
6630VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6631{
6632 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6633 {
6634 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6635 uint8_t uType = (uint8_t)(uValue >> cShift);
6636 if ((uType >= 8) || (uType == 2) || (uType == 3))
6637 {
6638 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#llx)\n", cShift + 7, cShift, uValue, uType));
6639 return false;
6640 }
6641 }
6642 return true;
6643}
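
/*
 * For illustration, the architectural power-on value of IA32_PAT passes this
 * check, while any entry using a reserved encoding (2, 3 or anything >= 8)
 * fails it:
 *
 *     CPUMIsPatMsrValid(UINT64_C(0x0007040600070406));  // true:  default WB/WT/UC-/UC pattern
 *     CPUMIsPatMsrValid(UINT64_C(0x0007040600070402));  // false: entry 0 uses reserved type 2
 */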
6644
6645
6646/**
6647 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6648 *
6649 * @returns VBox status code.
6650 * @param pVM The cross context VM structure.
6651 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6652 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6653 * @param uNewEfer The new EFER MSR value being written.
6654 * @param puValidEfer Where to store the validated EFER (only updated if
6655 * this function returns VINF_SUCCESS).
6656 */
6657VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6658{
6659 /* #GP(0) If anything outside the allowed bits is set. */
6660 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6661 if (uNewEfer & ~fMask)
6662 {
6663        Log(("CPUM: Setting disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6664 return VERR_CPUM_RAISE_GP_0;
6665 }
6666
6667 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6668 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6669 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6670 && (uCr0 & X86_CR0_PG))
6671 {
6672 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6673 return VERR_CPUM_RAISE_GP_0;
6674 }
6675
6676 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6677 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6678 | MSR_K6_EFER_LME
6679 | MSR_K6_EFER_LMA /* ignored anyway */
6680 | MSR_K6_EFER_SCE
6681 | MSR_K6_EFER_FFXSR
6682 | MSR_K6_EFER_SVME)),
6683 ("Unexpected value %#RX64\n", uNewEfer));
6684
6685 /* Ignore EFER.LMA, it's updated when setting CR0. */
6686 fMask &= ~MSR_K6_EFER_LMA;
6687
6688 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6689 return VINF_SUCCESS;
6690}
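
/*
 * A minimal sketch of the validate-then-apply pattern referred to in the
 * remarks of CPUMSetGuestEferMsrNoChecks() above (illustrative, error paths
 * trimmed):
 *
 *     uint64_t uValidEfer;
 *     int rc = CPUMIsGuestEferMsrWriteValid(pVM, uCr0, uOldEfer, uNewEfer, &uValidEfer);
 *     if (RT_SUCCESS(rc))
 *         CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
 *     else
 *     {
 *         // VERR_CPUM_RAISE_GP_0: the guest write must raise #GP(0).
 *     }
 */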
6691
6692
6693/**
6694 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6695 *
6696 * @returns Mask of valid EFER bits.
6697 * @param pVM The cross context VM structure.
6698 *
6699 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6700 * rather a read-only bit.
6701 */
6702VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6703{
6704 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6705 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6706 : 0;
6707 uint64_t fMask = 0;
6708 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6709
6710 /* Filter out those bits the guest is allowed to change. (e.g. LMA is read-only) */
6711 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6712 fMask |= MSR_K6_EFER_NXE;
6713 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6714 fMask |= MSR_K6_EFER_LME;
6715 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6716 fMask |= MSR_K6_EFER_SCE;
6717 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6718 fMask |= MSR_K6_EFER_FFXSR;
6719 if (pVM->cpum.s.GuestFeatures.fSvm)
6720 fMask |= MSR_K6_EFER_SVME;
6721
6722 return (fIgnoreMask | fMask);
6723}
6724
6725
6726/**
6727 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6728 *
6729 * @returns The register value.
6730 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6731 * @thread EMT(pVCpu)
6732 */
6733VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6734{
6735 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6736 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6737}
6738
6739
6740/**
6741 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6742 *
6743 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6744 * @param uValue The new value.
6745 * @thread EMT(pVCpu)
6746 */
6747VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6748{
6749 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6750 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6751}
6752
6753
6754/**
6755 * Fast way for HM to access the IA32_SPEC_CTRL register.
6756 *
6757 * @returns The register value.
6758 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6759 * @thread EMT(pVCpu)
6760 */
6761VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6762{
6763 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6764}
6765
6766
6767/**
6768 * Fast way for HM to access the IA32_SPEC_CTRL register.
6769 *
6770 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6771 * @param uValue The new value.
6772 * @thread EMT(pVCpu)
6773 */
6774VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6775{
6776 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6777}
6778