VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMR3/IEMR3.cpp@ 105663

Last change on this file since 105663 was 105663, checked in by vboxsync, 7 months ago

VMM/IEM: Corrected missing uRegFpCtrl initialization. bugref:10652

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 124.1 KB
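
The change in this revision is the one-line uRegFpCtrl initialization visible at source line 295 below: IEMR3Init() now seeds pVCpu->iem.s.uRegFpCtrl with IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED for every EMT, so the recompiler starts out with the SIMD/FP control register marked as not modified (judging from the constant's name). The sketch below only illustrates that per-CPU "initialize to a sentinel" pattern in isolation; the struct, constant and helper names are simplified stand-ins, not the real VirtualBox definitions.

    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED.
    static constexpr uint64_t kFpCtrlNotModified = UINT64_MAX;

    // Stand-in for the per-EMT IEM state; the real structure has many more members.
    struct PerCpuIemState
    {
        uint64_t uRegFpCtrl;    // the member that revision 105663 makes sure gets initialized
    };

    // Mirrors the per-CPU loop in IEMR3Init(): every virtual CPU gets the sentinel
    // so code running later can distinguish "never touched" from a real value.
    void initPerCpuState(std::vector<PerCpuIemState> &aCpus)
    {
        for (PerCpuIemState &cpu : aCpus)
            cpu.uRegFpCtrl = kFpCtrlNotModified;
    }

    int main()
    {
        std::vector<PerCpuIemState> aCpus(4);   // e.g. a 4-vCPU VM
        initPerCpuState(aCpus);
        return aCpus[0].uRegFpCtrl == kFpCtrlNotModified ? 0 : 1;
    }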
 
1/* $Id: IEMR3.cpp 105663 2024-08-13 23:35:12Z vboxsync $ */
2/** @file
3 * IEM - Interpreted Execution Manager.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*********************************************************************************************************************************
30* Header Files *
31*********************************************************************************************************************************/
32#define LOG_GROUP LOG_GROUP_EM
33#define VMCPU_INCL_CPUM_GST_CTX
34#include <VBox/vmm/iem.h>
35#include <VBox/vmm/cpum.h>
36#include <VBox/vmm/dbgf.h>
37#include <VBox/vmm/mm.h>
38#include <VBox/vmm/ssm.h>
39#if defined(VBOX_VMM_TARGET_ARMV8)
40# include "IEMInternal-armv8.h"
41#else
42# include "IEMInternal.h"
43#endif
44#include <VBox/vmm/vm.h>
45#include <VBox/vmm/vmapi.h>
46#include <VBox/err.h>
47#ifdef VBOX_WITH_DEBUGGER
48# include <VBox/dbg.h>
49#endif
50
51#include <iprt/assert.h>
52#include <iprt/getopt.h>
53#ifdef IEM_WITH_TLB_TRACE
54# include <iprt/mem.h>
55#endif
56#include <iprt/string.h>
57
58#if defined(VBOX_WITH_IEM_RECOMPILER) && !defined(VBOX_VMM_TARGET_ARMV8)
59# include "IEMN8veRecompiler.h"
60# include "IEMThreadedFunctions.h"
61# include "IEMInline.h"
62#endif
63
64
65/*********************************************************************************************************************************
66* Internal Functions *
67*********************************************************************************************************************************/
68static FNDBGFINFOARGVINT iemR3InfoITlb;
69static FNDBGFINFOARGVINT iemR3InfoDTlb;
70#ifdef IEM_WITH_TLB_TRACE
71static FNDBGFINFOARGVINT iemR3InfoTlbTrace;
72#endif
73#if defined(VBOX_WITH_IEM_RECOMPILER) && !defined(VBOX_VMM_TARGET_ARMV8)
74static FNDBGFINFOARGVINT iemR3InfoTb;
75#endif
76#ifdef VBOX_WITH_DEBUGGER
77static void iemR3RegisterDebuggerCommands(void);
78#endif
79
80
81#if !defined(VBOX_VMM_TARGET_ARMV8)
82static const char *iemGetTargetCpuName(uint32_t enmTargetCpu)
83{
84 switch (enmTargetCpu)
85 {
86#define CASE_RET_STR(enmValue) case enmValue: return #enmValue + (sizeof("IEMTARGETCPU_") - 1)
87 CASE_RET_STR(IEMTARGETCPU_8086);
88 CASE_RET_STR(IEMTARGETCPU_V20);
89 CASE_RET_STR(IEMTARGETCPU_186);
90 CASE_RET_STR(IEMTARGETCPU_286);
91 CASE_RET_STR(IEMTARGETCPU_386);
92 CASE_RET_STR(IEMTARGETCPU_486);
93 CASE_RET_STR(IEMTARGETCPU_PENTIUM);
94 CASE_RET_STR(IEMTARGETCPU_PPRO);
95 CASE_RET_STR(IEMTARGETCPU_CURRENT);
96#undef CASE_RET_STR
97 default: return "Unknown";
98 }
99}
100#endif
101
102
103/**
104 * Initializes the interpreted execution manager.
105 *
106 * This must be called after CPUM as we're querying information from CPUM about
107 * the guest and host CPUs.
108 *
109 * @returns VBox status code.
110 * @param pVM The cross context VM structure.
111 */
112VMMR3DECL(int) IEMR3Init(PVM pVM)
113{
114 /*
115 * Read configuration.
116 */
117#if (!defined(VBOX_VMM_TARGET_ARMV8) && !defined(VBOX_WITHOUT_CPUID_HOST_CALL)) || defined(VBOX_WITH_IEM_RECOMPILER)
118 PCFGMNODE const pIem = CFGMR3GetChild(CFGMR3GetRoot(pVM), "IEM");
119 int rc;
120#endif
121
122#if !defined(VBOX_VMM_TARGET_ARMV8) && !defined(VBOX_WITHOUT_CPUID_HOST_CALL)
123 /** @cfgm{/IEM/CpuIdHostCall, boolean, false}
124 * Controls whether the custom VBox specific CPUID host call interface is
125 * enabled or not. */
126# ifdef DEBUG_bird
127 rc = CFGMR3QueryBoolDef(pIem, "CpuIdHostCall", &pVM->iem.s.fCpuIdHostCall, true);
128# else
129 rc = CFGMR3QueryBoolDef(pIem, "CpuIdHostCall", &pVM->iem.s.fCpuIdHostCall, false);
130# endif
131 AssertLogRelRCReturn(rc, rc);
132#endif
133
134#ifdef VBOX_WITH_IEM_RECOMPILER
135 /** @cfgm{/IEM/MaxTbCount, uint32_t, 524288}
136 * Max number of TBs per EMT. */
137 uint32_t cMaxTbs = 0;
138 rc = CFGMR3QueryU32Def(pIem, "MaxTbCount", &cMaxTbs, _512K);
139 AssertLogRelRCReturn(rc, rc);
140 if (cMaxTbs < _16K || cMaxTbs > _8M)
141 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
142 "MaxTbCount value %u (%#x) is out of range (min %u, max %u)", cMaxTbs, cMaxTbs, _16K, _8M);
143
144 /** @cfgm{/IEM/InitialTbCount, uint32_t, 32768}
145 * Initial (minimum) number of TBs per EMT in ring-3. */
146 uint32_t cInitialTbs = 0;
147 rc = CFGMR3QueryU32Def(pIem, "InitialTbCount", &cInitialTbs, RT_MIN(cMaxTbs, _32K));
148 AssertLogRelRCReturn(rc, rc);
149 if (cInitialTbs < _16K || cInitialTbs > _8M)
150 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
151 "InitialTbCount value %u (%#x) is out of range (min %u, max %u)", cInitialTbs, cInitialTbs, _16K, _8M);
152
153 /* Check that the two values make sense together. Expect user/api to do
154 the right thing or get lost. */
155 if (cInitialTbs > cMaxTbs)
156 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
157 "InitialTbCount value %u (%#x) is higher than the MaxTbCount value %u (%#x)",
158 cInitialTbs, cInitialTbs, cMaxTbs, cMaxTbs);
159
160 /** @cfgm{/IEM/MaxExecMem, uint64_t, 512 MiB}
161 * Max executable memory for recompiled code per EMT. */
162 uint64_t cbMaxExec = 0;
163 rc = CFGMR3QueryU64Def(pIem, "MaxExecMem", &cbMaxExec, _512M);
164 AssertLogRelRCReturn(rc, rc);
165 if (cbMaxExec < _1M || cbMaxExec > 16*_1G64)
166 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
167 "MaxExecMem value %'RU64 (%#RX64) is out of range (min %'RU64, max %'RU64)",
168 cbMaxExec, cbMaxExec, (uint64_t)_1M, 16*_1G64);
169
170 /** @cfgm{/IEM/ExecChunkSize, uint32_t, 0 (auto)}
171 * The executable memory allocator chunk size. */
172 uint32_t cbChunkExec = 0;
173 rc = CFGMR3QueryU32Def(pIem, "ExecChunkSize", &cbChunkExec, 0);
174 AssertLogRelRCReturn(rc, rc);
175 if (cbChunkExec != 0 && cbChunkExec != UINT32_MAX && (cbChunkExec < _1M || cbChunkExec > _256M))
176 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
177 "ExecChunkSize value %'RU32 (%#RX32) is out of range (min %'RU32, max %'RU32)",
178 cbChunkExec, cbChunkExec, _1M, _256M);
179
180 /** @cfgm{/IEM/InitialExecMemSize, uint64_t, 1}
181 * The initial executable memory allocator size (per EMT). The value is
182 * rounded up to the nearest chunk size, so 1 byte means one chunk. */
183 uint64_t cbInitialExec = 0;
184 rc = CFGMR3QueryU64Def(pIem, "InitialExecMemSize", &cbInitialExec, 0);
185 AssertLogRelRCReturn(rc, rc);
186 if (cbInitialExec > cbMaxExec)
187 return VMSetError(pVM, VERR_OUT_OF_RANGE, RT_SRC_POS,
188 "InitialExecMemSize value %'RU64 (%#RX64) is out of range (max %'RU64)",
189 cbInitialExec, cbInitialExec, cbMaxExec);
190
191 /** @cfgm{/IEM/NativeRecompileAtUsedCount, uint32_t, 16}
192 * The translation block use count value to do native recompilation at.
193 * Set to zero to disable native recompilation. */
194 uint32_t uTbNativeRecompileAtUsedCount = 16;
195 rc = CFGMR3QueryU32Def(pIem, "NativeRecompileAtUsedCount", &uTbNativeRecompileAtUsedCount, 16);
196 AssertLogRelRCReturn(rc, rc);
197
198#endif /* VBOX_WITH_IEM_RECOMPILER*/
199
200 /*
201 * Initialize per-CPU data and register statistics.
202 */
203#if 1
204 uint64_t const uInitialTlbRevision = UINT64_C(0) - (IEMTLB_REVISION_INCR * 200U);
205 uint64_t const uInitialTlbPhysRev = UINT64_C(0) - (IEMTLB_PHYS_REV_INCR * 100U);
206#else
207 uint64_t const uInitialTlbRevision = UINT64_C(0) + (IEMTLB_REVISION_INCR * 4U);
208 uint64_t const uInitialTlbPhysRev = UINT64_C(0) + (IEMTLB_PHYS_REV_INCR * 4U);
209#endif
210
211 for (VMCPUID idCpu = 0; idCpu < pVM->cCpus; idCpu++)
212 {
213 PVMCPU const pVCpu = pVM->apCpusR3[idCpu];
214 AssertCompile(sizeof(pVCpu->iem.s) <= sizeof(pVCpu->iem.padding)); /* (tstVMStruct can't do its job w/o instruction stats) */
215
216 pVCpu->iem.s.CodeTlb.uTlbRevision = pVCpu->iem.s.DataTlb.uTlbRevision = uInitialTlbRevision;
217#ifndef VBOX_VMM_TARGET_ARMV8
218 pVCpu->iem.s.CodeTlb.uTlbRevisionGlobal = pVCpu->iem.s.DataTlb.uTlbRevisionGlobal = uInitialTlbRevision;
219#endif
220 pVCpu->iem.s.CodeTlb.uTlbPhysRev = pVCpu->iem.s.DataTlb.uTlbPhysRev = uInitialTlbPhysRev;
221#ifndef VBOX_VMM_TARGET_ARMV8
222 pVCpu->iem.s.CodeTlb.NonGlobalLargePageRange.uFirstTag = UINT64_MAX;
223 pVCpu->iem.s.CodeTlb.GlobalLargePageRange.uFirstTag = UINT64_MAX;
224 pVCpu->iem.s.DataTlb.NonGlobalLargePageRange.uFirstTag = UINT64_MAX;
225 pVCpu->iem.s.DataTlb.GlobalLargePageRange.uFirstTag = UINT64_MAX;
226#endif
227
228 /*
229 * Host and guest CPU information.
230 */
231 if (idCpu == 0)
232 {
233 pVCpu->iem.s.enmCpuVendor = CPUMGetGuestCpuVendor(pVM);
234 pVCpu->iem.s.enmHostCpuVendor = CPUMGetHostCpuVendor(pVM);
235#if !defined(VBOX_VMM_TARGET_ARMV8)
236 pVCpu->iem.s.aidxTargetCpuEflFlavour[0] = pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL
237 || pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_VIA /*??*/
238 ? IEMTARGETCPU_EFL_BEHAVIOR_INTEL : IEMTARGETCPU_EFL_BEHAVIOR_AMD;
239# if defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)
240 if (pVCpu->iem.s.enmCpuVendor == pVCpu->iem.s.enmHostCpuVendor)
241 pVCpu->iem.s.aidxTargetCpuEflFlavour[1] = IEMTARGETCPU_EFL_BEHAVIOR_NATIVE;
242 else
243# endif
244 pVCpu->iem.s.aidxTargetCpuEflFlavour[1] = pVCpu->iem.s.aidxTargetCpuEflFlavour[0];
245#else
246 pVCpu->iem.s.aidxTargetCpuEflFlavour[0] = IEMTARGETCPU_EFL_BEHAVIOR_NATIVE;
247 pVCpu->iem.s.aidxTargetCpuEflFlavour[1] = pVCpu->iem.s.aidxTargetCpuEflFlavour[0];
248#endif
249
250#if !defined(VBOX_VMM_TARGET_ARMV8) && (IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC)
251 switch (pVM->cpum.ro.GuestFeatures.enmMicroarch)
252 {
253 case kCpumMicroarch_Intel_8086: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_8086; break;
254 case kCpumMicroarch_Intel_80186: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_186; break;
255 case kCpumMicroarch_Intel_80286: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_286; break;
256 case kCpumMicroarch_Intel_80386: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_386; break;
257 case kCpumMicroarch_Intel_80486: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_486; break;
258 case kCpumMicroarch_Intel_P5: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_PENTIUM; break;
259 case kCpumMicroarch_Intel_P6: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_PPRO; break;
260 case kCpumMicroarch_NEC_V20: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_V20; break;
261 case kCpumMicroarch_NEC_V30: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_V20; break;
262 default: pVCpu->iem.s.uTargetCpu = IEMTARGETCPU_CURRENT; break;
263 }
264 LogRel(("IEM: TargetCpu=%s, Microarch=%s aidxTargetCpuEflFlavour={%d,%d}\n",
265 iemGetTargetCpuName(pVCpu->iem.s.uTargetCpu), CPUMMicroarchName(pVM->cpum.ro.GuestFeatures.enmMicroarch),
266 pVCpu->iem.s.aidxTargetCpuEflFlavour[0], pVCpu->iem.s.aidxTargetCpuEflFlavour[1]));
267#else
268 LogRel(("IEM: Microarch=%s aidxTargetCpuEflFlavour={%d,%d}\n",
269 CPUMMicroarchName(pVM->cpum.ro.GuestFeatures.enmMicroarch),
270 pVCpu->iem.s.aidxTargetCpuEflFlavour[0], pVCpu->iem.s.aidxTargetCpuEflFlavour[1]));
271#endif
272 }
273 else
274 {
275 pVCpu->iem.s.enmCpuVendor = pVM->apCpusR3[0]->iem.s.enmCpuVendor;
276 pVCpu->iem.s.enmHostCpuVendor = pVM->apCpusR3[0]->iem.s.enmHostCpuVendor;
277 pVCpu->iem.s.aidxTargetCpuEflFlavour[0] = pVM->apCpusR3[0]->iem.s.aidxTargetCpuEflFlavour[0];
278 pVCpu->iem.s.aidxTargetCpuEflFlavour[1] = pVM->apCpusR3[0]->iem.s.aidxTargetCpuEflFlavour[1];
279#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
280 pVCpu->iem.s.uTargetCpu = pVM->apCpusR3[0]->iem.s.uTargetCpu;
281#endif
282 }
283
284 /*
285 * Mark all buffers free.
286 */
287 uint32_t iMemMap = RT_ELEMENTS(pVCpu->iem.s.aMemMappings);
288 while (iMemMap-- > 0)
289 pVCpu->iem.s.aMemMappings[iMemMap].fAccess = IEM_ACCESS_INVALID;
290
291#ifdef VBOX_WITH_IEM_RECOMPILER
292 /*
293 * Recompiler state and configuration distribution.
294 */
295 pVCpu->iem.s.uRegFpCtrl = IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED;
296 pVCpu->iem.s.uTbNativeRecompileAtUsedCount = uTbNativeRecompileAtUsedCount;
297#endif
298
299#ifdef IEM_WITH_TLB_TRACE
300 /*
301 * Allocate trace buffer.
302 */
303 pVCpu->iem.s.idxTlbTraceEntry = 0;
304 pVCpu->iem.s.cTlbTraceEntriesShift = 16;
305 pVCpu->iem.s.paTlbTraceEntries = (PIEMTLBTRACEENTRY)RTMemPageAlloc( RT_BIT_Z(pVCpu->iem.s.cTlbTraceEntriesShift)
306 * sizeof(*pVCpu->iem.s.paTlbTraceEntries));
307 AssertLogRelReturn(pVCpu->iem.s.paTlbTraceEntries, VERR_NO_PAGE_MEMORY);
308#endif
309 }
310
311
312#ifdef VBOX_WITH_IEM_RECOMPILER
313 /*
314 * Initialize the TB allocator and cache (/ hash table).
315 *
316 * This is done by each EMT to try to get more optimal thread/NUMA locality of
317 * the allocations.
318 */
319 rc = VMR3ReqCallWait(pVM, VMCPUID_ALL, (PFNRT)iemTbInit, 6,
320 pVM, cInitialTbs, cMaxTbs, cbInitialExec, cbMaxExec, cbChunkExec);
321 AssertLogRelRCReturn(rc, rc);
322#endif
323
324 /*
325 * Register statistics.
326 */
327 for (VMCPUID idCpu = 0; idCpu < pVM->cCpus; idCpu++)
328 {
329#if !defined(VBOX_VMM_TARGET_ARMV8) && defined(VBOX_WITH_NESTED_HWVIRT_VMX) /* quick fix for stupid structure duplication non-sense */
330 PVMCPU pVCpu = pVM->apCpusR3[idCpu];
331 char szPat[128];
332 RT_NOREF_PV(szPat); /* lazy bird */
333 char szVal[128];
334 RT_NOREF_PV(szVal); /* lazy bird */
335
336 STAMR3RegisterF(pVM, &pVCpu->iem.s.cInstructions, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
337 "Instructions interpreted", "/IEM/CPU%u/cInstructions", idCpu);
338 STAMR3RegisterF(pVM, &pVCpu->iem.s.cLongJumps, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES,
339 "Number of longjmp calls", "/IEM/CPU%u/cLongJumps", idCpu);
340 STAMR3RegisterF(pVM, &pVCpu->iem.s.cPotentialExits, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
341 "Potential exits", "/IEM/CPU%u/cPotentialExits", idCpu);
342 STAMR3RegisterF(pVM, &pVCpu->iem.s.cRetAspectNotImplemented, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
343 "VERR_IEM_ASPECT_NOT_IMPLEMENTED", "/IEM/CPU%u/cRetAspectNotImplemented", idCpu);
344 STAMR3RegisterF(pVM, &pVCpu->iem.s.cRetInstrNotImplemented, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
345 "VERR_IEM_INSTR_NOT_IMPLEMENTED", "/IEM/CPU%u/cRetInstrNotImplemented", idCpu);
346 STAMR3RegisterF(pVM, &pVCpu->iem.s.cRetInfStatuses, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
347 "Informational statuses returned", "/IEM/CPU%u/cRetInfStatuses", idCpu);
348 STAMR3RegisterF(pVM, &pVCpu->iem.s.cRetErrStatuses, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
349 "Error statuses returned", "/IEM/CPU%u/cRetErrStatuses", idCpu);
350 STAMR3RegisterF(pVM, &pVCpu->iem.s.cbWritten, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES,
351 "Approx bytes written", "/IEM/CPU%u/cbWritten", idCpu);
352 STAMR3RegisterF(pVM, &pVCpu->iem.s.cPendingCommit, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES,
353 "Times RC/R0 had to postpone instruction committing to ring-3", "/IEM/CPU%u/cPendingCommit", idCpu);
354 STAMR3RegisterF(pVM, &pVCpu->iem.s.cMisalignedAtomics, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES,
355 "Number of misaligned (for the host) atomic instructions", "/IEM/CPU%u/cMisalignedAtomics", idCpu);
356
357 /* Code TLB: */
358 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.uTlbRevision, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
359 "Code TLB non-global revision", "/IEM/CPU%u/Tlb/Code/RevisionNonGlobal", idCpu);
360 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.uTlbRevisionGlobal, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
361 "Code TLB global revision", "/IEM/CPU%u/Tlb/Code/RevisionGlobal", idCpu);
362 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlsFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
363 "Code TLB non-global flushes", "/IEM/CPU%u/Tlb/Code/RevisionNonGlobalFlushes", idCpu);
364 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlsGlobalFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
365 "Code TLB global flushes", "/IEM/CPU%u/Tlb/Code/RevisionGlobalFlushes", idCpu);
366 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbRevisionRollovers, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
367 "Code TLB revision rollovers", "/IEM/CPU%u/Tlb/Code/RevisionRollovers", idCpu);
368
369 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.CodeTlb.uTlbPhysRev, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
370 "Code TLB physical revision", "/IEM/CPU%u/Tlb/Code/PhysicalRevision", idCpu);
371 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbPhysRevFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
372 "Code TLB revision flushes", "/IEM/CPU%u/Tlb/Code/PhysicalRevisionFlushes", idCpu);
373 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbPhysRevRollovers, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
374 "Code TLB revision rollovers", "/IEM/CPU%u/Tlb/Code/PhysicalRevisionRollovers", idCpu);
375
376 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbGlobalLargePageCurLoads, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
377 "Code TLB global large page loads since flush", "/IEM/CPU%u/Tlb/Code/LargePageGlobalCurLoads", idCpu);
378 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.GlobalLargePageRange.uFirstTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
379 "Code TLB global large page range: lowest tag", "/IEM/CPU%u/Tlb/Code/LargePageGlobalFirstTag", idCpu);
380 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.GlobalLargePageRange.uLastTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
381 "Code TLB global large page range: last tag", "/IEM/CPU%u/Tlb/Code/LargePageGlobalLastTag", idCpu);
382
383 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNonGlobalLargePageCurLoads, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
384 "Code TLB non-global large page loads since flush", "/IEM/CPU%u/Tlb/Code/LargePageNonGlobalCurLoads", idCpu);
385 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.NonGlobalLargePageRange.uFirstTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
386 "Code TLB non-global large page range: lowest tag", "/IEM/CPU%u/Tlb/Code/LargePageNonGlobalFirstTag", idCpu);
387 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.NonGlobalLargePageRange.uLastTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
388 "Code TLB non-global large page range: last tag", "/IEM/CPU%u/Tlb/Code/LargePageNonGlobalLastTag", idCpu);
389
390 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbInvlPg, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
391 "Code TLB page invalidation requests", "/IEM/CPU%u/Tlb/Code/InvlPg", idCpu);
392 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbInvlPgLargeGlobal, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
393 "Code TLB page invlpg scanning for global large pages", "/IEM/CPU%u/Tlb/Code/InvlPg/LargeGlobal", idCpu);
394 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbInvlPgLargeNonGlobal, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
395 "Code TLB page invlpg scanning for non-global large pages", "/IEM/CPU%u/Tlb/Code/InvlPg/LargeNonGlobal", idCpu);
396
397 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbCoreMisses, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
398 "Code TLB misses", "/IEM/CPU%u/Tlb/Code/Misses", idCpu);
399 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbCoreGlobalLoads, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
400 "Code TLB global loads", "/IEM/CPU%u/Tlb/Code/Misses/GlobalLoads", idCpu);
401 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbSlowCodeReadPath, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
402 "Code TLB slow read path", "/IEM/CPU%u/Tlb/Code/SlowReads", idCpu);
403# ifdef IEM_WITH_TLB_STATISTICS
404 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbCoreHits, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
405 "Code TLB hits (non-native)", "/IEM/CPU%u/Tlb/Code/Hits/Other", idCpu);
406# if defined(VBOX_WITH_IEM_NATIVE_RECOMPILER)
407 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCodeTlbHitsForNewPage, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
408 "Code TLB native hits on new page", "/IEM/CPU%u/Tlb/Code/Hits/New-Page", idCpu);
409 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCodeTlbHitsForNewPageWithOffset, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
410 "Code TLB native hits on new page /w offset", "/IEM/CPU%u/Tlb/Code/Hits/New-Page-With-Offset", idCpu);
411# endif
412
413 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Code/Hits/*", idCpu);
414 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Code TLB hits",
415 "/IEM/CPU%u/Tlb/Code/Hits", idCpu);
416
417 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/Tlb/Code/Hits|/IEM/CPU%u/Tlb/Code/Misses", idCpu, idCpu);
418 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szVal, "Code TLB lookups (sum of hits and misses)",
419 "/IEM/CPU%u/Tlb/Code/AllLookups", idCpu);
420
421 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/Tlb/Code/Misses", idCpu);
422 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Code/Hits", idCpu);
423 STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PPM, szVal, true, szPat,
424 "Code TLB actual miss rate", "/IEM/CPU%u/Tlb/Code/RateMisses", idCpu);
425
426# if defined(VBOX_WITH_IEM_NATIVE_RECOMPILER)
427 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNativeMissTag, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
428 "Code TLB misses in native code: Tag mismatch [not directly included grand parent sum]",
429 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown/Tag", idCpu);
430 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNativeMissFlagsAndPhysRev, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
431 "Code TLB misses in native code: Flags or physical revision mistmatch [not directly included grand parent sum]",
432 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown/FlagsAndPhysRev", idCpu);
433 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNativeMissAlignment, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
434 "Code TLB misses in native code: Alignment [not directly included grand parent sum]",
435 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown/Alignment", idCpu);
436 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNativeMissCrossPage, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
437 "Code TLB misses in native code: Cross page [not directly included grand parent sum]",
438 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown/CrossPage", idCpu);
439 STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbNativeMissNonCanonical, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
440 "Code TLB misses in native code: Non-canonical [not directly included grand parent sum]",
441 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown/NonCanonical", idCpu);
442
443 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCodeTlbMissesNewPage, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
444 "Code TLB native misses on new page",
445 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown2/New-Page", idCpu);
446 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCodeTlbMissesNewPageWithOffset, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
447 "Code TLB native misses on new page w/ offset",
448 "/IEM/CPU%u/Tlb/Code/Misses/NativeBreakdown2/New-Page-With-Offset", idCpu);
449# endif
450# endif /* IEM_WITH_TLB_STATISTICS */
451
452 /* Data TLB organized as best we can... */
453 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.uTlbRevision, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
454 "Data TLB non-global revision", "/IEM/CPU%u/Tlb/Data/RevisionNonGlobal", idCpu);
455 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.uTlbRevisionGlobal, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
456 "Data TLB global revision", "/IEM/CPU%u/Tlb/Data/RevisionGlobal", idCpu);
457 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlsFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
458 "Data TLB non-global flushes", "/IEM/CPU%u/Tlb/Data/RevisionNonGlobalFlushes", idCpu);
459 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlsGlobalFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
460 "Data TLB global flushes", "/IEM/CPU%u/Tlb/Data/RevisionGlobalFlushes", idCpu);
461 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbRevisionRollovers, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
462 "Data TLB revision rollovers", "/IEM/CPU%u/Tlb/Data/RevisionRollovers", idCpu);
463
464 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.DataTlb.uTlbPhysRev, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
465 "Data TLB physical revision", "/IEM/CPU%u/Tlb/Data/PhysicalRevision", idCpu);
466 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbPhysRevFlushes, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
467 "Data TLB revision flushes", "/IEM/CPU%u/Tlb/Data/PhysicalRevisionFlushes", idCpu);
468 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbPhysRevRollovers, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
469 "Data TLB revision rollovers", "/IEM/CPU%u/Tlb/Data/PhysicalRevisionRollovers", idCpu);
470
471 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbGlobalLargePageCurLoads, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
472 "Data TLB global large page loads since flush", "/IEM/CPU%u/Tlb/Data/LargePageGlobalCurLoads", idCpu);
473 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.GlobalLargePageRange.uFirstTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
474 "Data TLB global large page range: lowest tag", "/IEM/CPU%u/Tlb/Data/LargePageGlobalFirstTag", idCpu);
475 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.GlobalLargePageRange.uLastTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
476 "Data TLB global large page range: last tag", "/IEM/CPU%u/Tlb/Data/LargePageGlobalLastTag", idCpu);
477
478 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNonGlobalLargePageCurLoads, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
479 "Data TLB non-global large page loads since flush", "/IEM/CPU%u/Tlb/Data/LargePageNonGlobalCurLoads", idCpu);
480 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.NonGlobalLargePageRange.uFirstTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
481 "Data TLB non-global large page range: lowest tag", "/IEM/CPU%u/Tlb/Data/LargePageNonGlobalFirstTag", idCpu);
482 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.NonGlobalLargePageRange.uLastTag, STAMTYPE_X64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
483 "Data TLB non-global large page range: last tag", "/IEM/CPU%u/Tlb/Data/LargePageNonGlobalLastTag", idCpu);
484
485 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbInvlPg, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
486 "Data TLB page invalidation requests", "/IEM/CPU%u/Tlb/Data/InvlPg", idCpu);
487 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbInvlPgLargeGlobal, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
488 "Data TLB page invlpg scanning for global large pages", "/IEM/CPU%u/Tlb/Data/InvlPg/LargeGlobal", idCpu);
489 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbInvlPgLargeNonGlobal, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
490 "Data TLB page invlpg scanning for non-global large pages", "/IEM/CPU%u/Tlb/Data/InvlPg/LargeNonGlobal", idCpu);
491
492 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbCoreMisses, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
493 "Data TLB core misses (iemMemMap, direct iemMemMapJmp (not safe path))",
494 "/IEM/CPU%u/Tlb/Data/Misses/Core", idCpu);
495 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbCoreGlobalLoads, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
496 "Data TLB global loads",
497 "/IEM/CPU%u/Tlb/Data/Misses/Core/GlobalLoads", idCpu);
498 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbSafeReadPath, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
499 "Data TLB safe read path (inline/native misses going to iemMemMapJmp)",
500 "/IEM/CPU%u/Tlb/Data/Misses/Safe/Reads", idCpu);
501 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbSafeWritePath, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
502 "Data TLB safe write path (inline/native misses going to iemMemMapJmp)",
503 "/IEM/CPU%u/Tlb/Data/Misses/Safe/Writes", idCpu);
504 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Data/Misses/*", idCpu);
505 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Data TLB misses",
506 "/IEM/CPU%u/Tlb/Data/Misses", idCpu);
507
508 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Data/Misses/Safe/*", idCpu);
509 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Data TLB actual safe path calls (read + write)",
510 "/IEM/CPU%u/Tlb/Data/Misses/Safe", idCpu);
511 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbSafeHits, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
512 "Data TLB hits in iemMemMapJmp - not part of safe-path total",
513 "/IEM/CPU%u/Tlb/Data/Misses/Safe/SubPartHits", idCpu);
514 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbSafeMisses, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
515 "Data TLB misses in iemMemMapJmp - not part of safe-path total",
516 "/IEM/CPU%u/Tlb/Data/Misses/Safe/SubPartMisses", idCpu);
517 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbSafeGlobalLoads, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
518 "Data TLB global loads",
519 "/IEM/CPU%u/Tlb/Data/Misses/Safe/SubPartMisses/GlobalLoads", idCpu);
520
521# ifdef IEM_WITH_TLB_STATISTICS
522# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER
523 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNativeMissTag, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
524 "Data TLB misses in native code: Tag mismatch [not directly included grand parent sum]",
525 "/IEM/CPU%u/Tlb/Data/Misses/NativeBreakdown/Tag", idCpu);
526 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNativeMissFlagsAndPhysRev, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
527 "Data TLB misses in native code: Flags or physical revision mistmatch [not directly included grand parent sum]",
528 "/IEM/CPU%u/Tlb/Data/Misses/NativeBreakdown/FlagsAndPhysRev", idCpu);
529 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNativeMissAlignment, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
530 "Data TLB misses in native code: Alignment [not directly included grand parent sum]",
531 "/IEM/CPU%u/Tlb/Data/Misses/NativeBreakdown/Alignment", idCpu);
532 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNativeMissCrossPage, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
533 "Data TLB misses in native code: Cross page [not directly included grand parent sum]",
534 "/IEM/CPU%u/Tlb/Data/Misses/NativeBreakdown/CrossPage", idCpu);
535 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbNativeMissNonCanonical, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
536 "Data TLB misses in native code: Non-canonical [not directly included grand parent sum]",
537 "/IEM/CPU%u/Tlb/Data/Misses/NativeBreakdown/NonCanonical", idCpu);
538# endif
539# endif
540
541# ifdef IEM_WITH_TLB_STATISTICS
542 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbCoreHits, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
543 "Data TLB core hits (iemMemMap, direct iemMemMapJmp (not safe path))",
544 "/IEM/CPU%u/Tlb/Data/Hits/Core", idCpu);
545 STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbInlineCodeHits, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
546 "Data TLB hits in IEMAllMemRWTmplInline.cpp.h",
547 "/IEM/CPU%u/Tlb/Data/Hits/Inline", idCpu);
548# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER
549 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStack, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
550 "Data TLB native stack access hits",
551 "/IEM/CPU%u/Tlb/Data/Hits/Native/Stack", idCpu);
552 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForFetch, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
553 "Data TLB native data fetch hits",
554 "/IEM/CPU%u/Tlb/Data/Hits/Native/Fetch", idCpu);
555 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStore, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
556 "Data TLB native data store hits",
557 "/IEM/CPU%u/Tlb/Data/Hits/Native/Store", idCpu);
558 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForMapped, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
559 "Data TLB native mapped data hits",
560 "/IEM/CPU%u/Tlb/Data/Hits/Native/Mapped", idCpu);
561# endif
562 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Data/Hits/*", idCpu);
563 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Data TLB hits",
564 "/IEM/CPU%u/Tlb/Data/Hits", idCpu);
565
566# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER
567 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Data/Hits/Native/*", idCpu);
568 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Data TLB hits from native code",
569 "/IEM/CPU%u/Tlb/Data/Hits/Native", idCpu);
570# endif
571
572 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/Tlb/Data/Hits|/IEM/CPU%u/Tlb/Data/Misses", idCpu, idCpu);
573 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szVal, "Data TLB lookups (sum of hits and misses)",
574 "/IEM/CPU%u/Tlb/Data/AllLookups", idCpu);
575
576 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/Tlb/Data/Misses", idCpu);
577 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/Tlb/Data/Hits", idCpu);
578 STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PPM, szVal, true, szPat,
579 "Data TLB actual miss rate", "/IEM/CPU%u/Tlb/Data/RateMisses", idCpu);
580
581# endif /* IEM_WITH_TLB_STATISTICS */
582
583
584#ifdef VBOX_WITH_IEM_RECOMPILER
585 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.cTbExecNative, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
586 "Executed native translation block", "/IEM/CPU%u/re/cTbExecNative", idCpu);
587 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.cTbExecThreaded, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
588 "Executed threaded translation block", "/IEM/CPU%u/re/cTbExecThreaded", idCpu);
589 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbThreadedExecBreaks, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
590 "Times threaded TB execution was interrupted/broken off", "/IEM/CPU%u/re/cTbExecThreadedBreaks", idCpu);
591# ifdef VBOX_WITH_STATISTICS
592 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbThreadedExecBreaksWithLookup, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
593 "Times threaded TB execution was interrupted/broken off on a call with lookup entries", "/IEM/CPU%u/re/cTbExecThreadedBreaksWithLookup", idCpu);
594 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbThreadedExecBreaksWithoutLookup, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
595 "Times threaded TB execution was interrupted/broken off on a call without lookup entries", "/IEM/CPU%u/re/cTbExecThreadedBreaksWithoutLookup", idCpu);
596# endif
597
598 PIEMTBALLOCATOR const pTbAllocator = pVCpu->iem.s.pTbAllocatorR3;
599 STAMR3RegisterF(pVM, (void *)&pTbAllocator->StatAllocs, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS,
600 "Translation block allocations", "/IEM/CPU%u/re/cTbAllocCalls", idCpu);
601 STAMR3RegisterF(pVM, (void *)&pTbAllocator->StatFrees, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS,
602 "Translation block frees", "/IEM/CPU%u/re/cTbFreeCalls", idCpu);
603# ifdef VBOX_WITH_STATISTICS
604 STAMR3RegisterF(pVM, (void *)&pTbAllocator->StatPrune, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_TICKS_PER_CALL,
605 "Time spent freeing up TBs when full at alloc", "/IEM/CPU%u/re/TbPruningAlloc", idCpu);
606# endif
607 STAMR3RegisterF(pVM, (void *)&pTbAllocator->StatPruneNative, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_TICKS_PER_CALL,
608 "Time spent freeing up native TBs when out of executable memory", "/IEM/CPU%u/re/ExecMem/TbPruningNative", idCpu);
609 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cAllocatedChunks, STAMTYPE_U16, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
610 "Populated TB chunks", "/IEM/CPU%u/re/cTbChunks", idCpu);
611 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cMaxChunks, STAMTYPE_U8, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
612 "Max number of TB chunks", "/IEM/CPU%u/re/cTbChunksMax", idCpu);
613 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cTotalTbs, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
614 "Total number of TBs in the allocator", "/IEM/CPU%u/re/cTbTotal", idCpu);
615 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cMaxTbs, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
616 "Max total number of TBs allowed", "/IEM/CPU%u/re/cTbTotalMax", idCpu);
617 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cInUseTbs, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
618 "Number of currently allocated TBs", "/IEM/CPU%u/re/cTbAllocated", idCpu);
619 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cNativeTbs, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
620 "Number of currently allocated native TBs", "/IEM/CPU%u/re/cTbAllocatedNative", idCpu);
621 STAMR3RegisterF(pVM, (void *)&pTbAllocator->cThreadedTbs, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
622 "Number of currently allocated threaded TBs", "/IEM/CPU%u/re/cTbAllocatedThreaded", idCpu);
623
624 PIEMTBCACHE const pTbCache = pVCpu->iem.s.pTbCacheR3;
625 STAMR3RegisterF(pVM, (void *)&pTbCache->cHash, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
626 "Translation block lookup table size", "/IEM/CPU%u/re/cTbHashTab", idCpu);
627
628 STAMR3RegisterF(pVM, (void *)&pTbCache->cLookupHits, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
629 "Translation block lookup hits", "/IEM/CPU%u/re/cTbLookupHits", idCpu);
630 STAMR3RegisterF(pVM, (void *)&pTbCache->cLookupHitsViaTbLookupTable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
631 "Translation block lookup hits via TB lookup table associated with the previous TB", "/IEM/CPU%u/re/cTbLookupHitsViaTbLookupTable", idCpu);
632 STAMR3RegisterF(pVM, (void *)&pTbCache->cLookupMisses, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
633 "Translation block lookup misses", "/IEM/CPU%u/re/cTbLookupMisses", idCpu);
634 STAMR3RegisterF(pVM, (void *)&pTbCache->cCollisions, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_OCCURENCES,
635 "Translation block hash table collisions", "/IEM/CPU%u/re/cTbCollisions", idCpu);
636# ifdef VBOX_WITH_STATISTICS
637 STAMR3RegisterF(pVM, (void *)&pTbCache->StatPrune, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_TICKS_PER_CALL,
638 "Time spent shortening collision lists", "/IEM/CPU%u/re/TbPruningCollisions", idCpu);
639# endif
640
641 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbThreadedCalls, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS_PER_TB,
642 "Calls per threaded translation block", "/IEM/CPU%u/re/ThrdCallsPerTb", idCpu);
643 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbInstr, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_INSTR_PER_TB,
644 "Instruction per threaded translation block", "/IEM/CPU%u/re/ThrdInstrPerTb", idCpu);
645 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbLookupEntries, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_INSTR_PER_TB,
646 "TB lookup table entries per threaded translation block", "/IEM/CPU%u/re/ThrdLookupEntriesPerTb", idCpu);
647
648 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatCheckIrqBreaks, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
649 "TB breaks by CheckIrq", "/IEM/CPU%u/re/CheckIrqBreaks", idCpu);
650 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatCheckModeBreaks, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
651 "TB breaks by CheckMode", "/IEM/CPU%u/re/CheckModeBreaks", idCpu);
652 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatCheckBranchMisses, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
653 "Branch target misses", "/IEM/CPU%u/re/CheckTbJmpMisses", idCpu);
654 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatCheckNeedCsLimChecking, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
655 "Needing CS.LIM checking TB after branch or on page crossing", "/IEM/CPU%u/re/CheckTbNeedCsLimChecking", idCpu);
656# ifdef VBOX_WITH_STATISTICS
657 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbLoopInTbDetected, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
658 "Detected loop within TB", "/IEM/CPU%u/re/LoopInTbDetected", idCpu);
659# endif
660
661 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeExecMemInstrBufAllocFailed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
662 "Number of times the exec memory allocator failed to allocate a large enough buffer",
663 "/IEM/CPU%u/re/NativeExecMemInstrBufAllocFailed", idCpu);
664
665 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCallsRecompiled, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS_PER_TB,
666 "Number of threaded calls per TB that have been properly recompiled to native code",
667 "/IEM/CPU%u/re/NativeCallsRecompiledPerTb", idCpu);
668 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCallsThreaded, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS_PER_TB,
669 "Number of threaded calls per TB that could not be recompiler to native code",
670 "/IEM/CPU%u/re/NativeCallsThreadedPerTb", idCpu);
671 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeFullyRecompiledTbs, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
672 "Number of threaded calls that could not be recompiler to native code",
673 "/IEM/CPU%u/re/NativeFullyRecompiledTbs", idCpu);
674
675 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbNativeCode, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES_PER_TB,
676 "Size of native code per TB", "/IEM/CPU%u/re/NativeCodeSizePerTb", idCpu);
677 STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeRecompilation, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_TICKS_PER_CALL,
678 "Profiling iemNativeRecompile()", "/IEM/CPU%u/re/NativeRecompilation", idCpu);
679
680# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER
681# ifdef VBOX_WITH_STATISTICS
682 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeRegFindFree, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
683 "Number of calls to iemNativeRegAllocFindFree.",
684 "/IEM/CPU%u/re/NativeRegFindFree", idCpu);
685# endif
686 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeRegFindFreeVar, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
687 "Number of times iemNativeRegAllocFindFree needed to free a variable.",
688 "/IEM/CPU%u/re/NativeRegFindFreeVar", idCpu);
689# ifdef VBOX_WITH_STATISTICS
690 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeRegFindFreeNoVar, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
691 "Number of times iemNativeRegAllocFindFree did not needed to free any variables.",
692 "/IEM/CPU%u/re/NativeRegFindFreeNoVar", idCpu);
693 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeRegFindFreeLivenessUnshadowed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
694 "Times liveness info freeed up shadowed guest registers in iemNativeRegAllocFindFree.",
695 "/IEM/CPU%u/re/NativeRegFindFreeLivenessUnshadowed", idCpu);
696 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeRegFindFreeLivenessHelped, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
697 "Times liveness info helped finding the return register in iemNativeRegAllocFindFree.",
698 "/IEM/CPU%u/re/NativeRegFindFreeLivenessHelped", idCpu);
699
700 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeEflSkippedArithmetic, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
701 "Skipped all status flag updating, arithmetic instructions",
702 "/IEM/CPU%u/re/NativeEFlagsSkippedArithmetic", idCpu);
703 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeEflSkippedLogical, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
704 "Skipped all status flag updating, logical instructions",
705 "/IEM/CPU%u/re/NativeEFlagsSkippedLogical", idCpu);
706
707 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.CF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippable", idCpu);
708 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.PF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsPfSkippable", idCpu);
709 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.AF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsAfSkippable", idCpu);
710 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.ZF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsZfSkippable", idCpu);
711 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.SF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsSfSkippable", idCpu);
712 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfSkippable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Skippable EFLAGS.OF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsOfSkippable", idCpu);
713
714 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.CF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsCfRequired", idCpu);
715 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.PF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsPfRequired", idCpu);
716 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.AF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsAfRequired", idCpu);
717 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.ZF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsZfRequired", idCpu);
718 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.SF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsSfRequired", idCpu);
719 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfRequired, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Required EFLAGS.OF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsOfRequired", idCpu);
720
721# ifdef IEMLIVENESS_EXTENDED_LAYOUT
722 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflCfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.CF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsCfDelayable", idCpu);
723 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflPfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.PF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsPfDelayable", idCpu);
724 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflAfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.AF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsAfDelayable", idCpu);
725 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflZfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.ZF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsZfDelayable", idCpu);
726 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflSfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.SF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsSfDelayable", idCpu);
727 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeLivenessEflOfDelayable, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Maybe delayable EFLAGS.OF updating", "/IEM/CPU%u/re/NativeLivenessEFlagsOfDelayable", idCpu);
728# endif
729
730 /* Sum up all status bits ('_' is a sorting hack). */
731 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fSkippable*", idCpu);
732 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total skippable EFLAGS status bit updating",
733 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippable", idCpu);
734
735 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fRequired*", idCpu);
736 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total required EFLAGS status bit updating",
737 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusRequired", idCpu);
738
739# ifdef IEMLIVENESS_EXTENDED_LAYOUT
740 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?fDelayable*", idCpu);
741 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total potentially delayable EFLAGS status bit updating",
742 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayable", idCpu);
743# endif
744
745 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags?f*", idCpu);
746 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat, "Total EFLAGS status bit events of any kind",
747 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusTotal", idCpu);
748
749 /* Ratio of the status bit skippables. */
750 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusTotal", idCpu);
751 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippable", idCpu);
752 STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
753 "Total skippable EFLAGS status bit updating percentage",
754 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusSkippablePct", idCpu);
755
756# ifdef IEMLIVENESS_EXTENDED_LAYOUT
757 /* Ratio of the status bit delayables. */
758 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayable", idCpu);
759 STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
760 "Total potentially delayable EFLAGS status bit updating percentage",
761 "/IEM/CPU%u/re/NativeLivenessEFlags_StatusDelayablePct", idCpu);
762# endif
763
764 /* Ratios of individual bits. */
765 size_t const offFlagChar = RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeLivenessEFlagsCf*", idCpu) - 3;
766 Assert(szPat[offFlagChar] == 'C');
767 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippable", idCpu);
768 Assert(szVal[offFlagChar] == 'C');
769 szPat[offFlagChar] = szVal[offFlagChar] = 'C'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.CF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsCfSkippablePct", idCpu);
770 szPat[offFlagChar] = szVal[offFlagChar] = 'P'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.PF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsPfSkippablePct", idCpu);
771 szPat[offFlagChar] = szVal[offFlagChar] = 'A'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.AF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsAfSkippablePct", idCpu);
772 szPat[offFlagChar] = szVal[offFlagChar] = 'Z'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.ZF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsZfSkippablePct", idCpu);
773 szPat[offFlagChar] = szVal[offFlagChar] = 'S'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.SF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsSfSkippablePct", idCpu);
774 szPat[offFlagChar] = szVal[offFlagChar] = 'O'; STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, true, szPat, "Skippable EFLAGS.OF updating percentage", "/IEM/CPU%u/re/NativeLivenessEFlagsOfSkippablePct", idCpu);
775
776 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateTotal, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Total RIP updates", "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
777 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateDelayed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Delayed RIP updates", "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
778
779# ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
780 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeSimdRegFindFree, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
781 "Number of calls to iemNativeSimdRegAllocFindFree.",
782 "/IEM/CPU%u/re/NativeSimdRegFindFree", idCpu);
783 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeSimdRegFindFreeVar, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
784 "Number of times iemNativeSimdRegAllocFindFree needed to free a variable.",
785 "/IEM/CPU%u/re/NativeSimdRegFindFreeVar", idCpu);
786 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeSimdRegFindFreeNoVar, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
787 "Number of times iemNativeSimdRegAllocFindFree did not needed to free any variables.",
788 "/IEM/CPU%u/re/NativeSimdRegFindFreeNoVar", idCpu);
789 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeSimdRegFindFreeLivenessUnshadowed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
790 "Times liveness info freeed up shadowed guest registers in iemNativeSimdRegAllocFindFree.",
791 "/IEM/CPU%u/re/NativeSimdRegFindFreeLivenessUnshadowed", idCpu);
792 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeSimdRegFindFreeLivenessHelped, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
793 "Times liveness info helped finding the return register in iemNativeSimdRegAllocFindFree.",
794 "/IEM/CPU%u/re/NativeSimdRegFindFreeLivenessHelped", idCpu);
795
796 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckPotential, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks",
797 "/IEM/CPU%u/re/NativeMaybeDeviceNotAvailXcptCheckPotential", idCpu);
798 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeWaitDeviceNotAvailXcptCheckPotential, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE() checks",
799 "/IEM/CPU%u/re/NativeMaybeWaitDeviceNotAvailXcptCheckPotential", idCpu);
800 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeSseXcptCheckPotential, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks",
801 "/IEM/CPU%u/re/NativeMaybeSseXcptCheckPotential", idCpu);
802 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeAvxXcptCheckPotential, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks",
803 "/IEM/CPU%u/re/NativeMaybeAvxXcptCheckPotential", idCpu);
804
805 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckOmitted, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks omitted",
806 "/IEM/CPU%u/re/NativeMaybeDeviceNotAvailXcptCheckOmitted", idCpu);
807 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeWaitDeviceNotAvailXcptCheckOmitted, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE() checks omitted",
808 "/IEM/CPU%u/re/NativeMaybeWaitDeviceNotAvailXcptCheckOmitted", idCpu);
809 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeSseXcptCheckOmitted, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks omitted",
810 "/IEM/CPU%u/re/NativeMaybeSseXcptCheckOmitted", idCpu);
811 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeAvxXcptCheckOmitted, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks omitted",
812 "/IEM/CPU%u/re/NativeMaybeAvxXcptCheckOmitted", idCpu);
813# endif
814
815 /* Ratio of delayed PC updates. */
816 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
817 RTStrPrintf(szVal, sizeof(szVal), "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
818 STAMR3RegisterPctOfSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, STAMUNIT_PCT, szVal, false, szPat,
819 "Delayed RIP updating percentage",
820 "/IEM/CPU%u/re/NativePcUpdateDelayed_StatusDelayedPct", idCpu);
821
822 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbFinished, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
823 "Number of times the TB finishes execution completely",
824 "/IEM/CPU%u/re/NativeTbFinished", idCpu);
825# endif /* VBOX_WITH_STATISTICS */
826 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitReturnBreak, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
827 "Number of times the TB finished through the ReturnBreak label",
828 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak", idCpu);
829 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitReturnBreakFF, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
830 "Number of times the TB finished through the ReturnBreakFF label",
831 "/IEM/CPU%u/re/NativeTbExit/ReturnBreakFF", idCpu);
832 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitReturnWithFlags, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
833 "Number of times the TB finished through the ReturnWithFlags label",
834 "/IEM/CPU%u/re/NativeTbExit/ReturnWithFlags", idCpu);
835 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitReturnOtherStatus, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
836 "Number of times the TB finished with some other status value",
837 "/IEM/CPU%u/re/NativeTbExit/ReturnOtherStatus", idCpu);
838 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitLongJump, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
839 "Number of times the TB finished via long jump / throw",
840 "/IEM/CPU%u/re/NativeTbExit/LongJumps", idCpu);
841 /* These end up returning VINF_IEM_REEXEC_BREAK and are thus already counted under NativeTbExit/ReturnBreak: */
842 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitObsoleteTb, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
843 "Number of times the TB finished through the ObsoleteTb label",
844 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/ObsoleteTb", idCpu);
845 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatCheckNeedCsLimChecking, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
846 "Number of times the TB finished through the NeedCsLimChecking label",
847 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/NeedCsLimChecking", idCpu);
848 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatCheckBranchMisses, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
849 "Number of times the TB finished through the CheckBranchMiss label",
850 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/CheckBranchMiss", idCpu);
851 /* Raising stuff will either increment NativeTbExit/LongJumps or NativeTbExit/ReturnOtherStatus
852 depending on whether VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP is defined: */
853# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
854# define RAISE_PREFIX "/IEM/CPU%u/re/NativeTbExit/ReturnOtherStatus/"
855# else
856# define RAISE_PREFIX "/IEM/CPU%u/re/NativeTbExit/LongJumps/"
857# endif
858 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseDe, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
859 "Number of times the TB finished raising a #DE exception",
860 RAISE_PREFIX "RaiseDe", idCpu);
861 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseUd, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
862 "Number of times the TB finished raising a #UD exception",
863 RAISE_PREFIX "RaiseUd", idCpu);
864 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseSseRelated, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
865 "Number of times the TB finished raising an SSE related exception",
866 RAISE_PREFIX "RaiseSseRelated", idCpu);
867 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseAvxRelated, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
868 "Number of times the TB finished raising an AVX related exception",
869 RAISE_PREFIX "RaiseAvxRelated", idCpu);
870 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseSseAvxFpRelated, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
871 "Number of times the TB finished raising an SSE/AVX floating point related exception",
872 RAISE_PREFIX "RaiseSseAvxFpRelated", idCpu);
873 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseNm, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
874 "Number of times the TB finished raising a #NM exception",
875 RAISE_PREFIX "RaiseNm", idCpu);
876 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseGp0, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
877 "Number of times the TB finished raising a #GP(0) exception",
878 RAISE_PREFIX "RaiseGp0", idCpu);
879 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseMf, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
880 "Number of times the TB finished raising a #MF exception",
881 RAISE_PREFIX "RaiseMf", idCpu);
882 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitRaiseXf, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
883 "Number of times the TB finished raising a #XF exception",
884 RAISE_PREFIX "RaiseXf", idCpu);
885
886 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1Irq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
887 "Direct linking #1 with IRQ check succeeded",
888 "/IEM/CPU%u/re/NativeTbExit/DirectLinking1Irq", idCpu);
889 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1NoIrq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
890 "Direct linking #1 w/o IRQ check succeeded",
891 "/IEM/CPU%u/re/NativeTbExit/DirectLinking1NoIrq", idCpu);
892# ifdef VBOX_WITH_STATISTICS
893 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1NoTb, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
894 "Direct linking #1 failed: No TB in lookup table",
895 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking1NoTb", idCpu);
896 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1MismatchGCPhysPc, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
897 "Direct linking #1 failed: GCPhysPc mismatch",
898 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking1MismatchGCPhysPc", idCpu);
899 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1MismatchFlags, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
900 "Direct linking #1 failed: TB flags mismatch",
901 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking1MismatchFlags", idCpu);
902 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking1PendingIrq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
903 "Direct linking #1 failed: IRQ or FF pending",
904 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking1PendingIrq", idCpu);
905# endif
906
907 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2Irq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
908 "Direct linking #2 with IRQ check succeeded",
909 "/IEM/CPU%u/re/NativeTbExit/DirectLinking2Irq", idCpu);
910 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2NoIrq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
911 "Direct linking #2 w/o IRQ check succeeded",
912 "/IEM/CPU%u/re/NativeTbExit/DirectLinking2NoIrq", idCpu);
913# ifdef VBOX_WITH_STATISTICS
914 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2NoTb, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
915 "Direct linking #2 failed: No TB in lookup table",
916 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking2NoTb", idCpu);
917 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2MismatchGCPhysPc, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
918 "Direct linking #2 failed: GCPhysPc mismatch",
919 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking2MismatchGCPhysPc", idCpu);
920 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2MismatchFlags, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
921 "Direct linking #2 failed: TB flags mismatch",
922 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking2MismatchFlags", idCpu);
923 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeTbExitDirectLinking2PendingIrq, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
924 "Direct linking #2 failed: IRQ or FF pending",
925 "/IEM/CPU%u/re/NativeTbExit/ReturnBreak/DirectLinking2PendingIrq", idCpu);
926# endif
927
928 RTStrPrintf(szPat, sizeof(szPat), "/IEM/CPU%u/re/NativeTbExit/*", idCpu); /* only immediate children, no sub folders */
929 STAMR3RegisterSum(pVM->pUVM, STAMVISIBILITY_ALWAYS, szPat,
930 "Number of times native TB execution finished before the end (not counting thrown memory++ exceptions)",
931 "/IEM/CPU%u/re/NativeTbExit", idCpu);
932
933
934# endif /* VBOX_WITH_IEM_NATIVE_RECOMPILER */
935
936
937# ifdef VBOX_WITH_STATISTICS
938 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatMemMapJmp, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
939 "iemMemMapJmp calls", "/IEM/CPU%u/iemMemMapJmp", idCpu);
940 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatMemMapNoJmp, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
941 "iemMemMap calls", "/IEM/CPU%u/iemMemMapNoJmp", idCpu);
942 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatMemBounceBufferCrossPage, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
943 "iemMemBounceBufferMapCrossPage calls", "/IEM/CPU%u/iemMemMapBounceBufferCrossPage", idCpu);
944 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatMemBounceBufferMapPhys, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
945 "iemMemBounceBufferMapPhys calls", "/IEM/CPU%u/iemMemMapBounceBufferMapPhys", idCpu);
946# endif
947
948
949#endif /* VBOX_WITH_IEM_RECOMPILER */
950
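    /* Per-vector exception and interrupt counters (STAMVISIBILITY_USED: only shown once they are non-zero). */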
951 for (uint32_t i = 0; i < RT_ELEMENTS(pVCpu->iem.s.aStatXcpts); i++)
952 STAMR3RegisterF(pVM, &pVCpu->iem.s.aStatXcpts[i], STAMTYPE_COUNTER, STAMVISIBILITY_USED, STAMUNIT_OCCURENCES,
953 "", "/IEM/CPU%u/Exceptions/%02x", idCpu, i);
954 for (uint32_t i = 0; i < RT_ELEMENTS(pVCpu->iem.s.aStatInts); i++)
955 STAMR3RegisterF(pVM, &pVCpu->iem.s.aStatInts[i], STAMTYPE_U32_RESET, STAMVISIBILITY_USED, STAMUNIT_OCCURENCES,
956 "", "/IEM/CPU%u/Interrupts/%02x", idCpu, i);
957
958# if !defined(VBOX_VMM_TARGET_ARMV8) && defined(VBOX_WITH_STATISTICS) && !defined(DOXYGEN_RUNNING)
959 /* Instruction statistics: */
960# define IEM_DO_INSTR_STAT(a_Name, a_szDesc) \
961 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatsRZ.a_Name, STAMTYPE_U32_RESET, STAMVISIBILITY_USED, \
962 STAMUNIT_COUNT, a_szDesc, "/IEM/CPU%u/instr-RZ/" #a_Name, idCpu); \
963 STAMR3RegisterF(pVM, &pVCpu->iem.s.StatsR3.a_Name, STAMTYPE_U32_RESET, STAMVISIBILITY_USED, \
964 STAMUNIT_COUNT, a_szDesc, "/IEM/CPU%u/instr-R3/" #a_Name, idCpu);
965# include "IEMInstructionStatisticsTmpl.h"
966# undef IEM_DO_INSTR_STAT
967# endif
968
969# if defined(VBOX_WITH_STATISTICS) && defined(VBOX_WITH_IEM_RECOMPILER) && !defined(VBOX_VMM_TARGET_ARMV8)
970 /* Threaded function statistics: */
971 for (unsigned i = 1; i < (unsigned)kIemThreadedFunc_End; i++)
972 STAMR3RegisterF(pVM, &pVCpu->iem.s.acThreadedFuncStats[i], STAMTYPE_U32_RESET, STAMVISIBILITY_USED,
973 STAMUNIT_COUNT, NULL, "/IEM/CPU%u/ThrdFuncs/%s", idCpu, g_apszIemThreadedFunctionStats[i]);
974# endif
975
976#endif /* !defined(VBOX_VMM_TARGET_ARMV8) && defined(VBOX_WITH_NESTED_HWVIRT_VMX) - quick fix for stupid structure duplication non-sense */
977 }
978
979#if !defined(VBOX_VMM_TARGET_ARMV8) && defined(VBOX_WITH_NESTED_HWVIRT_VMX)
980 /*
981 * Register the per-VM VMX APIC-access page handler type.
982 */
983 if (pVM->cpum.ro.GuestFeatures.fVmx)
984 {
985 rc = PGMR3HandlerPhysicalTypeRegister(pVM, PGMPHYSHANDLERKIND_ALL, PGMPHYSHANDLER_F_NOT_IN_HM,
986 iemVmxApicAccessPageHandler,
987 "VMX APIC-access page", &pVM->iem.s.hVmxApicAccessPage);
988 AssertLogRelRCReturn(rc, rc);
989 }
990#endif
991
992 DBGFR3InfoRegisterInternalArgv(pVM, "itlb", "IEM instruction TLB", iemR3InfoITlb, DBGFINFO_FLAGS_RUN_ON_EMT);
993 DBGFR3InfoRegisterInternalArgv(pVM, "dtlb", "IEM data TLB", iemR3InfoDTlb, DBGFINFO_FLAGS_RUN_ON_EMT);
994#ifdef IEM_WITH_TLB_TRACE
995 DBGFR3InfoRegisterInternalArgv(pVM, "tlbtrace", "IEM TLB trace log", iemR3InfoTlbTrace, DBGFINFO_FLAGS_RUN_ON_EMT);
996#endif
997#if defined(VBOX_WITH_IEM_RECOMPILER) && !defined(VBOX_VMM_TARGET_ARMV8)
998 DBGFR3InfoRegisterInternalArgv(pVM, "tb", "IEM translation block", iemR3InfoTb, DBGFINFO_FLAGS_RUN_ON_EMT);
999#endif
1000#ifdef VBOX_WITH_DEBUGGER
1001 iemR3RegisterDebuggerCommands();
1002#endif
1003
1004 return VINF_SUCCESS;
1005}
1006
1007
1008VMMR3DECL(int) IEMR3Term(PVM pVM)
1009{
1010 NOREF(pVM);
1011#ifdef IEM_WITH_TLB_TRACE
1012 for (VMCPUID idCpu = 0; idCpu < pVM->cCpus; idCpu++)
1013 {
1014 PVMCPU const pVCpu = pVM->apCpusR3[idCpu];
1015 RTMemPageFree(pVCpu->iem.s.paTlbTraceEntries,
1016 RT_BIT_Z(pVCpu->iem.s.cTlbTraceEntriesShift) * sizeof(*pVCpu->iem.s.paTlbTraceEntries));
1017 }
1018#endif
1019 return VINF_SUCCESS;
1020}
1021
1022
1023VMMR3DECL(void) IEMR3Relocate(PVM pVM)
1024{
1025 RT_NOREF(pVM);
1026}
1027
1028
1029/**
1030 * Gets the name of a generic IEM exit code.
1031 *
1032 * @returns Pointer to read only string if @a uExit is known, otherwise NULL.
1033 * @param uExit The IEM exit to name.
1034 */
1035VMMR3DECL(const char *) IEMR3GetExitName(uint32_t uExit)
1036{
1037 static const char * const s_apszNames[] =
1038 {
1039 /* external interrupts */
1040 "ExtInt 00h", "ExtInt 01h", "ExtInt 02h", "ExtInt 03h", "ExtInt 04h", "ExtInt 05h", "ExtInt 06h", "ExtInt 07h",
1041 "ExtInt 08h", "ExtInt 09h", "ExtInt 0ah", "ExtInt 0bh", "ExtInt 0ch", "ExtInt 0dh", "ExtInt 0eh", "ExtInt 0fh",
1042 "ExtInt 10h", "ExtInt 11h", "ExtInt 12h", "ExtInt 13h", "ExtInt 14h", "ExtInt 15h", "ExtInt 16h", "ExtInt 17h",
1043 "ExtInt 18h", "ExtInt 19h", "ExtInt 1ah", "ExtInt 1bh", "ExtInt 1ch", "ExtInt 1dh", "ExtInt 1eh", "ExtInt 1fh",
1044 "ExtInt 20h", "ExtInt 21h", "ExtInt 22h", "ExtInt 23h", "ExtInt 24h", "ExtInt 25h", "ExtInt 26h", "ExtInt 27h",
1045 "ExtInt 28h", "ExtInt 29h", "ExtInt 2ah", "ExtInt 2bh", "ExtInt 2ch", "ExtInt 2dh", "ExtInt 2eh", "ExtInt 2fh",
1046 "ExtInt 30h", "ExtInt 31h", "ExtInt 32h", "ExtInt 33h", "ExtInt 34h", "ExtInt 35h", "ExtInt 36h", "ExtInt 37h",
1047 "ExtInt 38h", "ExtInt 39h", "ExtInt 3ah", "ExtInt 3bh", "ExtInt 3ch", "ExtInt 3dh", "ExtInt 3eh", "ExtInt 3fh",
1048 "ExtInt 40h", "ExtInt 41h", "ExtInt 42h", "ExtInt 43h", "ExtInt 44h", "ExtInt 45h", "ExtInt 46h", "ExtInt 47h",
1049 "ExtInt 48h", "ExtInt 49h", "ExtInt 4ah", "ExtInt 4bh", "ExtInt 4ch", "ExtInt 4dh", "ExtInt 4eh", "ExtInt 4fh",
1050 "ExtInt 50h", "ExtInt 51h", "ExtInt 52h", "ExtInt 53h", "ExtInt 54h", "ExtInt 55h", "ExtInt 56h", "ExtInt 57h",
1051 "ExtInt 58h", "ExtInt 59h", "ExtInt 5ah", "ExtInt 5bh", "ExtInt 5ch", "ExtInt 5dh", "ExtInt 5eh", "ExtInt 5fh",
1052 "ExtInt 60h", "ExtInt 61h", "ExtInt 62h", "ExtInt 63h", "ExtInt 64h", "ExtInt 65h", "ExtInt 66h", "ExtInt 67h",
1053 "ExtInt 68h", "ExtInt 69h", "ExtInt 6ah", "ExtInt 6bh", "ExtInt 6ch", "ExtInt 6dh", "ExtInt 6eh", "ExtInt 6fh",
1054 "ExtInt 70h", "ExtInt 71h", "ExtInt 72h", "ExtInt 73h", "ExtInt 74h", "ExtInt 75h", "ExtInt 76h", "ExtInt 77h",
1055 "ExtInt 78h", "ExtInt 79h", "ExtInt 7ah", "ExtInt 7bh", "ExtInt 7ch", "ExtInt 7dh", "ExtInt 7eh", "ExtInt 7fh",
1056 "ExtInt 80h", "ExtInt 81h", "ExtInt 82h", "ExtInt 83h", "ExtInt 84h", "ExtInt 85h", "ExtInt 86h", "ExtInt 87h",
1057 "ExtInt 88h", "ExtInt 89h", "ExtInt 8ah", "ExtInt 8bh", "ExtInt 8ch", "ExtInt 8dh", "ExtInt 8eh", "ExtInt 8fh",
1058 "ExtInt 90h", "ExtInt 91h", "ExtInt 92h", "ExtInt 93h", "ExtInt 94h", "ExtInt 95h", "ExtInt 96h", "ExtInt 97h",
1059 "ExtInt 98h", "ExtInt 99h", "ExtInt 9ah", "ExtInt 9bh", "ExtInt 9ch", "ExtInt 9dh", "ExtInt 9eh", "ExtInt 9fh",
1060 "ExtInt a0h", "ExtInt a1h", "ExtInt a2h", "ExtInt a3h", "ExtInt a4h", "ExtInt a5h", "ExtInt a6h", "ExtInt a7h",
1061 "ExtInt a8h", "ExtInt a9h", "ExtInt aah", "ExtInt abh", "ExtInt ach", "ExtInt adh", "ExtInt aeh", "ExtInt afh",
1062 "ExtInt b0h", "ExtInt b1h", "ExtInt b2h", "ExtInt b3h", "ExtInt b4h", "ExtInt b5h", "ExtInt b6h", "ExtInt b7h",
1063 "ExtInt b8h", "ExtInt b9h", "ExtInt bah", "ExtInt bbh", "ExtInt bch", "ExtInt bdh", "ExtInt beh", "ExtInt bfh",
1064 "ExtInt c0h", "ExtInt c1h", "ExtInt c2h", "ExtInt c3h", "ExtInt c4h", "ExtInt c5h", "ExtInt c6h", "ExtInt c7h",
1065 "ExtInt c8h", "ExtInt c9h", "ExtInt cah", "ExtInt cbh", "ExtInt cch", "ExtInt cdh", "ExtInt ceh", "ExtInt cfh",
1066 "ExtInt d0h", "ExtInt d1h", "ExtInt d2h", "ExtInt d3h", "ExtInt d4h", "ExtInt d5h", "ExtInt d6h", "ExtInt d7h",
1067 "ExtInt d8h", "ExtInt d9h", "ExtInt dah", "ExtInt dbh", "ExtInt dch", "ExtInt ddh", "ExtInt deh", "ExtInt dfh",
1068 "ExtInt e0h", "ExtInt e1h", "ExtInt e2h", "ExtInt e3h", "ExtInt e4h", "ExtInt e5h", "ExtInt e6h", "ExtInt e7h",
1069 "ExtInt e8h", "ExtInt e9h", "ExtInt eah", "ExtInt ebh", "ExtInt ech", "ExtInt edh", "ExtInt eeh", "ExtInt efh",
1070 "ExtInt f0h", "ExtInt f1h", "ExtInt f2h", "ExtInt f3h", "ExtInt f4h", "ExtInt f5h", "ExtInt f6h", "ExtInt f7h",
1071 "ExtInt f8h", "ExtInt f9h", "ExtInt fah", "ExtInt fbh", "ExtInt fch", "ExtInt fdh", "ExtInt feh", "ExtInt ffh",
1072 /* software interrupts */
1073 "SoftInt 00h", "SoftInt 01h", "SoftInt 02h", "SoftInt 03h", "SoftInt 04h", "SoftInt 05h", "SoftInt 06h", "SoftInt 07h",
1074 "SoftInt 08h", "SoftInt 09h", "SoftInt 0ah", "SoftInt 0bh", "SoftInt 0ch", "SoftInt 0dh", "SoftInt 0eh", "SoftInt 0fh",
1075 "SoftInt 10h", "SoftInt 11h", "SoftInt 12h", "SoftInt 13h", "SoftInt 14h", "SoftInt 15h", "SoftInt 16h", "SoftInt 17h",
1076 "SoftInt 18h", "SoftInt 19h", "SoftInt 1ah", "SoftInt 1bh", "SoftInt 1ch", "SoftInt 1dh", "SoftInt 1eh", "SoftInt 1fh",
1077 "SoftInt 20h", "SoftInt 21h", "SoftInt 22h", "SoftInt 23h", "SoftInt 24h", "SoftInt 25h", "SoftInt 26h", "SoftInt 27h",
1078 "SoftInt 28h", "SoftInt 29h", "SoftInt 2ah", "SoftInt 2bh", "SoftInt 2ch", "SoftInt 2dh", "SoftInt 2eh", "SoftInt 2fh",
1079 "SoftInt 30h", "SoftInt 31h", "SoftInt 32h", "SoftInt 33h", "SoftInt 34h", "SoftInt 35h", "SoftInt 36h", "SoftInt 37h",
1080 "SoftInt 38h", "SoftInt 39h", "SoftInt 3ah", "SoftInt 3bh", "SoftInt 3ch", "SoftInt 3dh", "SoftInt 3eh", "SoftInt 3fh",
1081 "SoftInt 40h", "SoftInt 41h", "SoftInt 42h", "SoftInt 43h", "SoftInt 44h", "SoftInt 45h", "SoftInt 46h", "SoftInt 47h",
1082 "SoftInt 48h", "SoftInt 49h", "SoftInt 4ah", "SoftInt 4bh", "SoftInt 4ch", "SoftInt 4dh", "SoftInt 4eh", "SoftInt 4fh",
1083 "SoftInt 50h", "SoftInt 51h", "SoftInt 52h", "SoftInt 53h", "SoftInt 54h", "SoftInt 55h", "SoftInt 56h", "SoftInt 57h",
1084 "SoftInt 58h", "SoftInt 59h", "SoftInt 5ah", "SoftInt 5bh", "SoftInt 5ch", "SoftInt 5dh", "SoftInt 5eh", "SoftInt 5fh",
1085 "SoftInt 60h", "SoftInt 61h", "SoftInt 62h", "SoftInt 63h", "SoftInt 64h", "SoftInt 65h", "SoftInt 66h", "SoftInt 67h",
1086 "SoftInt 68h", "SoftInt 69h", "SoftInt 6ah", "SoftInt 6bh", "SoftInt 6ch", "SoftInt 6dh", "SoftInt 6eh", "SoftInt 6fh",
1087 "SoftInt 70h", "SoftInt 71h", "SoftInt 72h", "SoftInt 73h", "SoftInt 74h", "SoftInt 75h", "SoftInt 76h", "SoftInt 77h",
1088 "SoftInt 78h", "SoftInt 79h", "SoftInt 7ah", "SoftInt 7bh", "SoftInt 7ch", "SoftInt 7dh", "SoftInt 7eh", "SoftInt 7fh",
1089 "SoftInt 80h", "SoftInt 81h", "SoftInt 82h", "SoftInt 83h", "SoftInt 84h", "SoftInt 85h", "SoftInt 86h", "SoftInt 87h",
1090 "SoftInt 88h", "SoftInt 89h", "SoftInt 8ah", "SoftInt 8bh", "SoftInt 8ch", "SoftInt 8dh", "SoftInt 8eh", "SoftInt 8fh",
1091 "SoftInt 90h", "SoftInt 91h", "SoftInt 92h", "SoftInt 93h", "SoftInt 94h", "SoftInt 95h", "SoftInt 96h", "SoftInt 97h",
1092 "SoftInt 98h", "SoftInt 99h", "SoftInt 9ah", "SoftInt 9bh", "SoftInt 9ch", "SoftInt 9dh", "SoftInt 9eh", "SoftInt 9fh",
1093 "SoftInt a0h", "SoftInt a1h", "SoftInt a2h", "SoftInt a3h", "SoftInt a4h", "SoftInt a5h", "SoftInt a6h", "SoftInt a7h",
1094 "SoftInt a8h", "SoftInt a9h", "SoftInt aah", "SoftInt abh", "SoftInt ach", "SoftInt adh", "SoftInt aeh", "SoftInt afh",
1095 "SoftInt b0h", "SoftInt b1h", "SoftInt b2h", "SoftInt b3h", "SoftInt b4h", "SoftInt b5h", "SoftInt b6h", "SoftInt b7h",
1096 "SoftInt b8h", "SoftInt b9h", "SoftInt bah", "SoftInt bbh", "SoftInt bch", "SoftInt bdh", "SoftInt beh", "SoftInt bfh",
1097 "SoftInt c0h", "SoftInt c1h", "SoftInt c2h", "SoftInt c3h", "SoftInt c4h", "SoftInt c5h", "SoftInt c6h", "SoftInt c7h",
1098 "SoftInt c8h", "SoftInt c9h", "SoftInt cah", "SoftInt cbh", "SoftInt cch", "SoftInt cdh", "SoftInt ceh", "SoftInt cfh",
1099 "SoftInt d0h", "SoftInt d1h", "SoftInt d2h", "SoftInt d3h", "SoftInt d4h", "SoftInt d5h", "SoftInt d6h", "SoftInt d7h",
1100 "SoftInt d8h", "SoftInt d9h", "SoftInt dah", "SoftInt dbh", "SoftInt dch", "SoftInt ddh", "SoftInt deh", "SoftInt dfh",
1101 "SoftInt e0h", "SoftInt e1h", "SoftInt e2h", "SoftInt e3h", "SoftInt e4h", "SoftInt e5h", "SoftInt e6h", "SoftInt e7h",
1102 "SoftInt e8h", "SoftInt e9h", "SoftInt eah", "SoftInt ebh", "SoftInt ech", "SoftInt edh", "SoftInt eeh", "SoftInt efh",
1103 "SoftInt f0h", "SoftInt f1h", "SoftInt f2h", "SoftInt f3h", "SoftInt f4h", "SoftInt f5h", "SoftInt f6h", "SoftInt f7h",
1104 "SoftInt f8h", "SoftInt f9h", "SoftInt fah", "SoftInt fbh", "SoftInt fch", "SoftInt fdh", "SoftInt feh", "SoftInt ffh",
1105 };
1106 if (uExit < RT_ELEMENTS(s_apszNames))
1107 return s_apszNames[uExit];
1108 return NULL;
1109}
1110
1111
1112/** Worker for iemR3InfoTlbPrintSlots and iemR3InfoTlbPrintAddress. */
1113static void iemR3InfoTlbPrintHeader(PVMCPU pVCpu, PCDBGFINFOHLP pHlp, IEMTLB const *pTlb, bool *pfHeader)
1114{
1115 if (*pfHeader)
1116 return;
1117 pHlp->pfnPrintf(pHlp, "%cTLB for CPU %u:\n", &pVCpu->iem.s.CodeTlb == pTlb ? 'I' : 'D', pVCpu->idCpu);
1118 *pfHeader = true;
1119}
1120
1121
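/* Flags for the TLB info workers below: ONLY_VALID skips empty/expired entries, CHECK cross-checks valid entries against the guest page tables (only possible on the calling EMT). */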
1122#define IEMR3INFOTLB_F_ONLY_VALID RT_BIT_32(0)
1123#define IEMR3INFOTLB_F_CHECK RT_BIT_32(1)
1124
1125/** Worker for iemR3InfoTlbPrintSlots and iemR3InfoTlbPrintAddress. */
1126static void iemR3InfoTlbPrintSlot(PVMCPU pVCpu, PCDBGFINFOHLP pHlp, IEMTLB const *pTlb, IEMTLBENTRY const *pTlbe,
1127 uint32_t uSlot, uint32_t fFlags)
1128{
1129#ifndef VBOX_VMM_TARGET_ARMV8
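    /* On x86 the TLB pairs slots: even slots hold non-global entries checked against uTlbRevision, odd slots hold global entries checked against uTlbRevisionGlobal. */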
1130 uint64_t const uTlbRevision = !(uSlot & 1) ? pTlb->uTlbRevision : pTlb->uTlbRevisionGlobal;
1131#else
1132 uint64_t const uTlbRevision = pTlb->uTlbRevision;
1133#endif
1134 if ((fFlags & IEMR3INFOTLB_F_ONLY_VALID) && (pTlbe->uTag & IEMTLB_REVISION_MASK) != uTlbRevision)
1135 return;
1136
1137 /* The address needs to be sign extended, thus the shifting fun here. */
1138 RTGCPTR const GCPtr = (RTGCINTPTR)((pTlbe->uTag & ~IEMTLB_REVISION_MASK) << (64 - IEMTLB_TAG_ADDR_WIDTH))
1139 >> (64 - IEMTLB_TAG_ADDR_WIDTH - GUEST_PAGE_SHIFT);
1140 const char *pszValid = "";
1141#ifndef VBOX_VMM_TARGET_ARMV8
1142 char szTmp[128];
1143 if (fFlags & IEMR3INFOTLB_F_CHECK)
1144 {
1145 uint32_t const fInvSlotG = (uint32_t)!(uSlot & 1) << X86_PTE_BIT_G;
1146 PGMPTWALKFAST WalkFast;
1147 int rc = PGMGstQueryPageFast(pVCpu, GCPtr, 0 /*fFlags - don't check or modify anything */, &WalkFast);
1148 pszValid = szTmp;
1149 if (RT_FAILURE(rc))
1150 switch (rc)
1151 {
1152 case VERR_PAGE_TABLE_NOT_PRESENT:
1153 switch ((WalkFast.fFailed & PGM_WALKFAIL_LEVEL_MASK) >> PGM_WALKFAIL_LEVEL_SHIFT)
1154 {
1155 case 1: pszValid = " stale(page-not-present)"; break;
1156 case 2: pszValid = " stale(pd-entry-not-present)"; break;
1157 case 3: pszValid = " stale(pdptr-entry-not-present)"; break;
1158 case 4: pszValid = " stale(pml4-entry-not-present)"; break;
1159 case 5: pszValid = " stale(pml5-entry-not-present)"; break;
1160 default: pszValid = " stale(VERR_PAGE_TABLE_NOT_PRESENT)"; break;
1161 }
1162 break;
1163 default: RTStrPrintf(szTmp, sizeof(szTmp), " stale(rc=%d)", rc); break;
1164 }
1165 else if (WalkFast.GCPhys != pTlbe->GCPhys)
1166 RTStrPrintf(szTmp, sizeof(szTmp), " stale(GCPhys=%RGp)", WalkFast.GCPhys);
1167 else if ( (~WalkFast.fEffective & (X86_PTE_RW | X86_PTE_US | X86_PTE_G | X86_PTE_A | X86_PTE_D))
1168 == ( (pTlbe->fFlagsAndPhysRev & ( IEMTLBE_F_PT_NO_WRITE | IEMTLBE_F_PT_NO_USER
1169 | IEMTLBE_F_PT_NO_DIRTY | IEMTLBE_F_PT_NO_ACCESSED))
1170 | fInvSlotG ) )
1171 pszValid = " still-valid";
1172 else if ( (~WalkFast.fEffective & (X86_PTE_RW | X86_PTE_US | X86_PTE_G))
1173 == ((pTlbe->fFlagsAndPhysRev & (IEMTLBE_F_PT_NO_WRITE | IEMTLBE_F_PT_NO_USER)) | fInvSlotG) )
1174 switch ( (~WalkFast.fEffective & (X86_PTE_A | X86_PTE_D))
1175 ^ (pTlbe->fFlagsAndPhysRev & (IEMTLBE_F_PT_NO_DIRTY | IEMTLBE_F_PT_NO_ACCESSED)) )
1176 {
1177 case X86_PTE_A:
1178 pszValid = WalkFast.fEffective & X86_PTE_A ? " still-valid(accessed-now)" : " still-valid(accessed-no-more)";
1179 break;
1180 case X86_PTE_D:
1181 pszValid = WalkFast.fEffective & X86_PTE_D ? " still-valid(dirty-now)" : " still-valid(dirty-no-more)";
1182 break;
1183 case X86_PTE_D | X86_PTE_A:
1184 RTStrPrintf(szTmp, sizeof(szTmp), " still-valid(%s%s)",
1185 (~WalkFast.fEffective & X86_PTE_D) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_DIRTY) ? ""
1186 : WalkFast.fEffective & X86_PTE_D ? "dirty-now" : "dirty-no-more",
1187 (~WalkFast.fEffective & X86_PTE_A) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_ACCESSED) ? ""
1188 : WalkFast.fEffective & X86_PTE_A ? " accessed-now" : " accessed-no-more");
1189 break;
1190 default: AssertFailed(); break;
1191 }
1192 else
1193 RTStrPrintf(szTmp, sizeof(szTmp), " stale(%s%s%s%s%s)",
1194 (~WalkFast.fEffective & X86_PTE_RW) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_WRITE) ? ""
1195 : WalkFast.fEffective & X86_PTE_RW ? "writable-now" : "writable-no-more",
1196 (~WalkFast.fEffective & X86_PTE_US) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_USER) ? ""
1197 : WalkFast.fEffective & X86_PTE_US ? " user-now" : " user-no-more",
1198 (~WalkFast.fEffective & X86_PTE_G) == fInvSlotG ? ""
1199 : WalkFast.fEffective & X86_PTE_G ? " global-now" : " global-no-more",
1200 (~WalkFast.fEffective & X86_PTE_D) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_DIRTY) ? ""
1201 : WalkFast.fEffective & X86_PTE_D ? " dirty-now" : " dirty-no-more",
1202 (~WalkFast.fEffective & X86_PTE_A) == (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_ACCESSED) ? ""
1203 : WalkFast.fEffective & X86_PTE_A ? " accessed-now" : " accessed-no-more");
1204 }
1205#else
1206 RT_NOREF(pVCpu);
1207#endif
1208
1209 pHlp->pfnPrintf(pHlp, IEMTLB_SLOT_FMT ": %s %#018RX64 -> %RGp / %p / %#05x %s%s%s%s%s%s%s/%s%s%s%s/%s %s%s\n",
1210 uSlot,
1211 (pTlbe->uTag & IEMTLB_REVISION_MASK) == uTlbRevision ? "valid "
1212 : (pTlbe->uTag & IEMTLB_REVISION_MASK) == 0 ? "empty "
1213 : "expired",
1214 GCPtr, /* -> */
1215 pTlbe->GCPhys, /* / */ pTlbe->pbMappingR3,
1216 /* / */
1217 (uint32_t)(pTlbe->fFlagsAndPhysRev & ~IEMTLBE_F_PHYS_REV),
1218 /* */
1219 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_WRITE ? "R-" : "RW",
1220 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_EXEC ? "-" : "X",
1221 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_ACCESSED ? "-" : "A",
1222 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_DIRTY ? "-" : "D",
1223 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_USER ? "U" : "S",
1224 !(uSlot & 1) ? "-" : "G",
1225 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_LARGE_PAGE ? "2M" : "4K",
1226 /* / */
1227 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_NO_WRITE ? "-" : "w",
1228 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_NO_READ ? "-" : "r",
1229 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_UNASSIGNED ? "u" : "-",
1230 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_CODE_PAGE ? "c" : "-",
1231 /* / */
1232 pTlbe->fFlagsAndPhysRev & IEMTLBE_F_NO_MAPPINGR3 ? "N" : "M",
1233 (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PHYS_REV) == pTlb->uTlbPhysRev ? "phys-valid"
1234 : (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PHYS_REV) == 0 ? "phys-empty" : "phys-expired",
1235 pszValid);
1236}
1237
1238
1239/** Displays one or more TLB slots. */
1240static void iemR3InfoTlbPrintSlots(PVMCPU pVCpu, PCDBGFINFOHLP pHlp, IEMTLB const *pTlb,
1241 uint32_t uSlot, uint32_t cSlots, uint32_t fFlags, bool *pfHeader)
1242{
1243 if (uSlot < RT_ELEMENTS(pTlb->aEntries))
1244 {
1245 if (cSlots > RT_ELEMENTS(pTlb->aEntries))
1246 {
1247 pHlp->pfnPrintf(pHlp, "error: Too many slots given: %u, adjusting it down to the max (%u)\n",
1248 cSlots, RT_ELEMENTS(pTlb->aEntries));
1249 cSlots = RT_ELEMENTS(pTlb->aEntries);
1250 }
1251
1252 iemR3InfoTlbPrintHeader(pVCpu, pHlp, pTlb, pfHeader);
1253 while (cSlots-- > 0)
1254 {
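            /* Work on a local copy of the entry, then advance the slot with wrap-around. */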
1255 IEMTLBENTRY const Tlbe = pTlb->aEntries[uSlot];
1256 iemR3InfoTlbPrintSlot(pVCpu, pHlp, pTlb, &Tlbe, uSlot, fFlags);
1257 uSlot = (uSlot + 1) % RT_ELEMENTS(pTlb->aEntries);
1258 }
1259 }
1260 else
1261 pHlp->pfnPrintf(pHlp, "error: TLB slot is out of range: %u (%#x), max %u (%#x)\n",
1262 uSlot, uSlot, RT_ELEMENTS(pTlb->aEntries) - 1, RT_ELEMENTS(pTlb->aEntries) - 1);
1263}
1264
1265
1266/** Displays the TLB slot for the given address. */
1267static void iemR3InfoTlbPrintAddress(PVMCPU pVCpu, PCDBGFINFOHLP pHlp, IEMTLB const *pTlb,
1268 uint64_t uAddress, uint32_t fFlags, bool *pfHeader)
1269{
1270 iemR3InfoTlbPrintHeader(pVCpu, pHlp, pTlb, pfHeader);
1271
1272 uint64_t const uTag = IEMTLB_CALC_TAG_NO_REV(uAddress);
1273#ifdef IEMTLB_TAG_TO_EVEN_INDEX
1274 uint32_t const uSlot = IEMTLB_TAG_TO_EVEN_INDEX(uTag);
1275#else
1276 uint32_t const uSlot = IEMTLB_TAG_TO_INDEX(uTag);
1277#endif
1278 IEMTLBENTRY const TlbeL = pTlb->aEntries[uSlot];
1279#ifndef VBOX_VMM_TARGET_ARMV8
1280 IEMTLBENTRY const TlbeG = pTlb->aEntries[uSlot + 1];
1281#endif
1282 pHlp->pfnPrintf(pHlp, "Address %#RX64 -> slot %#x - %s\n", uAddress, uSlot,
1283 TlbeL.uTag == (uTag | pTlb->uTlbRevision) ? "match"
1284 : (TlbeL.uTag & ~IEMTLB_REVISION_MASK) == uTag ? "expired" : "mismatch");
1285 iemR3InfoTlbPrintSlot(pVCpu, pHlp, pTlb, &TlbeL, uSlot, fFlags);
1286
1287#ifndef VBOX_VMM_TARGET_ARMV8
1288 pHlp->pfnPrintf(pHlp, "Address %#RX64 -> slot %#x - %s\n", uAddress, uSlot + 1,
1289 TlbeG.uTag == (uTag | pTlb->uTlbRevisionGlobal) ? "match"
1290 : (TlbeG.uTag & ~IEMTLB_REVISION_MASK) == uTag ? "expired" : "mismatch");
1291 iemR3InfoTlbPrintSlot(pVCpu, pHlp, pTlb, &TlbeG, uSlot + 1, fFlags);
1292#endif
1293}
1294
1295
1296/** Common worker for iemR3InfoDTlb and iemR3InfoITlb. */
1297static void iemR3InfoTlbCommon(PVM pVM, PCDBGFINFOHLP pHlp, int cArgs, char **papszArgs, bool fITlb)
1298{
1299 /*
1300 * This is entirely argument driven.
1301 */
1302 static RTGETOPTDEF const s_aOptions[] =
1303 {
1304 { "--cpu", 'c', RTGETOPT_REQ_UINT32 },
1305 { "--vcpu", 'c', RTGETOPT_REQ_UINT32 },
1306 { "--check", 'C', RTGETOPT_REQ_NOTHING },
1307 { "all", 'A', RTGETOPT_REQ_NOTHING },
1308 { "--all", 'A', RTGETOPT_REQ_NOTHING },
1309 { "--address", 'a', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1310 { "--range", 'r', RTGETOPT_REQ_UINT32_PAIR | RTGETOPT_FLAG_HEX },
1311 { "--slot", 's', RTGETOPT_REQ_UINT32 | RTGETOPT_FLAG_HEX },
1312 { "--only-valid", 'v', RTGETOPT_REQ_NOTHING },
1313 };
1314
1315 RTGETOPTSTATE State;
1316 int rc = RTGetOptInit(&State, cArgs, papszArgs, s_aOptions, RT_ELEMENTS(s_aOptions), 0 /*iFirst*/, 0 /*fFlags*/);
1317 AssertRCReturnVoid(rc);
1318
1319 uint32_t cActionArgs = 0;
1320 bool fNeedHeader = true;
1321 bool fAddressMode = true;
1322 uint32_t fFlags = 0;
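    /* Default to the calling EMT's VCpu, falling back to VCpu 0 when not called on an EMT. */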
1323 PVMCPU const pVCpuCall = VMMGetCpu(pVM);
1324 PVMCPU pVCpu = pVCpuCall;
1325 if (!pVCpu)
1326 pVCpu = VMMGetCpuById(pVM, 0);
1327
1328 RTGETOPTUNION ValueUnion;
1329 while ((rc = RTGetOpt(&State, &ValueUnion)) != 0)
1330 {
1331 switch (rc)
1332 {
1333 case 'c':
1334 if (ValueUnion.u32 >= pVM->cCpus)
1335 pHlp->pfnPrintf(pHlp, "error: Invalid CPU ID: %u\n", ValueUnion.u32);
1336 else if (!pVCpu || pVCpu->idCpu != ValueUnion.u32)
1337 {
1338 pVCpu = VMMGetCpuById(pVM, ValueUnion.u32);
1339 fNeedHeader = true;
1340 if (!pVCpuCall || pVCpuCall->idCpu != ValueUnion.u32)
1341 {
1342 pHlp->pfnPrintf(pHlp, "info: Can't check guest PTs when switching to a different VCpu! Targeting %u, on %u.\n",
1343 ValueUnion.u32, pVCpuCall ? pVCpuCall->idCpu : NIL_VMCPUID);
1344 fFlags &= ~IEMR3INFOTLB_F_CHECK;
1345 }
1346 }
1347 break;
1348
1349 case 'C':
1350 if (!pVCpuCall)
1351 pHlp->pfnPrintf(pHlp, "error: Can't check guest PT when not running on an EMT!\n");
1352 else if (pVCpu != pVCpuCall)
1353 pHlp->pfnPrintf(pHlp, "error: Can't check guest PTs when on a different EMT! Targeting %u, on %u.\n",
1354 pVCpu->idCpu, pVCpuCall->idCpu);
1355 else
1356 fFlags |= IEMR3INFOTLB_F_CHECK;
1357 break;
1358
1359 case 'a':
1360 iemR3InfoTlbPrintAddress(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1361 ValueUnion.u64, fFlags, &fNeedHeader);
1362 fAddressMode = true;
1363 cActionArgs++;
1364 break;
1365
1366 case 'A':
1367 iemR3InfoTlbPrintSlots(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1368 0, RT_ELEMENTS(pVCpu->iem.s.CodeTlb.aEntries), fFlags, &fNeedHeader);
1369 cActionArgs++;
1370 break;
1371
1372 case 'r':
1373 iemR3InfoTlbPrintSlots(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1374 ValueUnion.PairU32.uFirst, ValueUnion.PairU32.uSecond, fFlags, &fNeedHeader);
1375 fAddressMode = false;
1376 cActionArgs++;
1377 break;
1378
1379 case 's':
1380 iemR3InfoTlbPrintSlots(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1381 ValueUnion.u32, 1, fFlags, &fNeedHeader);
1382 fAddressMode = false;
1383 cActionArgs++;
1384 break;
1385
1386 case 'v':
1387 fFlags |= IEMR3INFOTLB_F_ONLY_VALID;
1388 break;
1389
1390 case VINF_GETOPT_NOT_OPTION:
1391 if (fAddressMode)
1392 {
1393 uint64_t uAddr;
1394 rc = RTStrToUInt64Full(ValueUnion.psz, 16, &uAddr);
1395 if (RT_SUCCESS(rc) && rc != VWRN_NUMBER_TOO_BIG)
1396 iemR3InfoTlbPrintAddress(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1397 uAddr, fFlags, &fNeedHeader);
1398 else
1399 pHlp->pfnPrintf(pHlp, "error: Invalid or malformed guest address '%s': %Rrc\n", ValueUnion.psz, rc);
1400 }
1401 else
1402 {
1403 uint32_t uSlot;
1404 rc = RTStrToUInt32Full(ValueUnion.psz, 16, &uSlot);
1405 if (RT_SUCCESS(rc) && rc != VWRN_NUMBER_TOO_BIG)
1406 iemR3InfoTlbPrintSlots(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1407 uSlot, 1, fFlags, &fNeedHeader);
1408 else
1409 pHlp->pfnPrintf(pHlp, "error: Invalid or malformed TLB slot number '%s': %Rrc\n", ValueUnion.psz, rc);
1410 }
1411 cActionArgs++;
1412 break;
1413
1414 case 'h':
1415 pHlp->pfnPrintf(pHlp,
1416 "Usage: info %ctlb [options]\n"
1417 "\n"
1418 "Options:\n"
1419 " -c<n>, --cpu=<n>, --vcpu=<n>\n"
1420 " Selects the CPU whose TLBs we're looking at. Default: Caller / 0\n"
1421 " -C,--check\n"
1422 " Check valid entries against guest PTs.\n"
1423 " -A, --all, all\n"
1424 " Display all the TLB entries (default if no other args).\n"
1425 " -a<virt>, --address=<virt>\n"
1426 " Shows the TLB entry for the specified guest virtual address.\n"
1427 " -r<slot:count>, --range=<slot:count>\n"
1428 " Shows the TLB entries for the specified slot range.\n"
1429 " -s<slot>,--slot=<slot>\n"
1430 " Shows the given TLB slot.\n"
1431 " -v,--only-valid\n"
1432 " Only show valid TLB entries (TAG, not phys)\n"
1433 "\n"
1434 "Non-options are interpreted according to the last -a, -r or -s option,\n"
1435 "defaulting to addresses if not preceded by any of those options.\n"
1436 , fITlb ? 'i' : 'd');
1437 return;
1438
1439 default:
1440 pHlp->pfnGetOptError(pHlp, rc, &ValueUnion, &State);
1441 return;
1442 }
1443 }
1444
1445 /*
1446 * If no action taken, we display all (-A) by default.
1447 */
1448 if (!cActionArgs)
1449 iemR3InfoTlbPrintSlots(pVCpu, pHlp, fITlb ? &pVCpu->iem.s.CodeTlb : &pVCpu->iem.s.DataTlb,
1450 0, RT_ELEMENTS(pVCpu->iem.s.CodeTlb.aEntries), fFlags, &fNeedHeader);
1451}
1452
1453
1454/**
1455 * @callback_method_impl{FNDBGFINFOARGVINT, itlb}
1456 */
1457static DECLCALLBACK(void) iemR3InfoITlb(PVM pVM, PCDBGFINFOHLP pHlp, int cArgs, char **papszArgs)
1458{
1459 return iemR3InfoTlbCommon(pVM, pHlp, cArgs, papszArgs, true /*fITlb*/);
1460}
1461
1462
1463/**
1464 * @callback_method_impl{FNDBGFINFOARGVINT, dtlb}
1465 */
1466static DECLCALLBACK(void) iemR3InfoDTlb(PVM pVM, PCDBGFINFOHLP pHlp, int cArgs, char **papszArgs)
1467{
1468 return iemR3InfoTlbCommon(pVM, pHlp, cArgs, papszArgs, false /*fITlb*/);
1469}
1470
1471
1472#ifdef IEM_WITH_TLB_TRACE
1473/**
1474 * @callback_method_impl{FNDBGFINFOARGVINT, tlbtrace}
1475 */
1476static DECLCALLBACK(void) iemR3InfoTlbTrace(PVM pVM, PCDBGFINFOHLP pHlp, int cArgs, char **papszArgs)
1477{
1478 /*
1479 * Parse arguments.
1480 */
1481 static RTGETOPTDEF const s_aOptions[] =
1482 {
1483 { "--cpu", 'c', RTGETOPT_REQ_UINT32 },
1484 { "--vcpu", 'c', RTGETOPT_REQ_UINT32 },
1485 { "--last", 'l', RTGETOPT_REQ_UINT32 },
1486 { "--limit", 'l', RTGETOPT_REQ_UINT32 },
1487 { "--stop-at-global-flush", 'g', RTGETOPT_REQ_NOTHING },
1488 { "--resolve-rip", 'r', RTGETOPT_REQ_NOTHING },
1489 };
1490
1491 RTGETOPTSTATE State;
1492 int rc = RTGetOptInit(&State, cArgs, papszArgs, s_aOptions, RT_ELEMENTS(s_aOptions), 0 /*iFirst*/, 0 /*fFlags*/);
1493 AssertRCReturnVoid(rc);
1494
1495 uint32_t cLimit = UINT32_MAX;
1496 bool fStopAtGlobalFlush = false;
1497 bool fResolveRip = false;
1498 PVMCPU const pVCpuCall = VMMGetCpu(pVM);
1499 PVMCPU pVCpu = pVCpuCall;
1500 if (!pVCpu)
1501 pVCpu = VMMGetCpuById(pVM, 0);
1502
1503 RTGETOPTUNION ValueUnion;
1504 while ((rc = RTGetOpt(&State, &ValueUnion)) != 0)
1505 {
1506 switch (rc)
1507 {
1508 case 'c':
1509 if (ValueUnion.u32 >= pVM->cCpus)
1510 pHlp->pfnPrintf(pHlp, "error: Invalid CPU ID: %u\n", ValueUnion.u32);
1511 else if (!pVCpu || pVCpu->idCpu != ValueUnion.u32)
1512 pVCpu = VMMGetCpuById(pVM, ValueUnion.u32);
1513 break;
1514
1515 case 'l':
1516 cLimit = ValueUnion.u32;
1517 break;
1518
1519 case 'g':
1520 fStopAtGlobalFlush = true;
1521 break;
1522
1523 case 'r':
1524 fResolveRip = true;
1525 break;
1526
1527 case 'h':
1528 pHlp->pfnPrintf(pHlp,
1529 "Usage: info tlbtrace [options] [n]\n"
1530 "\n"
1531 "Options:\n"
1532 " -c<n>, --cpu=<n>, --vcpu=<n>\n"
1533 " Selects the CPU whose TLB trace we're looking at. Default: Caller / 0\n"
1534 " [n], -l<n>, --last=<n>\n"
1535 " Limit display to the last N entries. Default: all\n"
1536 " -g, --stop-at-global-flush\n"
1537 " Stop after the first global flush entry.\n"
1538 " -r, --resolve-rip\n"
1539 " Resolve symbols for the flattened RIP addresses.\n"
1540 );
1541 return;
1542
1543 case VINF_GETOPT_NOT_OPTION:
1544 rc = RTStrToUInt32Full(ValueUnion.psz, 0, &cLimit);
1545 if (RT_SUCCESS(rc))
1546 break;
1547 pHlp->pfnPrintf(pHlp, "error: failed to convert '%s' to a number: %Rrc\n", ValueUnion.psz, rc);
1548 return;
1549
1550 default:
1551 pHlp->pfnGetOptError(pHlp, rc, &ValueUnion, &State);
1552 return;
1553 }
1554 }
1555
1556 /*
1557 * Get the details.
1558 */
1559 AssertReturnVoid(pVCpu);
1560 Assert(pVCpu->iem.s.cTlbTraceEntriesShift <= 28);
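    /* The trace log is a power-of-two sized ring buffer; idxTlbTraceEntry is the next write position, so walk backwards from it, newest entry first. */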
1561 uint32_t idx = pVCpu->iem.s.idxTlbTraceEntry;
1562 uint32_t const cShift = RT_MIN(pVCpu->iem.s.cTlbTraceEntriesShift, 28);
1563 uint32_t const fMask = RT_BIT_32(cShift) - 1;
1564 uint32_t cLeft = RT_MIN(RT_MIN(idx, RT_BIT_32(cShift)), cLimit);
1565 PCIEMTLBTRACEENTRY paEntries = pVCpu->iem.s.paTlbTraceEntries;
1566 if (cLeft && paEntries)
1567 {
1568 /*
1569 * Display the entries.
1570 */
1571 pHlp->pfnPrintf(pHlp, "TLB Trace for CPU %u:\n", pVCpu->idCpu);
1572 while (cLeft-- > 0)
1573 {
1574 PCIEMTLBTRACEENTRY const pCur = &paEntries[--idx & fMask];
1575 const char *pszSymbol = "";
1576 union
1577 {
1578 RTDBGSYMBOL Symbol;
1579 char ach[sizeof(RTDBGSYMBOL) + 32];
1580 } uBuf;
1581 if (fResolveRip)
1582 {
1583 RTGCINTPTR offDisp = 0;
1584 DBGFADDRESS Addr;
1585 rc = DBGFR3AsSymbolByAddr(pVM->pUVM, DBGF_AS_GLOBAL, DBGFR3AddrFromFlat(pVM->pUVM, &Addr, pCur->rip),
1586 RTDBGSYMADDR_FLAGS_LESS_OR_EQUAL
1587 | RTDBGSYMADDR_FLAGS_SKIP_ABS
1588 | RTDBGSYMADDR_FLAGS_SKIP_ABS_IN_DEFERRED,
1589 &offDisp, &uBuf.Symbol, NULL);
1590 if (RT_SUCCESS(rc))
1591 {
1592 /* Add displacement. */
1593 if (offDisp)
1594 {
1595 size_t const cchName = strlen(uBuf.Symbol.szName);
1596 char * const pszEndName = &uBuf.Symbol.szName[cchName];
1597 size_t const cbLeft = sizeof(uBuf) - sizeof(uBuf.Symbol) + sizeof(uBuf.Symbol.szName) - cchName;
1598 if (offDisp > 0)
1599 RTStrPrintf(pszEndName, cbLeft, "+%#1RGv", offDisp);
1600 else
1601 RTStrPrintf(pszEndName, cbLeft, "-%#1RGv", -offDisp);
1602 }
1603
1604 /* Put a space before it. */
1605 AssertCompile(RTASSERT_OFFSET_OF(RTDBGSYMBOL, szName) > 0);
1606 char *pszName = uBuf.Symbol.szName;
1607 *--pszName = ' ';
1608 pszSymbol = pszName;
1609 }
1610 }
1611 static const char *s_apszTlbType[2] = { "code", "data" };
1612 static const char *s_apszScanType[4] = { "skipped", "global", "non-global", "both" };
1613 switch (pCur->enmType)
1614 {
1615 case kIemTlbTraceType_InvlPg:
1616 pHlp->pfnPrintf(pHlp, "%u: %016RX64 invlpg %RGv slot=" IEMTLB_SLOT_FMT "%s\n", idx, pCur->rip,
1617 pCur->u64Param, (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pCur->u64Param), pszSymbol);
1618 break;
1619 case kIemTlbTraceType_EvictSlot:
1620 pHlp->pfnPrintf(pHlp, "%u: %016RX64 evict %s slot=" IEMTLB_SLOT_FMT " %RGv (%#RX64) gcphys=%RGp%s\n",
1621 idx, pCur->rip, s_apszTlbType[pCur->bParam & 1], pCur->u32Param,
1622 (RTGCINTPTR)((pCur->u64Param & ~IEMTLB_REVISION_MASK) << (64 - IEMTLB_TAG_ADDR_WIDTH))
1623 >> (64 - IEMTLB_TAG_ADDR_WIDTH - GUEST_PAGE_SHIFT), pCur->u64Param,
1624 pCur->u64Param2, pszSymbol);
1625 break;
1626 case kIemTlbTraceType_LargeEvictSlot:
1627 pHlp->pfnPrintf(pHlp, "%u: %016RX64 large evict %s slot=" IEMTLB_SLOT_FMT " %RGv (%#RX64) gcphys=%RGp%s\n",
1628 idx, pCur->rip, s_apszTlbType[pCur->bParam & 1], pCur->u32Param,
1629 (RTGCINTPTR)((pCur->u64Param & ~IEMTLB_REVISION_MASK) << (64 - IEMTLB_TAG_ADDR_WIDTH))
1630 >> (64 - IEMTLB_TAG_ADDR_WIDTH - GUEST_PAGE_SHIFT), pCur->u64Param,
1631 pCur->u64Param2, pszSymbol);
1632 break;
1633 case kIemTlbTraceType_LargeScan:
1634 pHlp->pfnPrintf(pHlp, "%u: %016RX64 large scan %s %s%s\n", idx, pCur->rip, s_apszTlbType[pCur->bParam & 1],
1635 s_apszScanType[pCur->u32Param & 3], pszSymbol);
1636 break;
1637
1638 case kIemTlbTraceType_Flush:
1639 pHlp->pfnPrintf(pHlp, "%u: %016RX64 flush %s rev=%#RX64%s\n", idx, pCur->rip,
1640 s_apszTlbType[pCur->bParam & 1], pCur->u64Param, pszSymbol);
1641 break;
1642 case kIemTlbTraceType_FlushGlobal:
1643 pHlp->pfnPrintf(pHlp, "%u: %016RX64 flush %s rev=%#RX64 grev=%#RX64%s\n", idx, pCur->rip,
1644 s_apszTlbType[pCur->bParam & 1], pCur->u64Param, pCur->u64Param2, pszSymbol);
1645 if (fStopAtGlobalFlush)
1646 return;
1647 break;
1648 case kIemTlbTraceType_Load:
1649 case kIemTlbTraceType_LoadGlobal:
1650 pHlp->pfnPrintf(pHlp, "%u: %016RX64 %cload %s %RGv slot=" IEMTLB_SLOT_FMT " gcphys=%RGp fTlb=%#RX32%s\n",
1651 idx, pCur->rip,
1652 pCur->enmType == kIemTlbTraceType_LoadGlobal ? 'g' : 'l', s_apszTlbType[pCur->bParam & 1],
1653 pCur->u64Param,
1654 (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pCur->u64Param)
1655 | (pCur->enmType == kIemTlbTraceType_LoadGlobal),
1656 (RTGCPTR)pCur->u64Param2, pCur->u32Param, pszSymbol);
1657 break;
1658
1659 case kIemTlbTraceType_Load_Cr0:
1660 pHlp->pfnPrintf(pHlp, "%u: %016RX64 load cr0 %08RX64 (was %08RX64)%s\n",
1661 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pszSymbol);
1662 break;
1663 case kIemTlbTraceType_Load_Cr3:
1664 pHlp->pfnPrintf(pHlp, "%u: %016RX64 load cr3 %016RX64 (was %016RX64)%s\n",
1665 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pszSymbol);
1666 break;
1667 case kIemTlbTraceType_Load_Cr4:
1668 pHlp->pfnPrintf(pHlp, "%u: %016RX64 load cr4 %08RX64 (was %08RX64)%s\n",
1669 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pszSymbol);
1670 break;
1671 case kIemTlbTraceType_Load_Efer:
1672 pHlp->pfnPrintf(pHlp, "%u: %016RX64 load efer %016RX64 (was %016RX64)%s\n",
1673 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pszSymbol);
1674 break;
1675
1676 case kIemTlbTraceType_Irq:
1677 pHlp->pfnPrintf(pHlp, "%u: %016RX64 irq %#04x flags=%#x eflboth=%#RX64%s\n",
1678 idx, pCur->rip, pCur->bParam, pCur->u32Param,
1679 pCur->u64Param & ((RT_BIT_64(CPUMX86EFLAGS_HW_BITS) - 1) | CPUMX86EFLAGS_INT_MASK_64),
1680 pszSymbol);
1681 break;
1682 case kIemTlbTraceType_Xcpt:
1683 if (pCur->u32Param & IEM_XCPT_FLAGS_CR2)
1684 pHlp->pfnPrintf(pHlp, "%u: %016RX64 xcpt %#04x flags=%#x errcd=%#x cr2=%RX64%s\n",
1685 idx, pCur->rip, pCur->bParam, pCur->u32Param, pCur->u64Param, pCur->u64Param2, pszSymbol);
1686 else if (pCur->u32Param & IEM_XCPT_FLAGS_ERR)
1687 pHlp->pfnPrintf(pHlp, "%u: %016RX64 xcpt %#04x flags=%#x errcd=%#x%s\n",
1688 idx, pCur->rip, pCur->bParam, pCur->u32Param, pCur->u64Param, pszSymbol);
1689 else
1690 pHlp->pfnPrintf(pHlp, "%u: %016RX64 xcpt %#04x flags=%#x%s\n",
1691 idx, pCur->rip, pCur->bParam, pCur->u32Param, pszSymbol);
1692 break;
1693 case kIemTlbTraceType_IRet:
1694 pHlp->pfnPrintf(pHlp, "%u: %016RX64 iret cs:rip=%04x:%016RX64 efl=%08RX32%s\n",
1695 idx, pCur->rip, pCur->u32Param, pCur->u64Param, (uint32_t)pCur->u64Param2, pszSymbol);
1696 break;
1697
1698 case kIemTlbTraceType_Tb_Compile:
1699 pHlp->pfnPrintf(pHlp, "%u: %016RX64 tb comp GCPhysPc=%012RX64%s\n",
1700 idx, pCur->rip, pCur->u64Param, pszSymbol);
1701 break;
1702 case kIemTlbTraceType_Tb_Exec_Threaded:
1703 pHlp->pfnPrintf(pHlp, "%u: %016RX64 tb thrd GCPhysPc=%012RX64 tb=%p used=%u%s\n",
1704 idx, pCur->rip, pCur->u64Param, (uintptr_t)pCur->u64Param2, pCur->u32Param, pszSymbol);
1705 break;
1706 case kIemTlbTraceType_Tb_Exec_Native:
1707 pHlp->pfnPrintf(pHlp, "%u: %016RX64 tb n8ve GCPhysPc=%012RX64 tb=%p used=%u%s\n",
1708 idx, pCur->rip, pCur->u64Param, (uintptr_t)pCur->u64Param2, pCur->u32Param, pszSymbol);
1709 break;
1710
1711 case kIemTlbTraceType_User0:
1712 pHlp->pfnPrintf(pHlp, "%u: %016RX64 user0 %016RX64 %016RX64 %08RX32 %02RX8%s\n",
1713 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pCur->u32Param, pCur->bParam, pszSymbol);
1714 break;
1715 case kIemTlbTraceType_User1:
1716 pHlp->pfnPrintf(pHlp, "%u: %016RX64 user1 %016RX64 %016RX64 %08RX32 %02RX8%s\n",
1717 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pCur->u32Param, pCur->bParam, pszSymbol);
1718 break;
1719 case kIemTlbTraceType_User2:
1720 pHlp->pfnPrintf(pHlp, "%u: %016RX64 user2 %016RX64 %016RX64 %08RX32 %02RX8%s\n",
1721 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pCur->u32Param, pCur->bParam, pszSymbol);
1722 break;
1723 case kIemTlbTraceType_User3:
1724 pHlp->pfnPrintf(pHlp, "%u: %016RX64 user3 %016RX64 %016RX64 %08RX32 %02RX8%s\n",
1725 idx, pCur->rip, pCur->u64Param, pCur->u64Param2, pCur->u32Param, pCur->bParam, pszSymbol);
1726 break;
1727
1728 case kIemTlbTraceType_Invalid:
1729 pHlp->pfnPrintf(pHlp, "%u: Invalid!\n", idx);
1730 break;
1731 }
1732 }
1733 }
1734 else
1735 pHlp->pfnPrintf(pHlp, "No trace entries to display\n");
1736}
1737#endif /* IEM_WITH_TLB_TRACE */
1738
1739#if defined(VBOX_WITH_IEM_RECOMPILER) && !defined(VBOX_VMM_TARGET_ARMV8)
1740/**
1741 * @callback_method_impl{FNDBGFINFOARGVINT, tb}
1742 */
1743static DECLCALLBACK(void) iemR3InfoTb(PVM pVM, PCDBGFINFOHLP pHlp, int cArgs, char **papszArgs)
1744{
1745 /*
1746 * Parse arguments.
1747 */
1748 static RTGETOPTDEF const s_aOptions[] =
1749 {
1750 { "--cpu", 'c', RTGETOPT_REQ_UINT32 },
1751 { "--vcpu", 'c', RTGETOPT_REQ_UINT32 },
1752 { "--addr", 'a', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1753 { "--address", 'a', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1754 { "--phys", 'p', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1755 { "--physical", 'p', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1756 { "--phys-addr", 'p', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1757 { "--phys-address", 'p', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1758 { "--physical-address", 'p', RTGETOPT_REQ_UINT64 | RTGETOPT_FLAG_HEX },
1759 { "--flags", 'f', RTGETOPT_REQ_UINT32 | RTGETOPT_FLAG_HEX },
1760 };
1761
1762 RTGETOPTSTATE State;
1763 int rc = RTGetOptInit(&State, cArgs, papszArgs, s_aOptions, RT_ELEMENTS(s_aOptions), 0 /*iFirst*/, 0 /*fFlags*/);
1764 AssertRCReturnVoid(rc);
1765
1766 PVMCPU const pVCpuThis = VMMGetCpu(pVM);
1767 PVMCPU pVCpu = pVCpuThis ? pVCpuThis : VMMGetCpuById(pVM, 0);
1768 RTGCPHYS GCPhysPc = NIL_RTGCPHYS;
1769 RTGCPTR GCVirt = NIL_RTGCPTR;
1770 uint32_t fFlags = UINT32_MAX;
1771
1772 RTGETOPTUNION ValueUnion;
1773 while ((rc = RTGetOpt(&State, &ValueUnion)) != 0)
1774 {
1775 switch (rc)
1776 {
1777 case 'c':
1778 if (ValueUnion.u32 >= pVM->cCpus)
1779 pHlp->pfnPrintf(pHlp, "error: Invalid CPU ID: %u\n", ValueUnion.u32);
1780 else if (!pVCpu || pVCpu->idCpu != ValueUnion.u32)
1781 pVCpu = VMMGetCpuById(pVM, ValueUnion.u32);
1782 break;
1783
1784 case 'a':
1785 GCVirt = ValueUnion.u64;
1786 GCPhysPc = NIL_RTGCPHYS;
1787 break;
1788
1789 case 'p':
1790 GCVirt = NIL_RTGCPTR;
1791 GCPhysPc = ValueUnion.u64;
1792 break;
1793
1794 case 'f':
1795 fFlags = ValueUnion.u32;
1796 break;
1797
1798 case 'h':
1799 pHlp->pfnPrintf(pHlp,
1800 "Usage: info tb [options]\n"
1801 "\n"
1802 "Options:\n"
1803 " -c<n>, --cpu=<n>, --vcpu=<n>\n"
1804 " Selects the CPU whose TBs we're looking at. Default: Caller / 0\n"
1805 " -a<virt>, --address=<virt>\n"
1806 " Shows the TB for the specified guest virtual address.\n"
1807 " -p<phys>, --phys=<phys>, --phys-addr=<phys>\n"
1808 " Shows the TB for the specified guest physical address.\n"
1809 " -f<flags>,--flags=<flags>\n"
1810 " The TB flags value (hex) to use when looking up the TB.\n"
1811 "\n"
1812 "The default is to use CS:RIP and derive flags from the CPU mode.\n");
1813 return;
1814
1815 default:
1816 pHlp->pfnGetOptError(pHlp, rc, &ValueUnion, &State);
1817 return;
1818 }
1819 }
1820
1821 /* Currently, only do work on the same EMT. */
1822 if (pVCpu != pVCpuThis)
1823 {
1824 pHlp->pfnPrintf(pHlp, "TODO: Cross EMT calling not supported yet: targeting %u, caller on %d\n",
1825 pVCpu->idCpu, pVCpuThis ? (int)pVCpuThis->idCpu : -1);
1826 return;
1827 }
1828
1829 /*
1830 * Defaults.
1831 */
1832 if (GCPhysPc == NIL_RTGCPHYS)
1833 {
1834 if (GCVirt == NIL_RTGCPTR)
1835 GCVirt = CPUMGetGuestFlatPC(pVCpu);
1836 rc = PGMPhysGCPtr2GCPhys(pVCpu, GCVirt, &GCPhysPc);
1837 if (RT_FAILURE(rc))
1838 {
1839 pHlp->pfnPrintf(pHlp, "Failed to convert %%%RGv to a guest physical address: %Rrc\n", GCVirt, rc);
1840 return;
1841 }
1842 }
1843 if (fFlags == UINT32_MAX)
1844 {
1845 /* Note! This is duplicating code in IEMAllThrdRecompiler. */
1846 fFlags = iemCalcExecFlags(pVCpu);
1847 if (pVM->cCpus == 1)
1848 fFlags |= IEM_F_X86_DISREGARD_LOCK;
1849 if (CPUMIsInInterruptShadow(&pVCpu->cpum.GstCtx))
1850 fFlags |= IEMTB_F_INHIBIT_SHADOW;
1851 if (CPUMAreInterruptsInhibitedByNmiEx(&pVCpu->cpum.GstCtx))
1852 fFlags |= IEMTB_F_INHIBIT_NMI;
1853 if ((IEM_F_MODE_CPUMODE_MASK & fFlags) != IEMMODE_64BIT)
1854 {
1855 int64_t const offFromLim = (int64_t)pVCpu->cpum.GstCtx.cs.u32Limit - (int64_t)pVCpu->cpum.GstCtx.eip;
1856 if (offFromLim < X86_PAGE_SIZE + 16 - (int32_t)(pVCpu->cpum.GstCtx.cs.u64Base & GUEST_PAGE_OFFSET_MASK))
1857 fFlags |= IEMTB_F_CS_LIM_CHECKS;
1858 }
1859 }
1860
1861 /*
1862 * Do the lookup...
1863 *
1864 * Note! This is also duplicating code in IEMAllThrdRecompiler. We don't
1865 * have much choice since we don't want to increase use counters and
1866 * trigger native recompilation.
1867 */
1868 fFlags &= IEMTB_F_KEY_MASK;
1869 IEMTBCACHE const * const pTbCache = pVCpu->iem.s.pTbCacheR3;
1870 uint32_t const idxHash = IEMTBCACHE_HASH(pTbCache, fFlags, GCPhysPc);
1871 PCIEMTB pTb = IEMTBCACHE_PTR_GET_TB(pTbCache->apHash[idxHash]);
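    /* Walk the collision chain of the hash bucket looking for a TB with matching physical PC and key flags. */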
1872 while (pTb)
1873 {
1874 if (pTb->GCPhysPc == GCPhysPc)
1875 {
1876 if ((pTb->fFlags & IEMTB_F_KEY_MASK) == fFlags)
1877 {
1878 /// @todo if (pTb->x86.fAttr == (uint16_t)pVCpu->cpum.GstCtx.cs.Attr.u)
1879 break;
1880 }
1881 }
1882 pTb = pTb->pNext;
1883 }
1884 if (!pTb)
1885 pHlp->pfnPrintf(pHlp, "PC=%RGp fFlags=%#x - no TB found on #%u\n", GCPhysPc, fFlags, pVCpu->idCpu);
1886 else
1887 {
1888 /*
1889 * Disassemble according to type.
1890 */
1891 switch (pTb->fFlags & IEMTB_F_TYPE_MASK)
1892 {
1893# ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER
1894 case IEMTB_F_TYPE_NATIVE:
1895 pHlp->pfnPrintf(pHlp, "PC=%RGp fFlags=%#x on #%u: %p - native\n", GCPhysPc, fFlags, pVCpu->idCpu, pTb);
1896 iemNativeDisassembleTb(pVCpu, pTb, pHlp);
1897 break;
1898# endif
1899
1900 case IEMTB_F_TYPE_THREADED:
1901 pHlp->pfnPrintf(pHlp, "PC=%RGp fFlags=%#x on #%u: %p - threaded\n", GCPhysPc, fFlags, pVCpu->idCpu, pTb);
1902 iemThreadedDisassembleTb(pTb, pHlp);
1903 break;
1904
1905 default:
1906 pHlp->pfnPrintf(pHlp, "PC=%RGp fFlags=%#x on #%u: %p - ??? %#x\n",
1907 GCPhysPc, fFlags, pVCpu->idCpu, pTb, pTb->fFlags);
1908 break;
1909 }
1910 }
1911}
1912#endif /* VBOX_WITH_IEM_RECOMPILER && !VBOX_VMM_TARGET_ARMV8 */
1913
1914
1915#ifdef VBOX_WITH_DEBUGGER
1916
1917/** @callback_method_impl{FNDBGCCMD,
1918 * Implements the 'iemflushtlb' command. }
1919 */
1920static DECLCALLBACK(int) iemR3DbgFlushTlbs(PCDBGCCMD pCmd, PDBGCCMDHLP pCmdHlp, PUVM pUVM, PCDBGCVAR paArgs, unsigned cArgs)
1921{
1922 VMCPUID idCpu = DBGCCmdHlpGetCurrentCpu(pCmdHlp);
1923 PVMCPU pVCpu = VMMR3GetCpuByIdU(pUVM, idCpu);
1924 if (pVCpu)
1925 {
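        /* Perform the flush on the target EMT as a priority request and wait for it to complete. */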
1926 VMR3ReqPriorityCallVoidWaitU(pUVM, idCpu, (PFNRT)IEMTlbInvalidateAllGlobal, 1, pVCpu);
1927 return VINF_SUCCESS;
1928 }
1929 RT_NOREF(paArgs, cArgs);
1930 return DBGCCmdHlpFail(pCmdHlp, pCmd, "failed to get the PVMCPU for the current CPU");
1931}
1932
1933
1934/**
1935 * Called by IEMR3Init to register debugger commands.
1936 */
1937static void iemR3RegisterDebuggerCommands(void)
1938{
1939 /*
1940 * Register debugger commands.
1941 */
1942 static DBGCCMD const s_aCmds[] =
1943 {
1944 {
1945 /* .pszCmd = */ "iemflushtlb",
1946 /* .cArgsMin = */ 0,
1947 /* .cArgsMax = */ 0,
1948 /* .paArgDescs = */ NULL,
1949 /* .cArgDescs = */ 0,
1950 /* .fFlags = */ 0,
1951 /* .pfnHandler = */ iemR3DbgFlushTlbs,
1952 /* .pszSyntax = */ "",
1953 /* .pszDescription = */ "Flushes the code and data TLBs"
1954 },
1955 };
1956
1957 int rc = DBGCRegisterCommands(&s_aCmds[0], RT_ELEMENTS(s_aCmds));
1958 AssertLogRelRC(rc);
1959}
1960
1961#endif /* VBOX_WITH_DEBUGGER */
1962