VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 97576

最後變更 在這個檔案從97576是 97516,由 vboxsync 提交於 2 年 前

ValKit/bs3-cpu-basic-2: Added some basic indirect far call tests. bugref:9898

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 219.4 KB
 
1/* $Id: bs3-cpu-basic-2-x0.c 97516 2022-11-11 22:54:35Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.alldomusa.eu.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45
46
47/*********************************************************************************************************************************
48* Defined Constants And Macros *
49*********************************************************************************************************************************/
#undef CHECK_MEMBER
/** Compares a trap-frame member against its expected value and reports a
 *  failure via bs3CpuBasic2_FailedF on mismatch.
 *  @note Function-like macro: a_Actual and a_Expected are evaluated twice on
 *        the failure path, so avoid side effects in the arguments. */
#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
    do \
    { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
    } while (0)
57
58
59/** Indicating that we've got operand size prefix and that it matters. */
60#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
61/** Worker requires 386 or later. */
62#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
63
64
65/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
66 *
67 * These are flags, though we've precombined a few shortening things down.
68 *
69 * @{ */
70#define MYOP_LD 0x1 /**< The instruction loads. */
71#define MYOP_ST 0x2 /**< The instruction stores */
72#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
73#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
74
75#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
76#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
77/** @} */
78
79
80/*********************************************************************************************************************************
81* Structures and Typedefs *
82*********************************************************************************************************************************/
/** Descriptor type / S-bit pair used by the invalid CS/SS selector-type
 *  tables further down (currently compiled out with \#if 0). */
typedef struct BS3CB2INVLDESCTYPE
{
    uint8_t u4Type;     /**< The 4-bit descriptor type value. */
    uint8_t u1DescType; /**< The S bit: 1 = code/data, 0 = system (judging by the table entries below). */
} BS3CB2INVLDESCTYPE;
88
/** Describes one SIDT/SGDT/LIDT/LGDT assembly test worker. */
typedef struct BS3CB2SIDTSGDT
{
    const char *pszDesc;    /**< Human readable description of the instruction form. */
    FPFNBS3FAR  fpfnWorker; /**< Far pointer to the assembly worker snippet. */
    uint8_t     cbInstr;    /**< Size of the instruction (sequence) in bytes. */
    bool        fSs;        /**< Whether the memory operand carries an SS segment override. */
    uint8_t     bMode;      /**< BS3_MODE_CODE_XXX mask of code modes the worker applies to. */
    uint8_t     fFlags;     /**< BS3CB2SIDTSGDT_F_XXX. */
} BS3CB2SIDTSGDT;
98
99
/** Assembly test snippet taking no arguments (terminates in an ud2 that the
 *  test framework traps on). */
typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);

/** Describes one alignment-check (\#AC) test code snippet. */
typedef struct FNBS3CPUBASIC2ACTSTCODE
{
    FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn;           /**< The code snippet. */
    uint8_t                          fOp;           /**< MYOP_XXX flags describing the memory access. */
    uint16_t                         cbMem;         /**< Size of the memory access in bytes. */
    uint8_t                          cbAlign;       /**< Required/natural alignment of the access. */
    uint8_t                          offFaultInstr; /**< For skipping fninit with the fld test. */
} FNBS3CPUBASIC2ACTSTCODE;
typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;

/** Per-code-mode table of \#AC test snippets.
 * @note NOTE(review): the struct tag says 'ACTTST' while the typedef name says
 *       'PFTTST' — looks like a copy/paste inconsistency.  Harmless since the
 *       tag is otherwise unused here, but confirm before renaming either (the
 *       PFTTST typedef names are referenced by g_aCmnModes below). */
typedef struct BS3CPUBASIC2ACTTSTCMNMODE
{
    uint8_t                   bMode;     /**< The BS3_MODE_CODE_XXX value this entry is for. */
    uint16_t                  cEntries;  /**< Number of entries in paEntries. */
    PCFNBS3CPUBASIC2ACTSTCODE paEntries; /**< The test snippet table. */
} BS3CPUBASIC2PFTTSTCMNMODE;
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
119
120
121/*********************************************************************************************************************************
122* External Symbols *
123*********************************************************************************************************************************/
124extern FNBS3FAR bs3CpuBasic2_Int80;
125extern FNBS3FAR bs3CpuBasic2_Int81;
126extern FNBS3FAR bs3CpuBasic2_Int82;
127extern FNBS3FAR bs3CpuBasic2_Int83;
128
129extern FNBS3FAR bs3CpuBasic2_ud2;
130#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
131extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
132
133extern FNBS3FAR bs3CpuBasic2_iret;
134extern FNBS3FAR bs3CpuBasic2_iret_opsize;
135extern FNBS3FAR bs3CpuBasic2_iret_rexw;
136
137extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
138extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
139extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
140extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
141extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
142extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
143extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
144extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
145extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
146extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
147extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
148extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
149
150extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
156extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
157extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
158extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
159extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
160extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
161extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
162
163extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
169extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
170extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176
177extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
183extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
184extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
185extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
186extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
187extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
188extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
189
190
191/* bs3-cpu-basic-2-template.mac: */
192FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
193FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
194FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
195FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
196FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
197FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
198FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
201
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
208FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
211
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
218FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
221
222
223/*********************************************************************************************************************************
224* Global Variables *
225*********************************************************************************************************************************/
/** Name of the current test mode (set by bs3CpuBasic2_SetGlobals).
 * @note NOTE(review): deliberately initialized to a bogus non-NULL/non-zero
 *       value — presumably to keep the variable out of BSS; confirm against
 *       bs3kit conventions before changing. */
static const char BS3_FAR  *g_pszTestMode = (const char *)1;
/** The current test mode (BS3_MODE_XXX), set by bs3CpuBasic2_SetGlobals. */
static uint8_t              g_bTestMode = 1;
/** Whether the current system is 16-bit (BS3_MODE_IS_16BIT_SYS). */
static bool                 g_f16BitSys = 1;
229
230
/** SIDT test workers.
 *  Columns: description, worker, cbInstr, fSs, bMode mask, fFlags. */
static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
{
    { "sidt [bx]",            bs3CpuBasic2_sidt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sidt [ss:bx]",         bs3CpuBasic2_sidt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sidt [bx]",        bs3CpuBasic2_sidt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sidt [ss:bx]",     bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sidt [ebx]",           bs3CpuBasic2_sidt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32, 0 },
    { "sidt [ss:ebx]",        bs3CpuBasic2_sidt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ebx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ss:ebx]",    bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32, 0 },
    { "sidt [rbx]",           bs3CpuBasic2_sidt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sidt [rbx]",       bs3CpuBasic2_sidt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sidt [rbx]",       bs3CpuBasic2_sidt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sidt [rbx]",   bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
247
/** SGDT test workers.
 *  Columns: description, worker, cbInstr, fSs, bMode mask, fFlags. */
static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
{
    { "sgdt [bx]",            bs3CpuBasic2_sgdt_bx_ud2_c16,             3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sgdt [ss:bx]",         bs3CpuBasic2_sgdt_ss_bx_ud2_c16,          4, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sgdt [bx]",        bs3CpuBasic2_sgdt_opsize_bx_ud2_c16,      4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sgdt [ss:bx]",     bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16,   5, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sgdt [ebx]",           bs3CpuBasic2_sgdt_bx_ud2_c32,             3, false, BS3_MODE_CODE_32, 0 },
    { "sgdt [ss:ebx]",        bs3CpuBasic2_sgdt_ss_bx_ud2_c32,          4, true,  BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ebx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c32,      4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ss:ebx]",    bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32,   5, true,  BS3_MODE_CODE_32, 0 },
    { "sgdt [rbx]",           bs3CpuBasic2_sgdt_bx_ud2_c64,             3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sgdt [rbx]",       bs3CpuBasic2_sgdt_rexw_bx_ud2_c64,        4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sgdt [rbx]",       bs3CpuBasic2_sgdt_opsize_bx_ud2_c64,      4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sgdt [rbx]",   bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
264
/** LIDT test workers.
 *  Each worker does lidt [*bx], stores the result via sidt [es:*di], then
 *  restores the original via lidt [es:*si] (see the worker names).
 *  Columns: description, worker, cbInstr, fSs, bMode mask, fFlags. */
static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
{
    { "lidt [bx]",              bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16,              11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lidt [ss:bx]",           bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,           12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lidt [bx]",          bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16,       12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [bx]; sidt32",  bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16,     27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [ss:bx]",       bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16,    13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lidt [ebx]",             bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32,              11, false, BS3_MODE_CODE_32, 0 },
    { "lidt [ss:ebx]",          bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,           12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lidt [ebx]",         bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32,       12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lidt [ss:ebx]",      bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32,    13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lidt [rbx]",             bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64,               9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lidt [rbx]",         bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,         10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lidt [rbx]",         bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64,       10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lidt [rbx]",     bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64,  11, false, BS3_MODE_CODE_64, 0 },
};
282
/** LGDT test workers.
 *  Each worker does lgdt [*bx], stores the result via sgdt [es:*di], then
 *  restores the original via lgdt [es:*si] (see the worker names).
 *  Columns: description, worker, cbInstr, fSs, bMode mask, fFlags. */
static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
{
    { "lgdt [bx]",            bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16,              11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lgdt [ss:bx]",         bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,           12, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lgdt [bx]",        bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16,       12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lgdt [ss:bx]",     bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16,    13, true,  BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lgdt [ebx]",           bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32,              11, false, BS3_MODE_CODE_32, 0 },
    { "lgdt [ss:ebx]",        bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,           12, true,  BS3_MODE_CODE_32, 0 },
    { "o16 lgdt [ebx]",       bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32,       12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lgdt [ss:ebx]",    bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32,    13, true,  BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lgdt [rbx]",           bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64,               9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lgdt [rbx]",       bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,         10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lgdt [rbx]",       bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64,       10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lgdt [rbx]",   bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64,  11, false, BS3_MODE_CODE_64, 0 },
};
299
300
301
#if 0 /* Currently unused; kept for the disabled RaiseXcpt1 tests below. */
/** Table containing invalid CS selector types.
 *  First 8 entries: data-segment types (S=1) which are invalid for CS;
 *  remaining entries: all 16 system descriptor types (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
{
    { X86_SEL_TYPE_RO,          1 },
    { X86_SEL_TYPE_RO_ACC,      1 },
    { X86_SEL_TYPE_RW,          1 },
    { X86_SEL_TYPE_RW_ACC,      1 },
    { X86_SEL_TYPE_RO_DOWN,     1 },
    { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
    { X86_SEL_TYPE_RW_DOWN,     1 },
    { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
    { 0,  0 },
    { 1,  0 },
    { 2,  0 },
    { 3,  0 },
    { 4,  0 },
    { 5,  0 },
    { 6,  0 },
    { 7,  0 },
    { 8,  0 },
    { 9,  0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};

/** Table containing invalid SS selector types.
 *  First 8 entries: executable (code) types (S=1) which are invalid for SS;
 *  remaining entries: all 16 system descriptor types (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
{
    { X86_SEL_TYPE_EO,          1 },
    { X86_SEL_TYPE_EO_ACC,      1 },
    { X86_SEL_TYPE_ER,          1 },
    { X86_SEL_TYPE_ER_ACC,      1 },
    { X86_SEL_TYPE_EO_CONF,     1 },
    { X86_SEL_TYPE_EO_CONF_ACC, 1 },
    { X86_SEL_TYPE_ER_CONF,     1 },
    { X86_SEL_TYPE_ER_CONF_ACC, 1 },
    { 0,  0 },
    { 1,  0 },
    { 2,  0 },
    { 3,  0 },
    { 4,  0 },
    { 5,  0 },
    { 6,  0 },
    { 7,  0 },
    { 8,  0 },
    { 9,  0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};
#endif
361
362
/** Common 16-bit code \#AC test snippets.
 *  Columns: snippet, fOp, cbMem, cbAlign, [offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c16,             MYOP_LD,                2,      2 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c16,             MYOP_ST,                2,      2 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16,            MYOP_LD_ST,             2,      2 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16,         MYOP_LD_ST | MYOP_EFL,  2,      2 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c16,                MYOP_LD_DIV,            2,      2 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16,         MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16,        MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16,   MYOP_ST,               10,      8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c16,             MYOP_ST | MYOP_AC_GP, 512,     16 },
};
375
/** Common 32-bit code \#AC test snippets.
 *  Columns: snippet, fOp, cbMem, cbAlign, [offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c32,             MYOP_LD,                4,      4 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c32,             MYOP_ST,                4,      4 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32,            MYOP_LD_ST,             4,      4 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32,         MYOP_LD_ST | MYOP_EFL,  4,      4 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c32,                MYOP_LD_DIV,            4,      4 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32,         MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32,        MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32,   MYOP_ST,               10,      8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c32,             MYOP_ST | MYOP_AC_GP, 512,     16 },
};
388
/** Common 64-bit code \#AC test snippets.
 *  Columns: snippet, fOp, cbMem, cbAlign, [offFaultInstr]. */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
{
    {   bs3CpuBasic2_mov_ax_ds_bx__ud2_c64,             MYOP_LD,                8,      8 },
    {   bs3CpuBasic2_mov_ds_bx_ax__ud2_c64,             MYOP_ST,                8,      8 },
    {   bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64,            MYOP_LD_ST,             8,      8 },
    {   bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64,         MYOP_LD_ST | MYOP_EFL,  8,      8 },
    {   bs3CpuBasic2_div_ds_bx__ud2_c64,                MYOP_LD_DIV,            8,      8 },
    {   bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64,         MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64,        MYOP_LD,               10,      8, 2 /*fninit*/ },
    {   bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64,   MYOP_ST,               10,      8, 4 /*fninit+fldz*/ },
    {   bs3CpuBasic2_fxsave_ds_bx__ud2_c64,             MYOP_ST | MYOP_AC_GP, 512,     16 },
};
401
/** Maps each BS3_MODE_CODE_XXX code mode to its \#AC test snippet table.
 *  Note that real/V86 16-bit modes share the 16-bit snippets. */
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    {   BS3_MODE_CODE_16,  RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    {   BS3_MODE_CODE_32,  RT_ELEMENTS(g_aCmn32), g_aCmn32 },
    {   BS3_MODE_CODE_64,  RT_ELEMENTS(g_aCmn64), g_aCmn64 },
};
409
410
411/**
412 * Sets globals according to the mode.
413 *
414 * @param bTestMode The test mode.
415 */
416static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
417{
418 g_bTestMode = bTestMode;
419 g_pszTestMode = Bs3GetModeName(bTestMode);
420 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
421 g_usBs3TestStep = 0;
422}
423
424
/** Returns the full 32-bit ESP from 16-bit code.
 *  Open Watcom inline assembly: the '.386' directive enables 386 instructions,
 *  so this requires a 386 or later CPU.  The 32-bit return value is composed
 *  from AX (low word, copied from SP) and DX (high word of ESP) per the
 *  'value [ax dx]' clause. */
uint32_t ASMGetESP(void);
#pragma aux ASMGetESP = \
    ".386" \
    "mov ax, sp" \
    "mov edx, esp" \
    "shr edx, 16" \
    value [ax dx] \
    modify exact [ax dx];
433
434
435/**
436 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
437 * and g_pszTestMode.
438 */
439static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
440{
441 va_list va;
442
443 char szTmp[168];
444 va_start(va, pszFormat);
445 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
446 va_end(va);
447
448 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
449}
450
451
#if 0 /* Currently unused (callers are in the disabled RaiseXcpt1Common below). */
/**
 * Compares the trap frame of a software interrupt against the start context.
 *
 * Checks the vector and that the error code is zero, then verifies the
 * register context advanced by the 2-byte 'int xx' instruction.  Halts on
 * mismatch (inner \#if 1) to ease debugging.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the interrupt was dispatched from.
 * @param   bXcpt       The expected vector number.
 */
static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",  "%#04x",   pTrapCtx->bXcpt,  bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
473
474
#if 0 /* Currently unused (callers are in the disabled RaiseXcpt1Common below). */
/**
 * Compares a trap frame against the start context, additionally checking the
 * CS the handler ran with.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   cbIpAdjust  Expected instruction-pointer advance.
 * @param   bXcpt       The expected vector number.
 * @param   uHandlerCs  The CS value the handler is expected to run with.
 */
static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
                                         uint8_t bXcpt, uint16_t uHandlerCs)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt",      "%#04x",   pTrapCtx->bXcpt,      bXcpt);
    CHECK_MEMBER("bErrCd",     "%#06RX64", pTrapCtx->uErrCd,     0);
    CHECK_MEMBER("uHandlerCs", "%#06x",   pTrapCtx->uHandlerCs, uHandlerCs);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
498
/**
 * Compares a CPU trap frame against the expected values.
 *
 * Checks the vector and error code, works out whether EFLAGS.RF should be set
 * in the trap frame (CPU-generation dependent), then verifies the full
 * register context.  Halts on mismatch (inner \#if 1) to ease debugging.
 *
 * @param   pTrapCtx            The captured trap frame.
 * @param   pStartCtx           The context the trap was raised from.
 * @param   uErrCd              The expected error code.
 * @param   bXcpt               The expected exception vector.
 * @param   f486ResumeFlagHint  Hint on whether a 486 sets RF for this trap.
 * @param   cbIpAdjust          Expected instruction-pointer advance.
 */
static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
                                           uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
{
    uint16_t const  cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t        fExtraEfl;

    CHECK_MEMBER("bXcpt",  "%#04x",    pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    /* Decide whether the resume flag (RF) is expected:
       - never on 16-bit systems;
       - not for #DB (hack observed on a 10980xe);
       - not on 386/486 when the hint says the CPU doesn't set it. */
    if (   g_f16BitSys
        || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
        || (   !f486ResumeFlagHint
            && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
    fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
#endif
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}
532
533
534/**
535 * Compares \#GP trap.
536 */
537static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
538{
539 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
540}
541
#if 0 /* Currently unused. */
/**
 * Compares a \#NP (segment not present) trap frame.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   uErrCd      The expected error code.
 */
static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
551
552/**
553 * Compares \#SS trap.
554 */
555static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
556{
557 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
558}
559
#if 0 /* Currently unused. */
/**
 * Compares a \#TS (invalid TSS) trap frame.
 *
 * @param   pTrapCtx    The captured trap frame.
 * @param   pStartCtx   The context the trap was raised from.
 * @param   uErrCd      The expected error code.
 */
static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
569
570/**
571 * Compares \#PF trap.
572 */
573static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
574 uint64_t uCr2Expected, uint8_t cbIpAdjust)
575{
576 uint64_t const uCr2Saved = pStartCtx->cr2.u;
577 pStartCtx->cr2.u = uCr2Expected;
578 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
579 pStartCtx->cr2.u = uCr2Saved;
580}
581
582/**
583 * Compares \#UD trap.
584 */
585static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
586{
587 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
588 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
589}
590
591/**
592 * Compares \#AC trap.
593 */
594static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
595{
596 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
597}
598
599/**
600 * Compares \#DB trap.
601 */
602static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
603{
604 uint32_t const fDr6 = Bs3RegGetDr6();
605 fDr6Expect |= X86_DR6_RA1_MASK;
606 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
607
608 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);
609}
610
611
612/**
613 * Checks that DR6 has the initial value, i.e. is unchanged when other exception
614 * was raised before a \#DB could occur.
615 */
616static void bs3CpuBasic2_CheckDr6InitVal(void)
617{
618 uint32_t const fDr6 = Bs3RegGetDr6();
619 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
620 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
621}
622
623#if 0 /* convert me */
624static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
625 PX86DESC const paIdt, unsigned const cIdteShift)
626{
627 BS3TRAPFRAME TrapCtx;
628 BS3REGCTX Ctx80;
629 BS3REGCTX Ctx81;
630 BS3REGCTX Ctx82;
631 BS3REGCTX Ctx83;
632 BS3REGCTX CtxTmp;
633 BS3REGCTX CtxTmp2;
634 PBS3REGCTX apCtx8x[4];
635 unsigned iCtx;
636 unsigned iRing;
637 unsigned iDpl;
638 unsigned iRpl;
639 unsigned i, j, k;
640 uint32_t uExpected;
641 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
642# if TMPL_BITS == 16
643 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
644 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
645# else
646 bool const f286 = false;
647 bool const f386Plus = true;
648 int rc;
649 uint8_t *pbIdtCopyAlloc;
650 PX86DESC pIdtCopy;
651 const unsigned cbIdte = 1 << (3 + cIdteShift);
652 RTCCUINTXREG uCr0Saved = ASMGetCR0();
653 RTGDTR GdtrSaved;
654# endif
655 RTIDTR IdtrSaved;
656 RTIDTR Idtr;
657
658 ASMGetIDTR(&IdtrSaved);
659# if TMPL_BITS != 16
660 ASMGetGDTR(&GdtrSaved);
661# endif
662
663 /* make sure they're allocated */
664 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
665 Bs3MemZero(&Ctx80, sizeof(Ctx80));
666 Bs3MemZero(&Ctx81, sizeof(Ctx81));
667 Bs3MemZero(&Ctx82, sizeof(Ctx82));
668 Bs3MemZero(&Ctx83, sizeof(Ctx83));
669 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
670 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
671
672 /* Context array. */
673 apCtx8x[0] = &Ctx80;
674 apCtx8x[1] = &Ctx81;
675 apCtx8x[2] = &Ctx82;
676 apCtx8x[3] = &Ctx83;
677
678# if TMPL_BITS != 16
679 /* Allocate memory for playing around with the IDT. */
680 pbIdtCopyAlloc = NULL;
681 if (BS3_MODE_IS_PAGED(g_bTestMode))
682 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
683# endif
684
685 /*
686 * IDT entry 80 thru 83 are assigned DPLs according to the number.
687 * (We'll be useing more, but this'll do for now.)
688 */
689 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
690 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
691 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
692 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
693
694 Bs3RegCtxSave(&Ctx80);
695 Ctx80.rsp.u -= 0x300;
696 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
697# if TMPL_BITS == 16
698 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
699# elif TMPL_BITS == 32
700 g_uBs3TrapEipHint = Ctx80.rip.u32;
701# endif
702 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
703 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
704 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
705 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
706 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
707 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
708
709 /*
710 * Check that all the above gates work from ring-0.
711 */
712 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
713 {
714 g_usBs3TestStep = iCtx;
715# if TMPL_BITS == 32
716 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
717# endif
718 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
719 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
720 }
721
722 /*
723 * Check that the gate DPL checks works.
724 */
725 g_usBs3TestStep = 100;
726 for (iRing = 0; iRing <= 3; iRing++)
727 {
728 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
729 {
730 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
731 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
732# if TMPL_BITS == 32
733 g_uBs3TrapEipHint = CtxTmp.rip.u32;
734# endif
735 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
736 if (iCtx < iRing)
737 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
738 else
739 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
740 g_usBs3TestStep++;
741 }
742 }
743
744 /*
745 * Modify the gate CS value and run the handler at a different CPL.
746 * Throw RPL variations into the mix (completely ignored) together
747 * with gate presence.
748 * 1. CPL <= GATE.DPL
749 * 2. GATE.P
750 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
751 */
752 g_usBs3TestStep = 1000;
753 for (i = 0; i <= 3; i++)
754 {
755 for (iRing = 0; iRing <= 3; iRing++)
756 {
757 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
758 {
759# if TMPL_BITS == 32
760 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
761# endif
762 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
763 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
764
765 for (j = 0; j <= 3; j++)
766 {
767 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
768 for (k = 0; k < 2; k++)
769 {
770 g_usBs3TestStep++;
771 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
772 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
773 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
774 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
775 /*Bs3TrapPrintFrame(&TrapCtx);*/
776 if (iCtx < iRing)
777 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
778 else if (k == 0)
779 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
780 else if (i > iRing)
781 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
782 else
783 {
784 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
785 if (i <= iCtx && i <= iRing)
786 uExpectedCs |= i;
787 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
788 }
789 }
790 }
791
792 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
793 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
794 }
795 }
796 }
797 BS3_ASSERT(g_usBs3TestStep < 1600);
798
799 /*
800 * Various CS and SS related faults
801 *
802 * We temporarily reconfigure gate 80 and 83 with new CS selectors, the
803 * latter have a CS.DPL of 2 for testing ring transitions and SS loading
804 * without making it impossible to handle faults.
805 */
806 g_usBs3TestStep = 1600;
807 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
808 Bs3GdteTestPage00.Gen.u1Present = 0;
809 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
810 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
811
812 /* CS.PRESENT = 0 */
813 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
814 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
815 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
816 bs3CpuBasic2_FailedF("selector was accessed");
817 g_usBs3TestStep++;
818
819 /* Check that GATE.DPL is checked before CS.PRESENT. */
820 for (iRing = 1; iRing < 4; iRing++)
821 {
822 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
823 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
824 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
825 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
826 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
827 bs3CpuBasic2_FailedF("selector was accessed");
828 g_usBs3TestStep++;
829 }
830
831 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
832 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
833 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
834 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
835 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
836 bs3CpuBasic2_FailedF("CS selector was accessed");
837 g_usBs3TestStep++;
838 for (iDpl = 1; iDpl < 4; iDpl++)
839 {
840 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
841 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
842 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
843 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
844 bs3CpuBasic2_FailedF("CS selector was accessed");
845 g_usBs3TestStep++;
846 }
847
848 /* 1608: Check all the invalid CS selector types alone. */
849 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
850 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
851 {
852 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
853 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
854 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
855 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
856 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
857 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
858 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
859 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
860 g_usBs3TestStep++;
861
862 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
863 Bs3GdteTestPage00.Gen.u1Present = 0;
864 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
865 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
866 Bs3GdteTestPage00.Gen.u1Present = 1;
867 g_usBs3TestStep++;
868 }
869
870 /* Fix CS again. */
871 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
872
873 /* 1632: Test SS. */
874 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
875 {
876 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
877 uint16_t const uSavedSs2 = *puTssSs2;
878 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
879
880 /* Make the handler execute in ring-2. */
881 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
882 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
883 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
884
885 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
886 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
887 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
888 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
889 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
890 bs3CpuBasic2_FailedF("CS selector was not access");
891 g_usBs3TestStep++;
892
893 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
894 that we get #SS if the selector isn't present. */
895 i = 0; /* used for cycling thru invalid CS types */
896 for (k = 0; k < 10; k++)
897 {
898 /* k=0: present,
899 k=1: not-present,
900 k=2: present but very low limit,
901 k=3: not-present, low limit.
902 k=4: present, read-only.
903 k=5: not-present, read-only.
904 k=6: present, code-selector.
905 k=7: not-present, code-selector.
906 k=8: present, read-write / no access + system (=LDT).
907 k=9: not-present, read-write / no access + system (=LDT).
908 */
909 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
910 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
911 if (k >= 8)
912 {
913 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
914 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
915 }
916 else if (k >= 6)
917 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
918 else if (k >= 4)
919 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
920 else if (k >= 2)
921 {
922 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
923 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
924 Bs3GdteTestPage03.Gen.u1Granularity = 0;
925 }
926
927 for (iDpl = 0; iDpl < 4; iDpl++)
928 {
929 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
930
931 for (iRpl = 0; iRpl < 4; iRpl++)
932 {
933 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
934 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
935 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
936 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
937 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
938 if (iRpl != 2 || iRpl != iDpl || k >= 4)
939 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
940 else if (k != 0)
941 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
942 k == 2 /*f486ResumeFlagHint*/);
943 else
944 {
945 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
946 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
947 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
948 }
949 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
950 bs3CpuBasic2_FailedF("CS selector was not access");
951 if ( TrapCtx.bXcpt == 0x83
952 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
953 {
954 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
955 bs3CpuBasic2_FailedF("SS selector was not accessed");
956 }
957 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
958 bs3CpuBasic2_FailedF("SS selector was accessed");
959 g_usBs3TestStep++;
960
961 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
962 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
963 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
964 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
965 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
966 g_usBs3TestStep++;
967
968 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
969 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
970 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
971 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
972 g_usBs3TestStep++;
973
974 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
975 Bs3GdteTestPage02.Gen.u1Present = 0;
976 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
977 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
978 Bs3GdteTestPage02.Gen.u1Present = 1;
979 g_usBs3TestStep++;
980
981 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
982 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
983 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
984 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
985 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
986 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
987 Bs3GdteTestPage02.Gen.u1DescType = 1;
988 g_usBs3TestStep++;
989
990 /* +5: Now, make the CS selector limit too small and that it triggers after SS trouble.
991 The 286 had a simpler approach to these GP(0). */
992 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
993 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
994 Bs3GdteTestPage02.Gen.u1Granularity = 0;
995 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
996 if (f286)
997 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
998 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
999 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1000 else if (k != 0)
1001 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1002 else
1003 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1004 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1005 g_usBs3TestStep++;
1006 }
1007 }
1008 }
1009
1010 /* Check all the invalid SS selector types alone. */
1011 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1012 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1013 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1014 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1015 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1016 g_usBs3TestStep++;
1017 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1018 {
1019 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1020 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1021 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1022 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1023 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1024 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1025 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1026 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1027 g_usBs3TestStep++;
1028 }
1029
1030 /*
1031 * Continue the SS experiments with an expand down segment. We'll use
1032 * the same setup as we already have with gate 83h being DPL and
1033 * having CS.DPL=2.
1034 *
1035 * Expand down segments are weird. The valid area is practically speaking
1036 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1037 * addresses from 0xffff thru 0x6001.
1038 *
1039 * So, with expand down segments we can more easily cut partially into the
1040 * pushing of the iret frame and trigger more interesting behavior than
1041 * with regular "expand up" segments where the whole pushing area is either
1042 * all fine or not fine.
1043 */
1044 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1045 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1046 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1047 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1048 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1049
1050 /* First test, limit = max --> no bytes accessible --> #GP */
1051 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1052 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1053
1054 /* Second test, limit = 0 --> all but byte zero accessible --> works */
1055 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1056 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1057 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1058 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1059
1060 /* Modify the gate handler to be a dummy that immediately does UD2
1061 and triggers #UD, then advance the limit down till we get the #UD. */
1062 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1063
1064 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1065 if (g_f16BitSys)
1066 {
1067 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1068 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1069 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1070 }
1071 else
1072 {
1073 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1074 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1075 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1076 }
1077 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1078 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1079 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1080 CtxTmp2.bCpl = 2;
1081
1082 /* test run. */
1083 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1084 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1085 g_usBs3TestStep++;
1086
1087 /* Real run. */
1088 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1089 while (i-- > 0)
1090 {
1091 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1092 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1093 if (i > 0)
1094 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1095 else
1096 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1097 g_usBs3TestStep++;
1098 }
1099
1100 /* Do a run where we do the same-ring kind of access. */
1101 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1102 if (g_f16BitSys)
1103 {
1104 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1105 i = 2*3 - 1;
1106 }
1107 else
1108 {
1109 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1110 i = 4*3 - 1;
1111 }
1112 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1113 CtxTmp2.ds = CtxTmp.ds;
1114 CtxTmp2.es = CtxTmp.es;
1115 CtxTmp2.fs = CtxTmp.fs;
1116 CtxTmp2.gs = CtxTmp.gs;
1117 while (i-- > 0)
1118 {
1119 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1120 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1121 if (i > 0)
1122 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1123 else
1124 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1125 g_usBs3TestStep++;
1126 }
1127
1128 *puTssSs2 = uSavedSs2;
1129 paIdt[0x83 << cIdteShift] = SavedGate83;
1130 }
1131 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1132 BS3_ASSERT(g_usBs3TestStep < 3000);
1133
1134 /*
1135 * Modify the gate CS value with a conforming segment.
1136 */
1137 g_usBs3TestStep = 3000;
1138 for (i = 0; i <= 3; i++) /* cs.dpl */
1139 {
1140 for (iRing = 0; iRing <= 3; iRing++)
1141 {
1142 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1143 {
1144 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1145 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1146# if TMPL_BITS == 32
1147 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1148# endif
1149
1150 for (j = 0; j <= 3; j++) /* rpl */
1151 {
1152 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1153 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1154 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1155 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1156 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1157 /*Bs3TrapPrintFrame(&TrapCtx);*/
1158 g_usBs3TestStep++;
1159 if (iCtx < iRing)
1160 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1161 else if (i > iRing)
1162 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1163 else
1164 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1165 }
1166 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1167 }
1168 }
1169 }
1170 BS3_ASSERT(g_usBs3TestStep < 3500);
1171
1172 /*
1173 * The gates must be 64-bit in long mode.
1174 */
1175 if (cIdteShift != 0)
1176 {
1177 g_usBs3TestStep = 3500;
1178 for (i = 0; i <= 3; i++)
1179 {
1180 for (iRing = 0; iRing <= 3; iRing++)
1181 {
1182 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1183 {
1184 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1185 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1186
1187 for (j = 0; j < 2; j++)
1188 {
1189 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1190 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1191 g_usBs3TestStep++;
1192 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1193 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1194 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1195 /*Bs3TrapPrintFrame(&TrapCtx);*/
1196 if (iCtx < iRing)
1197 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1198 else
1199 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1200 }
1201 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1202 }
1203 }
1204 }
1205 BS3_ASSERT(g_usBs3TestStep < 4000);
1206 }
1207
1208 /*
1209 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1210 */
1211 g_usBs3TestStep = 5000;
1212 i = (0x80 << (cIdteShift + 3)) - 1;
1213 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1214 k = (0x83 << (cIdteShift + 3)) - 1;
1215 for (; i <= k; i++, g_usBs3TestStep++)
1216 {
1217 Idtr = IdtrSaved;
1218 Idtr.cbIdt = i;
1219 ASMSetIDTR(&Idtr);
1220 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1221 if (i < j)
1222 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1223 else
1224 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1225 }
1226 ASMSetIDTR(&IdtrSaved);
1227 BS3_ASSERT(g_usBs3TestStep < 5100);
1228
1229# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1230
1231 /*
1232 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1233 * first page and 0x81 is on the second page. We then proceed to move
1234 * it down byte by byte to check that any inaccessible byte means #PF.
1235 *
1236 * Note! We must reload the alternative IDTR for each run as any kind of
1237 * printing to the string (like error reporting) will cause a switch
1238 * to real mode and back, reloading the default IDTR.
1239 */
1240 g_usBs3TestStep = 5200;
1241 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1242 {
1243 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1244 for (j = 0; j < cbIdte; j++)
1245 {
1246 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1247 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1248
1249 Idtr.cbIdt = IdtrSaved.cbIdt;
1250 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1251
1252 ASMSetIDTR(&Idtr);
1253 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1254 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1255 g_usBs3TestStep++;
1256
1257 ASMSetIDTR(&Idtr);
1258 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1259 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1260 g_usBs3TestStep++;
1261
1262 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1263 if (RT_SUCCESS(rc))
1264 {
1265 ASMSetIDTR(&Idtr);
1266 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1267 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1268 g_usBs3TestStep++;
1269
1270 ASMSetIDTR(&Idtr);
1271 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1272 if (f486Plus)
1273 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1274 else
1275 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1276 g_usBs3TestStep++;
1277
1278 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1279
1280 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1281 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1282 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1283 if (RT_SUCCESS(rc))
1284 {
1285 ASMSetIDTR(&Idtr);
1286 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1287 if (f486Plus)
1288 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1289 else
1290 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1291 g_usBs3TestStep++;
1292
1293 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1294 }
1295 }
1296 else
1297 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1298
1299 ASMSetIDTR(&IdtrSaved);
1300 }
1301 }
1302
1303 /*
1304 * The read/write and user/supervisor bits the IDT PTEs are irrelevant.
1305 */
1306 g_usBs3TestStep = 5300;
1307 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1308 {
1309 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1310 Idtr.cbIdt = IdtrSaved.cbIdt;
1311 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1312
1313 ASMSetIDTR(&Idtr);
1314 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1315 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1316 g_usBs3TestStep++;
1317
1318 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1319 if (RT_SUCCESS(rc))
1320 {
1321 ASMSetIDTR(&Idtr);
1322 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1323 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1324 g_usBs3TestStep++;
1325
1326 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1327 }
1328 ASMSetIDTR(&IdtrSaved);
1329 }
1330
1331 /*
1332 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1333 * with interrupt gates 80h and 83h, respectively.
1334 */
1335/** @todo Throw in SS.u1Accessed too. */
1336 g_usBs3TestStep = 5400;
1337 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1338 {
1339 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1340 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1341 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1342
1343 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1344 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1345 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1346
1347 /* Check that the CS.A bit is being set on a general basis and that
1348 the special CS values work with our generic handler code. */
1349 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1350 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1351 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1352 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1353 g_usBs3TestStep++;
1354
1355 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1356 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1357 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1358 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1359 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1360 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1361 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1362 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1363 g_usBs3TestStep++;
1364
1365 /*
1366 * Now check that setting CS.u1Access to 1 does __NOT__ trigger a page
1367 * fault due to the RW bit being zero.
1368 * (We check both with and without the WP bit if 80486.)
1369 */
1370 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1371 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1372
1373 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1374 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1375 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1376 if (RT_SUCCESS(rc))
1377 {
1378 /* ring-0 handler */
1379 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1380 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1381 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1382 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1383 g_usBs3TestStep++;
1384
1385 /* ring-3 handler */
1386 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1387 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1388 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1389 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1390 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1391 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1392 g_usBs3TestStep++;
1393
1394 /* clear WP and repeat the above. */
1395 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1396 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1397 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1398 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1399
1400 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1401 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1402 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1403 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1404 g_usBs3TestStep++;
1405
1406 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1407 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1408 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1409 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!n", Bs3GdteTestPage03.Gen.u4Type);
1410 g_usBs3TestStep++;
1411
1412 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1413 }
1414
1415 ASMSetCR0(uCr0Saved);
1416
1417 /*
1418 * While we're here, check that if the CS GDT entry is a non-present
1419 * page we do get a #PF with the right error code and CR2.
1420 */
1421 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1422 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1423 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1424 if (RT_SUCCESS(rc))
1425 {
1426 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1427 if (f486Plus)
1428 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1429 else
1430 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1431 g_usBs3TestStep++;
1432
1433 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1434 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1435 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1436 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1437
1438 if (f486Plus)
1439 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1440 else
1441 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1442 g_usBs3TestStep++;
1443
1444 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1445 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1446 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1447 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1448 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1449 }
1450
1451 /* restore */
1452 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1453 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1454 }
1455
1456# endif /* 32 || 64*/
1457
1458 /*
1459 * Check broad EFLAGS effects.
1460 */
1461 g_usBs3TestStep = 5600;
1462 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1463 {
1464 for (iRing = 0; iRing < 4; iRing++)
1465 {
1466 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1467 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1468
1469 /* all set */
1470 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1471 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1472 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1473 if (f486Plus)
1474 CtxTmp.rflags.u32 |= X86_EFL_AC;
1475 if (f486Plus && !g_f16BitSys)
1476 CtxTmp.rflags.u32 |= X86_EFL_RF;
1477 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1478 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1479 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1480 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1481
1482 if (iCtx >= iRing)
1483 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1484 else
1485 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1486 uExpected = CtxTmp.rflags.u32
1487 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1488 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1489 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1490 if (TrapCtx.fHandlerRfl != uExpected)
1491 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1492 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1493 g_usBs3TestStep++;
1494
1495 /* all cleared */
1496 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1497 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1498 else
1499 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1500 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1501 if (iCtx >= iRing)
1502 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1503 else
1504 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1505 uExpected = CtxTmp.rflags.u32;
1506 if (TrapCtx.fHandlerRfl != uExpected)
1507 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1508 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1509 g_usBs3TestStep++;
1510 }
1511 }
1512
1513/** @todo CS.LIMIT / canonical(CS) */
1514
1515
1516 /*
1517 * Check invalid gate types.
1518 */
1519 g_usBs3TestStep = 32000;
1520 for (iRing = 0; iRing <= 3; iRing++)
1521 {
1522 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1523 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1524 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1525 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1526 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1527 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1528 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1529 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1530 /*286:*/ 12, 14, 15 };
1531 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1532 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1533 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1534
1535
1536 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1537 {
1538 unsigned iType;
1539
1540 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1541 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1542# if TMPL_BITS == 32
1543 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1544# endif
1545 for (iType = 0; iType < cInvTypes; iType++)
1546 {
1547 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1548 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1549 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1550
1551 for (i = 0; i < 4; i++)
1552 {
1553 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1554 {
1555 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1556 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1557 : s_auCSes[j] | i;
1558 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1559 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1560 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1561 g_usBs3TestStep++;
1562 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1563
1564 /* Mark it not-present to check that invalid type takes precedence. */
1565 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1566 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1567 g_usBs3TestStep++;
1568 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1569 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1570 }
1571 }
1572
1573 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1574 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1575 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1576 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1577 }
1578 }
1579 }
1580 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1581
1582
1583 /** @todo
1584 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1585 * - Quickly generate all faults.
1586 * - All the peculiarities v8086.
1587 */
1588
1589# if TMPL_BITS != 16
1590 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1591# endif
1592}
1593#endif /* convert me */
1594
1595
/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 - one pass over the \#AC test matrix.
 *
 * Loops over ring x EFLAGS.AC x test-snippet x buffer-misalignment and checks
 * that each combination either traps as expected (\#GP, \#PF or \#AC) or runs
 * cleanly to the UD2 terminating the snippet with the expected register state.
 *
 * @param bMode        The CPU mode being tested (BS3_MODE_XXX).
 * @param pbBuf        The test buffer; the memory operand (ds:xBX) of each
 *                     snippet is pointed at it.  The misalignment loop below
 *                     assumes it is cache line aligned.
 * @param cbCacheLine  Assumed cache line size; used so the misalignment loop
 *                     crosses a cache line boundary (split-lock scenario).
 * @param fAm          Whether the caller has set CR0.AM, i.e. whether
 *                     alignment checking is armed.
 * @param fPf          Whether pbBuf aliases a supervisor-only mapping, making
 *                     ring-3 accesses fault with \#PF (see rounds 3 & 4 in the
 *                     caller).
 * @param uFlatBufPtr  Flat address of pbBuf, used to check the \#PF fault
 *                     address (only relevant when fPf is true).
 * @param pCmn         The mode specific test snippet table (g_aCmnModes entry).
 */
static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
                                           RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
{
    BS3TRAPFRAME    TrapCtx;
    BS3REGCTX       Ctx;
    BS3REGCTX       CtxUdExpected;
    uint8_t const   cRings = bMode == BS3_MODE_RM ? 1 : 4; /* real mode has no protection rings, so a single pass. */
    uint8_t         iRing;
    uint16_t        iTest;

    /* make sure they're allocated */
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));

    /*
     * Test all relevant rings.
     *
     * The memory operand is ds:xBX, so point it to pbBuf.
     * The test snippets mostly use xAX as operand, with the div
     * one also using xDX, so make sure they make some sense.
     */
    Bs3RegCtxSaveEx(&Ctx, bMode, 512);

    Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */

    for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++) /* v8086 code always executes at CPL 3. */
    {
        uint32_t uEbx;
        uint8_t  fAc;

        if (!BS3_MODE_IS_RM_OR_V86(bMode))
            Bs3RegCtxConvertToRingX(&Ctx, iRing);

        if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
            Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
        else
        {
            /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
            Ctx.ds      = BS3_FP_SEG(pbBuf);
            Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
        }
        uEbx = Ctx.rbx.u32; /* remember the base offset; the misalignment loop advances xBX from here. */

        Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
                  ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
        Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */

        Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));

        /*
         * AC flag loop - first pass with EFLAGS.AC clear, second with it set.
         */
        for (fAc = 0; fAc < 2; fAc++)
        {
            if (fAc)
                Ctx.rflags.u32 |= X86_EFL_AC;
            else
                Ctx.rflags.u32 &= ~X86_EFL_AC;

            /*
             * Loop over the test snippets.
             */
            for (iTest = 0; iTest < pCmn->cEntries; iTest++)
            {
                uint8_t const    fOp     = pCmn->paEntries[iTest].fOp;
                uint16_t const   cbMem   = pCmn->paEntries[iTest].cbMem;
                uint8_t const    cbAlign = pCmn->paEntries[iTest].cbAlign;
                uint16_t const   cbMax   = cbCacheLine + cbMem;
                uint16_t         offMem;
                uint8_t BS3_FAR *poffUd  = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
                Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
                CtxUdExpected.rip    = Ctx.rip;
                /* NOTE(review): the byte just before the snippet entry point appears to hold
                   the distance to the terminating UD2 - a convention of the assembly side;
                   confirm against the snippet definitions. */
                CtxUdExpected.rip.u  = Ctx.rip.u + poffUd[-1];
                CtxUdExpected.cs     = Ctx.cs;
                CtxUdExpected.rflags = Ctx.rflags;
                if (bMode == BS3_MODE_RM)
                    CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
                CtxUdExpected.rdx    = Ctx.rdx;
                CtxUdExpected.rax    = Ctx.rax;
                if (fOp & MYOP_LD) /* loads are expected to fetch the 0x01 filler bytes written below. */
                {
                    switch (cbMem)
                    {
                        case 2:
                            CtxUdExpected.rax.u16 = 0x0101;
                            break;
                        case 4:
                            CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
                            break;
                        case 8:
                            CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
                            break;
                    }
                }

                /*
                 * Buffer misalignment loop.
                 * Note! We must make sure to cross a cache line here to make sure
                 *       to cover the split-lock scenario. (The buffer is cache
                 *       line aligned.)
                 */
                for (offMem = 0; offMem < cbMax; offMem++)
                {
                    bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
                    unsigned   offBuf      = cbMax + cbMem * 2;
                    while (offBuf-- > 0)
                        pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */

                    CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
                    if (BS3_MODE_IS_16BIT_SYS(bMode))
                        g_uBs3TrapEipHint = Ctx.rip.u32;

                    //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
                    //              iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());

                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);

                    /* Classify the expected outcome, in decreasing priority:
                       1) snippets flagged MYOP_AC_GP take #GP on misalignment (except some
                          dword-aligned offsets - 10980XE observation, see condition),
                       2) ring-3 access through the supervisor alias takes #PF
                          (unless #AC applies - #AC beats #PF),
                       3) alignment checking not fully armed => clean run to the UD2,
                       4) otherwise => #AC. */
                    if (   (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
                        && fMisaligned
                        && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
                    {
                        if (fAc && bMode == BS3_MODE_RM)
                            TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    }
                    else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
                                                  X86_TRAP_PF_P | X86_TRAP_PF_US
                                                  | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
                                                  uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
                                                  pCmn->paEntries[iTest].offFaultInstr);
                    else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
                    {
                        if (fOp & MYOP_EFL) /* snippet alters status flags; adopt them from the actual result. */
                        {
                            CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
                            CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
                        }
                        if (fOp == MYOP_LD_DIV) /* quotient/remainder aren't predicted here; adopt from result. */
                        {
                            CtxUdExpected.rax = TrapCtx.Ctx.rax;
                            CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
                        }
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    }
                    else
                        bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);

                    g_usBs3TestStep++;
                }
            }
        }
    }
}
1751
1752
/**
 * Entrypoint for \#AC tests.
 *
 * Runs bs3CpuBasic2_RaiseXcpt11Worker up to four times: with CR0.AM clear and
 * then set against a directly accessible buffer, and - in paged non-v8086
 * modes - with CR0.AM clear and then set against a supervisor-only page alias
 * of the same buffer so that ring-3 accesses take \#PF.
 *
 * @returns 0 or BS3TESTDOMODE_SKIPPED.
 * @param bMode The CPU mode we're testing.
 *
 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
 * with control registers and such.
 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
{
    unsigned         cbCacheLine = 128; /** @todo detect */
    uint8_t BS3_FAR *pbBufAlloc;
    uint8_t BS3_FAR *pbBuf;
    unsigned         idxCmnModes;
    uint32_t         fCr0;

    /*
     * Skip if 386 or older (the skip message below states \#AC needs a 486).
     */
    if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
    {
        Bs3TestSkipped("#AC test requires 486 or later");
        return BS3TESTDOMODE_SKIPPED;
    }

    bs3CpuBasic2_SetGlobals(bMode);

    /* Get us a page aligned buffer (allocate two pages, round up to the next
       page boundary if needed). */
    pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
    if (!pbBufAlloc)
        return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
    if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
        pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
    BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
    //Bs3TestPrintf("pbBuf=%p\n", pbBuf);

    /* Find the g_aCmnModes entry matching the current code bitness. */
    idxCmnModes = 0;
    while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
        idxCmnModes++;
    //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);

    /* First round is w/o alignment checks enabled.  CR0.AM is expected to be
       clear on entry; it is restored from fCr0 before returning. */
    //Bs3TestPrintf("round 1\n");
    fCr0 = Bs3RegGetCr0();
    BS3_ASSERT(!(fCr0 & X86_CR0_AM));
    Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
#if 1
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

    /* The second round is with alignment checks enabled. */
#if 1
    //Bs3TestPrintf("round 2\n");
    Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

#if 1
    /* The third and fourth round access the buffer via a page alias that's not
       accessible from ring-3.  The third round has ACs disabled and the fourth
       has them enabled. */
    if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
    {
        /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
        /** @todo the aliasing is not necessary any more... */
        int            rc;
        RTCCUINTXREG   uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
        uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
        rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
                            X86_PTE_P | X86_PTE_RW); /* no user bit => supervisor-only mapping. */
        if (RT_SUCCESS(rc))
        {
            /* We 'misalign' the segment base here to make sure it's the final
               address that gets alignment checked and not just the operand value.
               The +1 on the offset is compensated by the -1 on the base below. */
            RTCCUINTXREG     uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
            uint8_t BS3_FAR *pbBufAlias   = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
            Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);

            //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
            Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            //Bs3TestPrintf("round 4\n");
            Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
        }
        else
            Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
    }
#endif

    Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
    Bs3RegSetCr0(fCr0); /* restore the caller's CR0 (clears AM again). */
    return 0;
}
1854
1855
1856/**
1857 * Executes one round of SIDT and SGDT tests using one assembly worker.
1858 *
1859 * This is written with driving everything from the 16-bit or 32-bit worker in
1860 * mind, i.e. not assuming the test bitcount is the same as the current.
1861 */
1862static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1863 uint8_t const *pbExpected)
1864{
1865 BS3TRAPFRAME TrapCtx;
1866 BS3REGCTX Ctx;
1867 BS3REGCTX CtxUdExpected;
1868 BS3REGCTX TmpCtx;
1869 uint8_t const cbBuf = 8*2; /* test buffer area */
1870 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1871 uint8_t BS3_FAR *pbBuf = abBuf;
1872 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1873 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1874 uint8_t bFiller;
1875 int off;
1876 int off2;
1877 unsigned cb;
1878 uint8_t BS3_FAR *pbTest;
1879
1880 /* make sure they're allocated */
1881 Bs3MemZero(&Ctx, sizeof(Ctx));
1882 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1883 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1884 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1885 Bs3MemZero(&abBuf, sizeof(abBuf));
1886
1887 /* Create a context, give this routine some more stack space, point the context
1888 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1889 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1890 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1891 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1892 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1893 g_uBs3TrapEipHint = Ctx.rip.u32;
1894 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1895 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1896
1897 /* For successful SIDT attempts, we'll stop at the UD2. */
1898 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1899 CtxUdExpected.rip.u += pWorker->cbInstr;
1900
1901 /*
1902 * Check that it works at all and that only bytes we expect gets written to.
1903 */
1904 /* First with zero buffer. */
1905 Bs3MemZero(abBuf, sizeof(abBuf));
1906 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1907 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1908 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1909 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1910 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1911 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1912 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1913 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1914 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1915 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1916 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1917 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1918 g_usBs3TestStep++;
1919
1920 /* Again with a buffer filled with a byte not occuring in the previous result. */
1921 bFiller = 0x55;
1922 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1923 bFiller++;
1924 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1925 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1926 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1927
1928 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1929 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1930 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1931 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1932 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1933 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1934 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1935 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1936 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1937 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1938 g_usBs3TestStep++;
1939
1940 /*
1941 * Slide the buffer along 8 bytes to cover misalignment.
1942 */
1943 for (off = 0; off < 8; off++)
1944 {
1945 pbBuf = &abBuf[off];
1946 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1947 CtxUdExpected.rbx.u = Ctx.rbx.u;
1948
1949 /* First with zero buffer. */
1950 Bs3MemZero(abBuf, sizeof(abBuf));
1951 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1952 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1953 if (off > 0 && !ASMMemIsZero(abBuf, off))
1954 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1955 cbIdtr, off, off + cbBuf, abBuf);
1956 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1957 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1958 cbIdtr, off, off + cbBuf, abBuf);
1959 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1960 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1961 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1962 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1963 g_usBs3TestStep++;
1964
1965 /* Again with a buffer filled with a byte not occuring in the previous result. */
1966 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1967 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1968 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1969 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1970 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1971 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1972 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1973 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1974 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1975 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1976 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1977 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1978 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1979 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1980 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1981 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1982 g_usBs3TestStep++;
1983 }
1984 pbBuf = abBuf;
1985 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1986 CtxUdExpected.rbx.u = Ctx.rbx.u;
1987
1988 /*
1989 * Play with the selector limit if the target mode supports limit checking
1990 * We use BS3_SEL_TEST_PAGE_00 for this
1991 */
1992 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1993 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1994 {
1995 uint16_t cbLimit;
1996 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
1997 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1998 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1999 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2000 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2001 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2002
2003 if (pWorker->fSs)
2004 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2005 else
2006 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2007
2008 /* Expand up (normal). */
2009 for (off = 0; off < 8; off++)
2010 {
2011 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2012 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2013 {
2014 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2015 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2016 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2017 if (off + cbIdtr <= cbLimit + 1)
2018 {
2019 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2020 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2021 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2022 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2023 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2024 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2025 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2026 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2027 }
2028 else
2029 {
2030 if (pWorker->fSs)
2031 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2032 else
2033 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2034 if (off + 2 <= cbLimit + 1)
2035 {
2036 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2037 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2038 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2039 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2040 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2041 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2042 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2043 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2044 }
2045 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2046 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2047 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2048 }
2049
2050 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2051 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2052 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2053 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2054 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2055 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2056
2057 g_usBs3TestStep++;
2058 }
2059 }
2060
2061 /* Expand down (weird). Inverted valid area compared to expand up,
2062 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2063 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2064 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2065 (because in a normal expand up the 0ffffh means all 64KB are
2066 accessible). */
2067 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2068 for (off = 0; off < 8; off++)
2069 {
2070 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2071 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2072 {
2073 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2074 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2075 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2076
2077 if (off > cbLimit)
2078 {
2079 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2080 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2081 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2082 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2083 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2084 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2085 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2086 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2087 }
2088 else
2089 {
2090 if (pWorker->fSs)
2091 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2092 else
2093 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2094 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2095 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2096 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2097 }
2098
2099 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2100 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2101 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2102 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2103 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2104 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2105
2106 g_usBs3TestStep++;
2107 }
2108 }
2109
2110 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2111 CtxUdExpected.rbx.u = Ctx.rbx.u;
2112 CtxUdExpected.ss = Ctx.ss;
2113 CtxUdExpected.ds = Ctx.ds;
2114 }
2115
2116 /*
2117 * Play with the paging.
2118 */
2119 if ( BS3_MODE_IS_PAGED(bTestMode)
2120 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2121 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2122 {
2123 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2124
2125 /*
2126 * Slide the buffer towards the trailing guard page. We'll observe the
2127 * first word being written entirely separately from the 2nd dword/qword.
2128 */
2129 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2130 {
2131 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2132 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2133 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2134 if (off + cbIdtr <= X86_PAGE_SIZE)
2135 {
2136 CtxUdExpected.rbx = Ctx.rbx;
2137 CtxUdExpected.ss = Ctx.ss;
2138 CtxUdExpected.ds = Ctx.ds;
2139 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2140 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2141 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2142 }
2143 else
2144 {
2145 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2146 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2147 if ( off <= X86_PAGE_SIZE - 2
2148 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2149 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2150 pbExpected, &pbTest[off], off);
2151 if ( off < X86_PAGE_SIZE - 2
2152 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2153 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2154 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2155 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2156 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2157 }
2158 g_usBs3TestStep++;
2159 }
2160
2161 /*
2162 * Now, do it the other way around. It should look normal now since writing
2163 * the limit will #PF first and nothing should be written.
2164 */
2165 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2166 {
2167 Bs3MemSet(pbTest, bFiller, 48);
2168 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2169 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2170 if (off >= 0)
2171 {
2172 CtxUdExpected.rbx = Ctx.rbx;
2173 CtxUdExpected.ss = Ctx.ss;
2174 CtxUdExpected.ds = Ctx.ds;
2175 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2176 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2177 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2178 }
2179 else
2180 {
2181 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2182 uFlatTest + off, 0 /*cbIpAdjust*/);
2183 if ( -off < cbIdtr
2184 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2185 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2186 bFiller, cbIdtr + off, pbTest, off);
2187 }
2188 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2189 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2190 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2191 g_usBs3TestStep++;
2192 }
2193
2194 /*
2195 * Combine paging and segment limit and check ordering.
2196 * This is kind of interesting here since it the instruction seems to
2197 * be doing two separate writes.
2198 */
2199 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2200 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2201 {
2202 uint16_t cbLimit;
2203
2204 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2205 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2206 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2207 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2208 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2209
2210 if (pWorker->fSs)
2211 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2212 else
2213 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2214
2215 /* Expand up (normal), approaching tail guard page. */
2216 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2217 {
2218 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2219 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2220 {
2221 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2222 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2223 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2224 if (off + cbIdtr <= cbLimit + 1)
2225 {
2226 /* No #GP, but maybe #PF. */
2227 if (off + cbIdtr <= X86_PAGE_SIZE)
2228 {
2229 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2230 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2231 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2232 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2233 }
2234 else
2235 {
2236 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2237 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2238 if ( off <= X86_PAGE_SIZE - 2
2239 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2240 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2241 pbExpected, &pbTest[off], off);
2242 cb = X86_PAGE_SIZE - off - 2;
2243 if ( off < X86_PAGE_SIZE - 2
2244 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2245 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2246 bFiller, cb, &pbTest[off + 2], off);
2247 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2248 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2249 }
2250 }
2251 else if (off + 2 <= cbLimit + 1)
2252 {
2253 /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
2254 if (off <= X86_PAGE_SIZE - 2)
2255 {
2256 if (pWorker->fSs)
2257 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2258 else
2259 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2260 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2261 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2262 pbExpected, &pbTest[off], off);
2263 cb = X86_PAGE_SIZE - off - 2;
2264 if ( off < X86_PAGE_SIZE - 2
2265 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2266 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2267 bFiller, cb, &pbTest[off + 2], off);
2268 }
2269 else
2270 {
2271 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2272 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2273 if ( off < X86_PAGE_SIZE
2274 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2275 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2276 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2277 }
2278 }
2279 else
2280 {
2281 /* #GP/#SS on limit. */
2282 if (pWorker->fSs)
2283 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2284 else
2285 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2286 if ( off < X86_PAGE_SIZE
2287 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2288 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2289 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2290 }
2291
2292 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2293 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2294 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2295 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2296
2297 g_usBs3TestStep++;
2298
2299 /* Set DS to 0 and check that we get #GP(0). */
2300 if (!pWorker->fSs)
2301 {
2302 Ctx.ds = 0;
2303 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2304 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2305 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2306 g_usBs3TestStep++;
2307 }
2308 }
2309 }
2310
2311 /* Expand down. */
2312 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2313 uFlatTest -= X86_PAGE_SIZE;
2314
2315 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2316 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2317 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2318 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2319
2320 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2321 {
2322 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2323 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2324 {
2325 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2326 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2327 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2328 if (cbLimit < off && off >= X86_PAGE_SIZE)
2329 {
2330 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2331 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2332 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2333 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2334 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2335 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2336 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2337 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2338 }
2339 else
2340 {
2341 if (cbLimit < off && off < X86_PAGE_SIZE)
2342 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2343 uFlatTest + off, 0 /*cbIpAdjust*/);
2344 else if (pWorker->fSs)
2345 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2346 else
2347 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2348 cb = cbIdtr*2;
2349 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2350 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2351 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2352 }
2353 g_usBs3TestStep++;
2354 }
2355 }
2356
2357 pbTest += X86_PAGE_SIZE;
2358 uFlatTest += X86_PAGE_SIZE;
2359 }
2360
2361 Bs3MemGuardedTestPageFree(pbTest);
2362 }
2363
2364 /*
2365 * Check non-canonical 64-bit space.
2366 */
2367 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2368 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2369 {
2370 /* Make our references relative to the gap. */
2371 pbTest += g_cbBs3PagingOneCanonicalTrap;
2372
2373 /* Hit it from below. */
2374 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2375 {
2376 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2377 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2378 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2379 if (off + cbIdtr <= 0)
2380 {
2381 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2382 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2383 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2384 }
2385 else
2386 {
2387 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2388 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2389 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2390 off2 = off <= -2 ? 2 : 0;
2391 cb = cbIdtr - off2;
2392 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2393 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2394 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2395 }
2396 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2397 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2398 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2399 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2400 }
2401
2402 /* Hit it from above. */
2403 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2404 {
2405 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2406 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2407 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2408 if (off >= 0)
2409 {
2410 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2411 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2412 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2413 }
2414 else
2415 {
2416 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2417 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2418 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2419 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2420 }
2421 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2422 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2423 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2424 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2425 }
2426
2427 }
2428}
2429
2430
2431static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2432 uint8_t const *pbExpected)
2433{
2434 unsigned idx;
2435 unsigned bRing;
2436 unsigned iStep = 0;
2437
2438 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2439 test and don't want to bother with double faults. */
2440 for (bRing = 0; bRing <= 3; bRing++)
2441 {
2442 for (idx = 0; idx < cWorkers; idx++)
2443 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2444 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2445 {
2446 g_usBs3TestStep = iStep;
2447 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2448 iStep += 1000;
2449 }
2450 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2451 break;
2452 }
2453}
2454
2455
2456BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2457{
2458 union
2459 {
2460 RTIDTR Idtr;
2461 uint8_t ab[16];
2462 } Expected;
2463
2464 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2465 bs3CpuBasic2_SetGlobals(bMode);
2466
2467 /*
2468 * Pass to common worker which is only compiled once per mode.
2469 */
2470 Bs3MemZero(&Expected, sizeof(Expected));
2471 ASMGetIDTR(&Expected.Idtr);
2472 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2473
2474 /*
2475 * Re-initialize the IDT.
2476 */
2477 Bs3TrapReInit();
2478 return 0;
2479}
2480
2481
2482BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2483{
2484 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2485 uint64_t uNew = 0;
2486 union
2487 {
2488 RTGDTR Gdtr;
2489 uint8_t ab[16];
2490 } Expected;
2491
2492 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2493 bs3CpuBasic2_SetGlobals(bMode);
2494
2495 /*
2496 * If paged mode, try push the GDT way up.
2497 */
2498 Bs3MemZero(&Expected, sizeof(Expected));
2499 ASMGetGDTR(&Expected.Gdtr);
2500 if (BS3_MODE_IS_PAGED(bMode))
2501 {
2502/** @todo loading non-canonical base addresses. */
2503 int rc;
2504 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2505 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2506 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2507 if (RT_SUCCESS(rc))
2508 {
2509 Bs3Lgdt_Gdt.uAddr = uNew;
2510 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2511 ASMGetGDTR(&Expected.Gdtr);
2512 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2513 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2514 }
2515 }
2516
2517 /*
2518 * Pass to common worker which is only compiled once per mode.
2519 */
2520 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2521
2522 /*
2523 * Unalias the GDT.
2524 */
2525 if (uNew != 0)
2526 {
2527 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2528 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2529 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2530 }
2531
2532 /*
2533 * Re-initialize the IDT.
2534 */
2535 Bs3TrapReInit();
2536 return 0;
2537}
2538
2539
2540
2541/*
2542 * LIDT & LGDT
2543 */
2544
2545/**
2546 * Executes one round of LIDT and LGDT tests using one assembly worker.
2547 *
2548 * This is written with driving everything from the 16-bit or 32-bit worker in
2549 * mind, i.e. not assuming the test bitcount is the same as the current.
2550 */
2551static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2552 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2553{
2554 static const struct
2555 {
2556 bool fGP;
2557 uint16_t cbLimit;
2558 uint64_t u64Base;
2559 } s_aValues64[] =
2560 {
2561 { false, 0x0000, UINT64_C(0x0000000000000000) },
2562 { false, 0x0001, UINT64_C(0x0000000000000001) },
2563 { false, 0x0002, UINT64_C(0x0000000000000010) },
2564 { false, 0x0003, UINT64_C(0x0000000000000123) },
2565 { false, 0x0004, UINT64_C(0x0000000000001234) },
2566 { false, 0x0005, UINT64_C(0x0000000000012345) },
2567 { false, 0x0006, UINT64_C(0x0000000000123456) },
2568 { false, 0x0007, UINT64_C(0x0000000001234567) },
2569 { false, 0x0008, UINT64_C(0x0000000012345678) },
2570 { false, 0x0009, UINT64_C(0x0000000123456789) },
2571 { false, 0x000a, UINT64_C(0x000000123456789a) },
2572 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2573 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2574 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2575 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2576 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2577 { true, 0x0000, UINT64_C(0x0000800000000000) },
2578 { true, 0x0000, UINT64_C(0x0000800000000333) },
2579 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2580 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2581 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2582 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2583 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2584 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2585 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2586 { false, 0x5678, UINT64_C(0xffff800000000000) },
2587 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2588 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2589 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2590 };
2591 static const struct
2592 {
2593 uint16_t cbLimit;
2594 uint32_t u32Base;
2595 } s_aValues32[] =
2596 {
2597 { 0xdfdf, UINT32_C(0xefefefef) },
2598 { 0x0000, UINT32_C(0x00000000) },
2599 { 0x0001, UINT32_C(0x00000001) },
2600 { 0x0002, UINT32_C(0x00000012) },
2601 { 0x0003, UINT32_C(0x00000123) },
2602 { 0x0004, UINT32_C(0x00001234) },
2603 { 0x0005, UINT32_C(0x00012345) },
2604 { 0x0006, UINT32_C(0x00123456) },
2605 { 0x0007, UINT32_C(0x01234567) },
2606 { 0x0008, UINT32_C(0x12345678) },
2607 { 0x0009, UINT32_C(0x80204060) },
2608 { 0x000a, UINT32_C(0xddeeffaa) },
2609 { 0x000b, UINT32_C(0xfdecdbca) },
2610 { 0x000c, UINT32_C(0x6098456b) },
2611 { 0x000d, UINT32_C(0x98506099) },
2612 { 0x000e, UINT32_C(0x206950bc) },
2613 { 0x000f, UINT32_C(0x9740395d) },
2614 { 0x0334, UINT32_C(0x64a9455e) },
2615 { 0xb423, UINT32_C(0xd20b6eff) },
2616 { 0x4955, UINT32_C(0x85296d46) },
2617 { 0xffff, UINT32_C(0x07000039) },
2618 { 0xefe1, UINT32_C(0x0007fe00) },
2619 };
2620
2621 BS3TRAPFRAME TrapCtx;
2622 BS3REGCTX Ctx;
2623 BS3REGCTX CtxUdExpected;
2624 BS3REGCTX TmpCtx;
2625 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2626 uint8_t abBufSave[32]; /* For saving the result after loading. */
2627 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2628 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2629 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2630 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2631 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2632 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2633 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2634 ? 3 : 4;
2635 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2636 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2637 uint8_t bFiller1; /* For filling abBufLoad. */
2638 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2639 int off;
2640 uint8_t BS3_FAR *pbTest;
2641 unsigned i;
2642
2643 /* make sure they're allocated */
2644 Bs3MemZero(&Ctx, sizeof(Ctx));
2645 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2646 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2647 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2648 Bs3MemZero(abBufSave, sizeof(abBufSave));
2649 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2650 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2651
2652 /*
2653 * Create a context, giving this routine some more stack space.
2654 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2655 * - Point DS/SS:xBX at abBufLoad.
2656 * - Point ES:xDI at abBufSave.
2657 * - Point ES:xSI at abBufRestore.
2658 */
2659 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2660 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2661 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2662 g_uBs3TrapEipHint = Ctx.rip.u32;
2663 Ctx.rflags.u16 &= ~X86_EFL_IF;
2664 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2665
2666 pbBufSave = abBufSave;
2667 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2668 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2669 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2670
2671 pbBufRestore = abBufRestore;
2672 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2673 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2674 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2675 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2676
2677 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2678 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2679
2680 /* For successful SIDT attempts, we'll stop at the UD2. */
2681 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2682 CtxUdExpected.rip.u += pWorker->cbInstr;
2683
2684 /*
2685 * Check that it works at all.
2686 */
2687 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2688 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2689 Bs3MemZero(abBufSave, sizeof(abBufSave));
2690 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2691 if (bRing != 0)
2692 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2693 else
2694 {
2695 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2696 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2697 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2698 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2699 }
2700 g_usBs3TestStep++;
2701
2702 /* Determine two filler bytes that doesn't appear in the previous result or our expectations. */
2703 bFiller1 = ~0x55;
2704 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2705 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2706 || bFiller1 == 0xff)
2707 bFiller1++;
2708 bFiller2 = 0x33;
2709 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2710 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2711 || bFiller2 == 0xff
2712 || bFiller2 == bFiller1)
2713 bFiller2++;
2714 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2715 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2716
    /* Again with a buffer filled with a byte not occurring in the previous result. */
2718 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2719 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2720 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2721 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2722 if (bRing != 0)
2723 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2724 else
2725 {
2726 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2727 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2728 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2729 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2730 }
2731 g_usBs3TestStep++;
2732
2733 /*
2734 * Try loading a bunch of different limit+base value to check what happens,
2735 * especially what happens wrt the top part of the base in 16-bit mode.
2736 */
2737 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2738 {
2739 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2740 {
2741 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2742 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2743 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2744 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2745 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2746 if (bRing != 0 || s_aValues64[i].fGP)
2747 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2748 else
2749 {
2750 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2751 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2752 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2753 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2754 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2755 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2756 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2757 }
2758 g_usBs3TestStep++;
2759 }
2760 }
2761 else
2762 {
2763 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2764 {
2765 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2766 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2767 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2768 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2769 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2770 if (bRing != 0)
2771 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2772 else
2773 {
2774 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2775 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2776 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2777 || ( cbBaseLoaded != 4
2778 && pbBufSave[2+3] != bTop16BitBase)
2779 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2780 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2781 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2782 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2783 }
2784 g_usBs3TestStep++;
2785 }
2786 }
2787
2788 /*
2789 * Slide the buffer along 8 bytes to cover misalignment.
2790 */
2791 for (off = 0; off < 8; off++)
2792 {
2793 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2794 CtxUdExpected.rbx.u = Ctx.rbx.u;
2795
2796 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2797 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2798 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2799 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2800 if (bRing != 0)
2801 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2802 else
2803 {
2804 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2805 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2806 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2807 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2808 }
2809 g_usBs3TestStep++;
2810 }
2811 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2812 CtxUdExpected.rbx.u = Ctx.rbx.u;
2813
2814 /*
2815 * Play with the selector limit if the target mode supports limit checking
2816 * We use BS3_SEL_TEST_PAGE_00 for this
2817 */
2818 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2819 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2820 {
2821 uint16_t cbLimit;
2822 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2823 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2824 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2825 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2826 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2827 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2828
2829 if (pWorker->fSs)
2830 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2831 else
2832 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2833
2834 /* Expand up (normal). */
2835 for (off = 0; off < 8; off++)
2836 {
2837 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2838 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2839 {
2840 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2841
2842 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2843 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2844 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2845 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2846 if (bRing != 0)
2847 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2848 else if (off + cbIdtr <= cbLimit + 1)
2849 {
2850 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2851 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2852 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2853 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2854 }
2855 else if (pWorker->fSs)
2856 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2857 else
2858 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2859 g_usBs3TestStep++;
2860
                    /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2862 abBufLoad[off] = abBufLoad[off + 1] = 0;
2863 abBufLoad[off + 2] |= 1;
2864 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2865 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2866 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2867 if (bRing != 0)
2868 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2869 else if (off + cbIdtr <= cbLimit + 1)
2870 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2871 else if (pWorker->fSs)
2872 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2873 else
2874 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2875 }
2876 }
2877
2878 /* Expand down (weird). Inverted valid area compared to expand up,
2879 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2880 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2881 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2882 (because in a normal expand up the 0ffffh means all 64KB are
2883 accessible). */
2884 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2885 for (off = 0; off < 8; off++)
2886 {
2887 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2888 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2889 {
2890 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2891
2892 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2893 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2894 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2895 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2896 if (bRing != 0)
2897 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2898 else if (off > cbLimit)
2899 {
2900 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2901 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2902 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2903 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2904 }
2905 else if (pWorker->fSs)
2906 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2907 else
2908 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2909 g_usBs3TestStep++;
2910
2911 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2912 abBufLoad[off] = abBufLoad[off + 1] = 0;
2913 abBufLoad[off + 2] |= 3;
2914 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2915 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2916 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2917 if (bRing != 0)
2918 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2919 else if (off > cbLimit)
2920 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2921 else if (pWorker->fSs)
2922 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2923 else
2924 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2925 }
2926 }
2927
2928 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2929 CtxUdExpected.rbx.u = Ctx.rbx.u;
2930 CtxUdExpected.ss = Ctx.ss;
2931 CtxUdExpected.ds = Ctx.ds;
2932 }
2933
2934 /*
2935 * Play with the paging.
2936 */
2937 if ( BS3_MODE_IS_PAGED(bTestMode)
2938 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2939 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2940 {
2941 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2942
2943 /*
2944 * Slide the load buffer towards the trailing guard page.
2945 */
2946 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2947 CtxUdExpected.ss = Ctx.ss;
2948 CtxUdExpected.ds = Ctx.ds;
2949 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2950 {
2951 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2952 if (off < X86_PAGE_SIZE)
2953 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2954 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2955 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2956 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2957 if (bRing != 0)
2958 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2959 else if (off + cbIdtr <= X86_PAGE_SIZE)
2960 {
2961 CtxUdExpected.rbx = Ctx.rbx;
2962 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2963 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2964 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2965 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2966 }
2967 else
2968 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2969 g_usBs3TestStep++;
2970
2971 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2972 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2973 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2974 && ( off != X86_PAGE_SIZE - 2
2975 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2976 )
2977 {
2978 pbTest[off] = 0;
2979 if (off + 1 < X86_PAGE_SIZE)
2980 pbTest[off + 1] = 0;
2981 if (off + 2 < X86_PAGE_SIZE)
2982 pbTest[off + 2] |= 7;
2983 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2984 if (bRing != 0)
2985 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2986 else
2987 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2988 g_usBs3TestStep++;
2989 }
2990 }
2991
2992 /*
2993 * Now, do it the other way around. It should look normal now since writing
2994 * the limit will #PF first and nothing should be written.
2995 */
2996 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2997 {
2998 Bs3MemSet(pbTest, bFiller1, 48);
2999 if (off >= 0)
3000 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3001 else if (off + cbIdtr > 0)
3002 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3003 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3004 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3005 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3006 if (bRing != 0)
3007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3008 else if (off >= 0)
3009 {
3010 CtxUdExpected.rbx = Ctx.rbx;
3011 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3012 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3013 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3014 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3015 }
3016 else
3017 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3018 g_usBs3TestStep++;
3019
3020 /* Again with messed up base as well (triple fault if buggy). */
3021 if (off < 0 && off > -cbIdtr)
3022 {
3023 if (off + 2 >= 0)
3024 pbTest[off + 2] |= 15;
3025 pbTest[off + cbIdtr - 1] ^= 0xaa;
3026 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3027 if (bRing != 0)
3028 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3029 else
3030 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3031 g_usBs3TestStep++;
3032 }
3033 }
3034
        /*
         * Combine paging and segment limit and check ordering.
         * This is kind of interesting here since the instruction seems to
         * actually be doing two separate reads, just like its S[IG]DT counterpart.
         *
         * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
         *       that's what f486Weirdness deals with.
         */
3043 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3044 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3045 {
3046 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3047 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3048 uint16_t cbLimit;
3049
3050 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3051 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3052 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3053 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3054 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3055
3056 if (pWorker->fSs)
3057 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3058 else
3059 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3060
3061 /* Expand up (normal), approaching tail guard page. */
3062 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3063 {
3064 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3065 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3066 {
3067 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3068 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3069 if (off < X86_PAGE_SIZE)
3070 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3071 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3072 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3073 if (bRing != 0)
3074 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3075 else if (off + cbIdtr <= cbLimit + 1)
3076 {
3077 /* No #GP, but maybe #PF. */
3078 if (off + cbIdtr <= X86_PAGE_SIZE)
3079 {
3080 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3081 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3082 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3083 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3084 }
3085 else
3086 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3087 }
3088 /* No #GP/#SS on limit, but instead #PF? */
3089 else if ( !f486Weirdness
3090 ? off < cbLimit && off >= 0xfff
3091 : off + 2 < cbLimit && off >= 0xffd)
3092 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3093 /* #GP/#SS on limit or base. */
3094 else if (pWorker->fSs)
3095 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3096 else
3097 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3098
3099 g_usBs3TestStep++;
3100
3101 /* Set DS to 0 and check that we get #GP(0). */
3102 if (!pWorker->fSs)
3103 {
3104 Ctx.ds = 0;
3105 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3106 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3107 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3108 g_usBs3TestStep++;
3109 }
3110 }
3111 }
3112
3113 /* Expand down. */
3114 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3115 uFlatTest -= X86_PAGE_SIZE;
3116
3117 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3118 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3119 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3120 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3121
3122 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3123 {
3124 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3125 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3126 {
3127 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3128 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3129 if (off >= X86_PAGE_SIZE)
3130 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3131 else if (off > X86_PAGE_SIZE - cbIdtr)
3132 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3133 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3134 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3135 if (bRing != 0)
3136 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3137 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3138 {
3139 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3140 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3141 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3142 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3143 }
3144 else if (cbLimit < off && off < X86_PAGE_SIZE)
3145 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3146 else if (pWorker->fSs)
3147 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3148 else
3149 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3150 g_usBs3TestStep++;
3151 }
3152 }
3153
3154 pbTest += X86_PAGE_SIZE;
3155 uFlatTest += X86_PAGE_SIZE;
3156 }
3157
3158 Bs3MemGuardedTestPageFree(pbTest);
3159 }
3160
3161 /*
3162 * Check non-canonical 64-bit space.
3163 */
3164 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3165 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3166 {
3167 /* Make our references relative to the gap. */
3168 pbTest += g_cbBs3PagingOneCanonicalTrap;
3169
3170 /* Hit it from below. */
3171 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3172 {
3173 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3174 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3175 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3176 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3177 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3178 if (off + cbIdtr > 0 || bRing != 0)
3179 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3180 else
3181 {
3182 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3183 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3184 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3185 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3186 }
3187 }
3188
3189 /* Hit it from above. */
3190 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3191 {
3192 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3193 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3194 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3195 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3196 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3197 if (off < 0 || bRing != 0)
3198 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3199 else
3200 {
3201 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3202 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3203 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3204 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3205 }
3206 }
3207
3208 }
3209}
3210
3211
3212static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3213 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3214{
3215 unsigned idx;
3216 unsigned bRing;
3217 unsigned iStep = 0;
3218
3219 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3220 test and don't want to bother with double faults. */
3221 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3222 {
3223 for (idx = 0; idx < cWorkers; idx++)
3224 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3225 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3226 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3227 || ( bTestMode > BS3_MODE_PE16
3228 || ( bTestMode == BS3_MODE_PE16
3229 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3230 {
3231 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3232 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3233 g_usBs3TestStep = iStep;
3234 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3235 iStep += 1000;
3236 }
3237 if (BS3_MODE_IS_RM_SYS(bTestMode))
3238 break;
3239 }
3240}
3241
3242
3243BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3244{
3245 union
3246 {
3247 RTIDTR Idtr;
3248 uint8_t ab[32]; /* At least cbIdtr*2! */
3249 } Expected;
3250
3251 //if (bMode != BS3_MODE_LM64) return 0;
3252 bs3CpuBasic2_SetGlobals(bMode);
3253
3254 /*
3255 * Pass to common worker which is only compiled once per mode.
3256 */
3257 Bs3MemZero(&Expected, sizeof(Expected));
3258 ASMGetIDTR(&Expected.Idtr);
3259
3260 if (BS3_MODE_IS_RM_SYS(bMode))
3261 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3262 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3263 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3264 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3265 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3266 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3267 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3268 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3269 else
3270 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3271 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3272
3273 /*
3274 * Re-initialize the IDT.
3275 */
3276 Bs3TrapReInit();
3277 return 0;
3278}
3279
3280
3281BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3282{
3283 union
3284 {
3285 RTGDTR Gdtr;
3286 uint8_t ab[32]; /* At least cbIdtr*2! */
3287 } Expected;
3288
3289 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3290 bs3CpuBasic2_SetGlobals(bMode);
3291
3292 /*
3293 * Pass to common worker which is only compiled once per mode.
3294 */
3295 if (BS3_MODE_IS_RM_SYS(bMode))
3296 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3297 Bs3MemZero(&Expected, sizeof(Expected));
3298 ASMGetGDTR(&Expected.Gdtr);
3299
3300 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3301 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3302
3303 /*
3304 * Re-initialize the IDT.
3305 */
3306 Bs3TrapReInit();
3307 return 0;
3308}
3309
/** Buffer for constructing IRET return frames in.
 *
 * Deliberately oversized (double the largest frame) so tests can detect
 * writes beyond the expected frame.  The 'max req' notes give the largest
 * number of elements an IRET frame needs at that operand size: 5 qwords for
 * 64-bit, 5 words for 16-bit; the 9 dwords for 32-bit presumably covers the
 * v8086 return frame (5 + 4 segment registers) — TODO confirm. */
typedef union IRETBUF
{
    uint64_t au64[6]; /* max req is 5 */
    uint32_t au32[12]; /* max req is 9 */
    uint16_t au16[24]; /* max req is 5 */
    uint8_t ab[48];
} IRETBUF;
/** Far pointer to an IRET frame buffer. */
typedef IRETBUF BS3_FAR *PIRETBUF;
3318
3319
3320static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3321 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3322{
3323 if (cbPop == 2)
3324 {
3325 pIretBuf->au16[0] = (uint16_t)uPC;
3326 pIretBuf->au16[1] = uCS;
3327 pIretBuf->au16[2] = (uint16_t)fEfl;
3328 pIretBuf->au16[3] = (uint16_t)uSP;
3329 pIretBuf->au16[4] = uSS;
3330 }
3331 else if (cbPop != 8)
3332 {
3333 pIretBuf->au32[0] = (uint32_t)uPC;
3334 pIretBuf->au16[1*2] = uCS;
3335 pIretBuf->au32[2] = (uint32_t)fEfl;
3336 pIretBuf->au32[3] = (uint32_t)uSP;
3337 pIretBuf->au16[4*2] = uSS;
3338 }
3339 else
3340 {
3341 pIretBuf->au64[0] = uPC;
3342 pIretBuf->au16[1*4] = uCS;
3343 pIretBuf->au64[2] = fEfl;
3344 pIretBuf->au64[3] = uSP;
3345 pIretBuf->au16[4*4] = uSS;
3346 }
3347}
3348
3349
3350static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3351 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3352{
3353 BS3TRAPFRAME TrapCtx;
3354 BS3REGCTX Ctx;
3355 BS3REGCTX CtxUdExpected;
3356 BS3REGCTX TmpCtx;
3357 BS3REGCTX TmpCtxExpected;
3358 uint8_t abLowUd[8];
3359 uint8_t abLowIret[8];
3360 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3361 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3362 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3363 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3364 int iRingDst;
3365 int iRingSrc;
3366 uint16_t uDplSs;
3367 uint16_t uRplCs;
3368 uint16_t uRplSs;
3369// int i;
3370 uint8_t BS3_FAR *pbTest;
3371
3372 NOREF(abLowUd);
3373#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3374 do { *(uint16_t)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3375#define IRETBUF_SET_REG(a_idx, a_uValue) \
3376 do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3377 if (cbPop == 2) *(uint16_t)pbTmp = (uint16_t)(a_uValue); \
3378 else if (cbPop != 8) *(uint32_t)pbTmp = (uint32_t)(a_uValue); \
3379 else *(uint64_t)pbTmp = (a_uValue); \
3380 } while (0)
3381
3382 /* make sure they're allocated */
3383 Bs3MemZero(&Ctx, sizeof(Ctx));
3384 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3385 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3386 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3387 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3388
3389 /*
3390 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3391 * copies of both iret and ud in the first 64KB of memory. The stack is
3392 * below 64KB, so we'll just copy the instructions onto the stack.
3393 */
3394 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3395 Bs3MemCpy(abLowIret, pfnIret, 4);
3396
3397 /*
3398 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3399 * - Point the context at our iret instruction.
3400 * - Point SS:xSP at pIretBuf.
3401 */
3402 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3403 if (!fUseLowCode)
3404 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3405 else
3406 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3407 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3408 g_uBs3TrapEipHint = Ctx.rip.u32;
3409 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3410
3411 /*
3412 * The first success (UD) context keeps the same code bit-count as the iret.
3413 */
3414 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3415 if (!fUseLowCode)
3416 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3417 else
3418 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3419 CtxUdExpected.rsp.u += cbSameCplFrame;
3420
3421 /*
3422 * Check that it works at all.
3423 */
3424 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3425 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3426
3427 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3428 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3429 g_usBs3TestStep++;
3430
3431 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3432 {
3433 /* Selectors are modified when switching rings, so we need to know
3434 what we're dealing with there. */
3435 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3436 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3437 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3438 if (Ctx.fs || Ctx.gs)
3439 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3440
3441 /*
3442 * Test returning to outer rings if protected mode.
3443 */
3444 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3445 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3446 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3447 {
3448 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3449 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3450 TmpCtx.es = TmpCtxExpected.es;
3451 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3452 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3453 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3454 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3455 g_usBs3TestStep++;
3456 }
3457
3458 /*
3459 * Check CS.RPL and SS.RPL.
3460 */
3461 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3462 {
3463 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3464 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3465 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3466 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3467 {
3468 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3469 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3470 TmpCtx.es = TmpCtxExpected.es;
3471 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3472 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3473 {
3474 uint16_t const uSrcEs = TmpCtx.es;
3475 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3476 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3477
3478 /* CS.RPL */
3479 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3480 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3481 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3482 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3483 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3484 else
3485 {
3486 if (iRingDst < iRingSrc)
3487 TmpCtx.es = 0;
3488 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3489 TmpCtx.es = uSrcEs;
3490 }
3491 g_usBs3TestStep++;
3492
3493 /* SS.RPL */
3494 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3495 {
3496 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3497 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3498 {
3499 /* SS.DPL (iRingDst == CS.DPL) */
3500 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3501 {
3502 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3503 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3504 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3505 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3506
3507 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3508 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3509 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3510 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3511 {
3512 if (iRingDst < iRingSrc)
3513 TmpCtx.es = 0;
3514 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3515 }
3516 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3517 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3518 else
3519 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3520 TmpCtx.es = uSrcEs;
3521 g_usBs3TestStep++;
3522 }
3523 }
3524
3525 TmpCtxExpected.ss = uSavedDstSs;
3526 }
3527 }
3528 }
3529 }
3530 }
3531
3532 /*
3533 * Special 64-bit checks.
3534 */
3535 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3536 {
3537 /* The VM flag is completely ignored. */
3538 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3539 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3540 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3541 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3542 g_usBs3TestStep++;
3543
3544 /* The NT flag can be loaded just fine. */
3545 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3546 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3547 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3548 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3549 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3550 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3551 g_usBs3TestStep++;
3552
3553 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3554 Ctx.rflags.u32 |= X86_EFL_NT;
3555 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3556 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3557 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3558 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3559 g_usBs3TestStep++;
3560
3561 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3562 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3563 if (pbTest != NULL)
3564 {
3565 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3566 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3567 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3568 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3569 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3570 g_usBs3TestStep++;
3571
3572 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3573 Bs3MemGuardedTestPageFree(pbTest);
3574 }
3575 Ctx.rflags.u32 &= ~X86_EFL_NT;
3576 }
3577}
3578
3579
3580BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3581{
3582 struct
3583 {
3584 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3585 IRETBUF IRetBuf;
3586 uint8_t abGuard[32];
3587 } uBuf;
3588 size_t cbUnused;
3589
3590 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3591 bs3CpuBasic2_SetGlobals(bMode);
3592
3593 /*
3594 * Primary instruction form.
3595 */
3596 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3597 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3598 if (BS3_MODE_IS_16BIT_CODE(bMode))
3599 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3600 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3601 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3602 else
3603 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3604
3605 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3606 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3607 - (uintptr_t)uBuf.abExtraStack;
3608 if (cbUnused < 2048)
3609 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3610
3611 /*
3612 * Secondary variation: opsize prefixed.
3613 */
3614 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3615 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3616 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3617 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3618 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3619 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3620 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3621 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3622 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3623 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3624 - (uintptr_t)uBuf.abExtraStack;
3625 if (cbUnused < 2048)
3626 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3627
3628 /*
3629 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3630 */
3631 if (BS3_MODE_IS_64BIT_CODE(bMode))
3632 {
3633 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3634 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3635 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3636 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3637 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3638 - (uintptr_t)uBuf.abExtraStack;
3639 if (cbUnused < 2048)
3640 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3641 }
3642
3643 return 0;
3644}
3645
3646
3647
3648/*********************************************************************************************************************************
3649* Non-far JMP & CALL Tests *
3650*********************************************************************************************************************************/
3651#define PROTO_ALL(a_Template) \
3652 FNBS3FAR a_Template ## _c16, \
3653 a_Template ## _c32, \
3654 a_Template ## _c64
3655PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
3656PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
3657PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
3658PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
3659PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
3660PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
3661PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
3662FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64;
3663PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
3664PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
3665PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
3666PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
3667PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
3668FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64;
3669
3670PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
3671PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
3672PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
3673PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
3674PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
3675PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
3676FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64;
3677PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
3678PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
3679PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
3680PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
3681FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64;
3682PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
3683PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
3684#undef PROTO_ALL
3685
3686FNBS3FAR bs3CpuBasic2_jmptext16_start;
3687
3688FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
3689FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
3690FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
3691FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
3692FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
3693FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
3694FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;
3695
3696FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
3697FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
3698FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
3699FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
3700FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
3701FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
3702FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3703
3704
3705
3706/**
3707 * Entrypoint for non-far JMP & CALL tests.
3708 *
3709 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3710 * @param bMode The CPU mode we're testing.
3711 *
3712 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3713 * with control registers and such.
3714 */
3715BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3716{
3717 BS3TRAPFRAME TrapCtx;
3718 BS3REGCTX Ctx;
3719 BS3REGCTX CtxExpected;
3720 unsigned iTest;
3721
3722 /* make sure they're allocated */
3723 Bs3MemZero(&Ctx, sizeof(Ctx));
3724 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3725 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3726
3727 bs3CpuBasic2_SetGlobals(bMode);
3728
3729 /*
3730 * Create a context.
3731 */
3732 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3733 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3734
3735 /*
3736 * 16-bit tests.
3737 *
3738 * When opsize is 16-bit relative jumps will do 16-bit calculations and
3739 * modify IP. This means that it is not possible to trigger a segment
3740 * limit #GP(0) when the limit is set to 0xffff.
3741 */
3742 if (BS3_MODE_IS_16BIT_CODE(bMode))
3743 {
3744 static struct
3745 {
3746 int8_t iWrap;
3747 bool fOpSizePfx;
3748 int8_t iGprIndirect;
3749 bool fCall;
3750 FPFNBS3FAR pfnTest;
3751 }
3752 const s_aTests[] =
3753 {
3754 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3755 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3756 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3757 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3758 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3759 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3760 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3761 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3762 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3763 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3764 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3765 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3766 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3767 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3768 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3769 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3770 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3771 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3772 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3773 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3774 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3775 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3776
3777 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3778 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3779 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3780 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3781
3782 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3783 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3784 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3785 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3786 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3787 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3788 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3789 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3790 };
3791
3792 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3793 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3794
3795 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3796 {
3797 uint64_t uGprSaved;
3798 if (s_aTests[iTest].iWrap == 0)
3799 {
3800 uint8_t const BS3_FAR *fpbCode;
3801 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3802 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
3803 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3804 }
3805 else
3806 {
3807 if (BS3_MODE_IS_RM_OR_V86(bMode))
3808 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3809 else
3810 Ctx.cs = BS3_SEL_SPARE_03;
3811 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3812 if (s_aTests[iTest].fOpSizePfx)
3813 CtxExpected.rip.u = Ctx.rip.u;
3814 else if (s_aTests[iTest].iWrap < 0)
3815 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3816 else
3817 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3818 }
3819 CtxExpected.cs = Ctx.cs;
3820 if (s_aTests[iTest].iGprIndirect >= 0)
3821 {
3822 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3823 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3824 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3825 }
3826 CtxExpected.rsp.u = Ctx.rsp.u;
3827 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3828 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3829 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3830
3831 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3832 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3833 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3834 else
3835 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3836 g_usBs3TestStep++;
3837
3838 /* Again single stepping: */
3839 //Bs3TestPrintf("stepping...\n");
3840 Bs3RegSetDr6(0);
3841 Ctx.rflags.u16 |= X86_EFL_TF;
3842 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3843 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3844 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3845 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3846 else
3847 {
3848 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3849 bs3CpuBasic2_CheckDr6InitVal();
3850 }
3851 Ctx.rflags.u16 &= ~X86_EFL_TF;
3852 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3853 g_usBs3TestStep++;
3854
3855 if (s_aTests[iTest].iGprIndirect >= 0)
3856 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3857 }
3858
3859 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3860 and run the backward wrapping tests. */
3861 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3862 {
3863 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3864 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3865 CtxExpected.rsp.u = Ctx.rsp.u;
3866 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3867 if (s_aTests[iTest].iWrap < 0)
3868 {
3869 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3870 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3871 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3872 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3873 g_usBs3TestStep++;
3874 }
3875
3876 /* Do another round where we put the limit in the middle of the UD2
3877 instruction we're jumping to: */
3878 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3879 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3880 if (s_aTests[iTest].iWrap < 0)
3881 {
3882 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3883 if (s_aTests[iTest].fOpSizePfx)
3884 CtxExpected.rip.u = Ctx.rip.u;
3885 else
3886 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3887 CtxExpected.rsp.u = Ctx.rsp.u;
3888 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3889 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3890 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3891 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3892 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3893 g_usBs3TestStep++;
3894 }
3895 }
3896
3897 }
3898 /*
3899 * 32-bit & 64-bit tests.
3900 *
3901 * When the opsize prefix is applied here, IP is updated and bits 63:16
3902 * cleared. However in 64-bit mode, Intel ignores the opsize prefix
3903 * whereas AMD doesn't and it works like you expect.
3904 */
3905 else
3906 {
3907 static struct
3908 {
3909 uint8_t cBits;
3910 bool fOpSizePfx;
3911 bool fIgnPfx;
3912 int8_t iGprIndirect;
3913 bool fCall;
3914 FPFNBS3FAR pfnTest;
3915 }
3916 const s_aTests[] =
3917 {
3918 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3919 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3920 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3921 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3922 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3923 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3924 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3925 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3926 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3927 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3928 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3929 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3930 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3931 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3932 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3933 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3934 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3935 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3936 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3937 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3938 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3939 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3940 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3941 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3942 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3943 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3944 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3945 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3946 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3947 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3948 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3949 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3950 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3951 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3952 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3953 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3954 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3955 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3956 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3957 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3958 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3959 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3960 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3961 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3962 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3963 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
3964 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3965 /* 64bit/AMD: Use the _c32 tests. */
3966 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3967 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3968 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3969 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3970 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3971 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3972 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3973 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3974 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
3975 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
3976 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
3977 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
3978 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
3979 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
3980 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
3981 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3982 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3983 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3984 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
3985 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
3986 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
3987 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
3988 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
3989 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
3990 };
3991 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
3992 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
3993 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
3994
3995 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
3996 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
3997 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
3998 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
3999 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4000 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4001 Bs3TestFailedF("Opsize overriden jumps are out of place: %#x LB %#z\n", offLow, cbLow);
4002 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4003 if (!fIgnPfx)
4004 {
4005 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4006 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4007 {
4008 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4009 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4010 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4011 pbCode16[offUd + 1] = 0xf1;
4012 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4013 pbLow[offUd + 1] = 0x0b;
4014 }
4015 }
4016
4017 /* Run the tests. */
4018 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4019 {
4020 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4021 {
4022 uint64_t uGprSaved;
4023 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4024 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4025 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4026 if (s_aTests[iTest].iGprIndirect >= 0)
4027 {
4028 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4029 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4030 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4031 }
4032 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4033 CtxExpected.rip.u &= UINT16_MAX;
4034 CtxExpected.rsp.u = Ctx.rsp.u;
4035 if (s_aTests[iTest].fCall)
4036 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4037 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4038
4039 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4040
4041 if (BS3_MODE_IS_16BIT_SYS(bMode))
4042 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4043 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4044
4045 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4046 g_usBs3TestStep++;
4047
4048 /* Again single stepping: */
4049 //Bs3TestPrintf("stepping...\n");
4050 Bs3RegSetDr6(0);
4051 Ctx.rflags.u16 |= X86_EFL_TF;
4052 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4053 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4054 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4055 Ctx.rflags.u16 &= ~X86_EFL_TF;
4056 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4057 g_usBs3TestStep++;
4058
4059 if (s_aTests[iTest].iGprIndirect >= 0)
4060 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4061 }
4062 }
4063
4064 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4065 }
4066
4067 return 0;
4068}
4069
4070
4071/*********************************************************************************************************************************
4072* FAR JMP & FAR CALL Tests *
4073*********************************************************************************************************************************/
4074#define PROTO_ALL(a_Template) \
4075 FNBS3FAR a_Template ## _c16, \
4076 a_Template ## _c32, \
4077 a_Template ## _c64
4078FNBS3FAR bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
4079PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
4080PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
4081PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
4082PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
4083PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
4084PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
4085PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);
4086
4087FNBS3FAR bs3CpuBasic2_callf_ptr_rm__ud2_c16;
4088PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
4089PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
4090PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
4091PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
4092PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
4093PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
4094PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);
4095
4096FNBS3FAR bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
4097PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
4098PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
4099PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
4100PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
4101PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
4102PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
4103PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
4104PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);
4105
4106FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
4107FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
4108FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
4109FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
4110FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
4111FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
4112FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
4113FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;
4114
4115FNBS3FAR bs3CpuBasic2_callf_mem_rm__ud2_c16;
4116PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
4117PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
4118PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
4119PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
4120PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
4121PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
4122PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
4123PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);
4124
4125FNBS3FAR bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
4126FNBS3FAR bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
4127FNBS3FAR bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
4128FNBS3FAR bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
4129FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
4130FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
4131FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
4132FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;
4133
4134#undef PROTO_ALL
4135
4136
4137
4138/**
4139 * Entrypoint for FAR JMP & FAR CALL tests.
4140 *
4141 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4142 * @param bMode The CPU mode we're testing.
4143 *
4144 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
4145 * with control registers and such.
4146 */
4147BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4148{
4149 BS3TRAPFRAME TrapCtx;
4150 BS3REGCTX Ctx;
4151 BS3REGCTX CtxExpected;
4152 unsigned iTest;
4153
4154 /* make sure they're allocated */
4155 Bs3MemZero(&Ctx, sizeof(Ctx));
4156 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4157 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4158
4159 bs3CpuBasic2_SetGlobals(bMode);
4160
4161 /*
4162 * Create a context.
4163 */
4164 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4165 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4166
4167 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4168 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4169
4170 /*
4171 * Set up spare selectors.
4172 */
4173 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4174 Bs3GdteSpare00.Gen.u1Long = 1;
4175
4176 /*
4177 * 16-bit tests.
4178 */
4179 if (BS3_MODE_IS_16BIT_CODE(bMode))
4180 {
4181 static struct
4182 {
4183 bool fRmOrV86;
4184 bool fCall;
4185 uint16_t uDstSel;
4186 uint8_t uDstBits;
4187 bool fOpSizePfx;
4188 FPFNBS3FAR pfnTest;
4189 }
4190 const s_aTests[] =
4191 {
4192 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4193 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4194 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4195 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4196 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4197 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4198 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4199 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4200
4201 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4202 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4203 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4204 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4205 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4206 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4207 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4208 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4209
4210 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4211 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4212 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4213 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4214 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4215 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4216 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4217 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4218 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4219
4220 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4221 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4222 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4223 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4224 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4225 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4226 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4227 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4228 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4229 };
4230 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4231
4232 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4233 if ( s_aTests[iTest].fRmOrV86 == fRmOrV86
4234 && (s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00 || !BS3_MODE_IS_64BIT_SYS(bMode))) /* skip it in LM16 for now*/
4235 {
4236 uint64_t const uSavedRsp = Ctx.rsp.u;
4237 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4238 uint8_t const BS3_FAR *fpbCode;
4239
4240 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4241 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4242 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4243 if ( s_aTests[iTest].uDstBits == 32
4244 || ( s_aTests[iTest].uDstBits == 64
4245 && !BS3_MODE_IS_16BIT_SYS(bMode)
4246 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4247 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4248 CtxExpected.cs = s_aTests[iTest].uDstSel;
4249 if (fGp)
4250 {
4251 CtxExpected.rip.u = Ctx.rip.u;
4252 CtxExpected.cs = Ctx.cs;
4253 }
4254 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4255 CtxExpected.rsp.u = Ctx.rsp.u;
4256 if (s_aTests[iTest].fCall && !fGp)
4257 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
4258 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4259 {
4260 if (BS3_MODE_IS_64BIT_SYS(bMode))
4261 CtxExpected.rip.u -= 1;
4262 else
4263 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4264 }
4265 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4266 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4267 if (!fGp)
4268 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4269 else
4270 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4271 Ctx.rsp.u = uSavedRsp;
4272 g_usBs3TestStep++;
4273
4274 /* Again single stepping: */
4275 //Bs3TestPrintf("stepping...\n");
4276 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4277 Ctx.rflags.u16 |= X86_EFL_TF;
4278 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4279 CtxExpected.rax.u = Ctx.rax.u;
4280 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4281 CtxExpected.rip.u -= 1;
4282 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4283 if (!fGp)
4284 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4285 else
4286 {
4287 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4288 bs3CpuBasic2_CheckDr6InitVal();
4289 }
4290 Ctx.rflags.u16 &= ~X86_EFL_TF;
4291 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4292 Ctx.rsp.u = uSavedRsp;
4293 g_usBs3TestStep++;
4294 }
4295 }
4296 /*
4297 * 32-bit tests.
4298 */
4299 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4300 {
4301 static struct
4302 {
4303 bool fCall;
4304 uint16_t uDstSel;
4305 uint8_t uDstBits;
4306 bool fOpSizePfx;
4307 FPFNBS3FAR pfnTest;
4308 }
4309 const s_aTests[] =
4310 {
4311 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4312 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4313 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4314 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4315 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4316 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4317 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4318
4319 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4320 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4321 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4322 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4323 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4324 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4325 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4326
4327 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4328 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4329 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4330 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4331 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4332 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4333 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4334 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4335
4336 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4337 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4338 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4339 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4340 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4341 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4342 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4343 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4344 };
4345
4346 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4347 if (s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00 || !BS3_MODE_IS_64BIT_SYS(bMode)) /* skip it in LM32 for now*/
4348 {
4349 uint64_t const uSavedRsp = Ctx.rsp.u;
4350 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4351 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4352
4353 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4354 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4355 if ( s_aTests[iTest].uDstBits == 16
4356 || ( s_aTests[iTest].uDstBits == 64
4357 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4358 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4359 CtxExpected.rip.u -= BS3_ADDR_BS3TEXT16;
4360 CtxExpected.cs = s_aTests[iTest].uDstSel;
4361 if (fGp)
4362 {
4363 CtxExpected.rip.u = Ctx.rip.u;
4364 CtxExpected.cs = Ctx.cs;
4365 }
4366 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4367 CtxExpected.rsp.u = Ctx.rsp.u;
4368 if (s_aTests[iTest].fCall && !fGp)
4369 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4370 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4371 {
4372 if (BS3_MODE_IS_64BIT_SYS(bMode))
4373 CtxExpected.rip.u -= 1;
4374 else
4375 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4376 }
4377 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4378 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4379 if (!fGp)
4380 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4381 else
4382 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4383 Ctx.rsp.u = uSavedRsp;
4384 g_usBs3TestStep++;
4385
4386 /* Again single stepping: */
4387 //Bs3TestPrintf("stepping...\n");
4388 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4389 Ctx.rflags.u16 |= X86_EFL_TF;
4390 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4391 CtxExpected.rax.u = Ctx.rax.u;
4392 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4393 CtxExpected.rip.u -= 1;
4394 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4395 if (!fGp)
4396 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4397 else
4398 {
4399 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4400 bs3CpuBasic2_CheckDr6InitVal();
4401 }
4402 Ctx.rflags.u16 &= ~X86_EFL_TF;
4403 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4404 Ctx.rsp.u = uSavedRsp;
4405 g_usBs3TestStep++;
4406 }
4407 }
4408 /*
4409 * 64-bit tests.
4410 */
4411 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4412 {
4413 static struct
4414 {
4415 bool fInvalid;
4416 bool fCall;
4417 uint16_t uDstSel;
4418 uint8_t uDstBits;
4419 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4420 int8_t fFix64OpSize;
4421 FPFNBS3FAR pfnTest;
4422 }
4423 const s_aTests[] =
4424 {
4425 /* invalid opcodes: */
4426 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4427 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4428 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4429 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4430 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4431 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4432 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4433
4434 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4435 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4436 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4437 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4438 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4439 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4440 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4441
4442 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4443 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4444 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4445 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4446 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4447 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4448 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4449 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4450
4451 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4452 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4453 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4454 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4455 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4456 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4457 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4458 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4459
4460 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4461 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4462 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4463 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4464 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4465 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4466 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4467 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4468
4469 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4470 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4471 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4472 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4473 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4474 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4475 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4476 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4477 };
4478 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4479 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4480
4481 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4482 {
4483 uint64_t const uSavedRsp = Ctx.rsp.u;
4484 bool const fUd = s_aTests[iTest].fInvalid;
4485 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4486 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4487
4488 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4489 continue;
4490
4491 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4492 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4493 CtxExpected.cs = s_aTests[iTest].uDstSel;
4494 if (s_aTests[iTest].uDstBits == 16)
4495 CtxExpected.rip.u &= UINT16_MAX;
4496 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4497 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4498
4499 if (fGp || fUd)
4500 {
4501 CtxExpected.rip.u = Ctx.rip.u;
4502 CtxExpected.cs = Ctx.cs;
4503 }
4504 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4505 CtxExpected.rsp.u = Ctx.rsp.u;
4506 if (s_aTests[iTest].fCall && !fGp && !fUd)
4507 {
4508 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4509 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
4510 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4511 }
4512 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4513 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4514 if (!fGp || fUd)
4515 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4516 else
4517 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4518 Ctx.rsp.u = uSavedRsp;
4519 g_usBs3TestStep++;
4520
4521 /* Again single stepping: */
4522 //Bs3TestPrintf("stepping...\n");
4523 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4524 Ctx.rflags.u16 |= X86_EFL_TF;
4525 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4526 CtxExpected.rax.u = Ctx.rax.u;
4527 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4528 if (fUd)
4529 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4530 else if (!fGp)
4531 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4532 else
4533 {
4534 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4535 bs3CpuBasic2_CheckDr6InitVal();
4536 }
4537 Ctx.rflags.u16 &= ~X86_EFL_TF;
4538 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4539 Ctx.rsp.u = uSavedRsp;
4540 g_usBs3TestStep++;
4541 }
4542 }
4543 else
4544 Bs3TestFailed("wtf?");
4545
4546 return 0;
4547}
4548
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette