VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@97624

Last change on this file since 97624 was 97623, checked in by vboxsync, 2 years ago

ValKit/bs3-cpu-basic-2: More far return testing. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 313.6 KB
 
1/* $Id: bs3-cpu-basic-2-x0.c 97623 2022-11-21 11:12:49Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45
46
47/*********************************************************************************************************************************
48* Defined Constants And Macros *
49*********************************************************************************************************************************/
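/** Checks that a_Actual equals a_Expected, reporting a formatted failure via
 * bs3CpuBasic2_FailedF() when they differ; a_szName and a_szFmt name and
 * format the member being checked. */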
50#undef CHECK_MEMBER
51#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
52 do \
53 { \
54 if ((a_Actual) == (a_Expected)) { /* likely */ } \
55 else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
56 } while (0)
57
58
59/** Indicating that we've got operand size prefix and that it matters. */
60#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
61/** Worker requires 386 or later. */
62#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
63
64
65/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
66 *
67 * These are flags, though we've precombined a few to shorten things down.
68 *
69 * @{ */
70#define MYOP_LD 0x1 /**< The instruction loads. */
71#define MYOP_ST 0x2 /**< The instruction stores */
72#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
73#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
74
75#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
76#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
77/** @} */
78
79
80/*********************************************************************************************************************************
81* Structures and Typedefs *
82*********************************************************************************************************************************/
83/** Near void pointer. */
84typedef void BS3_NEAR *NPVOID;
85
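/** Invalid descriptor type table entry: a 4-bit descriptor type value plus the
 * matching descriptor-type (S) bit to stuff into a test descriptor. */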
86typedef struct BS3CB2INVLDESCTYPE
87{
88 uint8_t u4Type;
89 uint8_t u1DescType;
90} BS3CB2INVLDESCTYPE;
91
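/** SIDT/SGDT/LIDT/LGDT test worker: description, far pointer to the code
 * snippet, its instruction length, whether it uses an SS segment prefix, the
 * code modes it applies to (BS3_MODE_CODE_XXX), and BS3CB2SIDTSGDT_F_XXX flags. */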
92typedef struct BS3CB2SIDTSGDT
93{
94 const char *pszDesc;
95 FPFNBS3FAR fpfnWorker;
96 uint8_t cbInstr;
97 bool fSs;
98 uint8_t bMode;
99 uint8_t fFlags;
100} BS3CB2SIDTSGDT;
101
102
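/** Test code snippet from bs3-cpu-basic-2-template.mac; judging by the names,
 * each snippet accesses memory at [ds:bx] (or the 32/64-bit equivalent) and
 * ends with an ud2 instruction. */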
103typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);
104
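/** Describes one test code snippet: pfn points to the code, fOp holds the
 * MYOP_XXX flags, cbMem is the number of bytes accessed and cbAlign the
 * natural alignment (presumably what the alignment-check tests key off). */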
105typedef struct FNBS3CPUBASIC2ACTSTCODE
106{
107 FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn;
108 uint8_t fOp;
109 uint16_t cbMem;
110 uint8_t cbAlign;
111 uint8_t offFaultInstr; /**< For skipping fninit with the fld test. */
112} FNBS3CPUBASIC2ACTSTCODE;
113typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;
114
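/** Per-code-mode table of the common test code snippets (mode, entry count,
 * pointer to the entries). */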
115typedef struct BS3CPUBASIC2ACTTSTCMNMODE
116{
117 uint8_t bMode;
118 uint16_t cEntries;
119 PCFNBS3CPUBASIC2ACTSTCODE paEntries;
120} BS3CPUBASIC2PFTTSTCMNMODE;
121typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
122
123
124/*********************************************************************************************************************************
125* External Symbols *
126*********************************************************************************************************************************/
127extern FNBS3FAR bs3CpuBasic2_Int80;
128extern FNBS3FAR bs3CpuBasic2_Int81;
129extern FNBS3FAR bs3CpuBasic2_Int82;
130extern FNBS3FAR bs3CpuBasic2_Int83;
131
132extern FNBS3FAR bs3CpuBasic2_ud2;
133#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
134extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
135
136extern FNBS3FAR bs3CpuBasic2_salc_ud2;
137
138extern FNBS3FAR bs3CpuBasic2_iret;
139extern FNBS3FAR bs3CpuBasic2_iret_opsize;
140extern FNBS3FAR bs3CpuBasic2_iret_rexw;
141
142extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
143extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
144extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
145extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
146extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
147extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
148extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
149extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
150extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
151extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
152extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
153extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
154
155extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
156extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
157extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
158extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
159extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
160extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
161extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
162extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
163extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
164extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
165extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
166extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
167
168extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
169extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
170extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
171extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
172extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
173extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
174extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
175extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
176extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
177extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
178extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
179extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
180extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
181
182extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
183extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
184extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
185extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
186extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
187extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
188extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
189extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
190extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
191extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
192extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
193extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
194
195
196/* bs3-cpu-basic-2-template.mac: */
197FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
198FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
201FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
206
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
208FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
211FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
216
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
218FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
221FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
222FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
223FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
224FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
225FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
226
227
228/*********************************************************************************************************************************
229* Global Variables *
230*********************************************************************************************************************************/
231static const char BS3_FAR *g_pszTestMode = (const char *)1;
232static uint8_t g_bTestMode = 1;
233static bool g_f16BitSys = 1;
234
235
236/** SIDT test workers. */
237static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
238{
239 { "sidt [bx]", bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
240 { "sidt [ss:bx]", bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
241 { "o32 sidt [bx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
242 { "o32 sidt [ss:bx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
243 { "sidt [ebx]", bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
244 { "sidt [ss:ebx]", bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
245 { "o16 sidt [ebx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
246 { "o16 sidt [ss:ebx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
247 { "sidt [rbx]", bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
248 { "o64 sidt [rbx]", bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
249 { "o32 sidt [rbx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
250 { "o32 o64 sidt [rbx]", bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
251};
252
253/** SGDT test workers. */
254static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
255{
256 { "sgdt [bx]", bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
257 { "sgdt [ss:bx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
258 { "o32 sgdt [bx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
259 { "o32 sgdt [ss:bx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
260 { "sgdt [ebx]", bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
261 { "sgdt [ss:ebx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
262 { "o16 sgdt [ebx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
263 { "o16 sgdt [ss:ebx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
264 { "sgdt [rbx]", bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
265 { "o64 sgdt [rbx]", bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
266 { "o32 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
267 { "o32 o64 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
268};
269
270/** LIDT test workers. */
271static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
272{
273 { "lidt [bx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
274 { "lidt [ss:bx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
275 { "o32 lidt [bx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
276 { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16, 27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
277 { "o32 lidt [ss:bx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
278 { "lidt [ebx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
279 { "lidt [ss:ebx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
280 { "o16 lidt [ebx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
281 { "o16 lidt [ss:ebx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
282 { "lidt [rbx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
283 { "o64 lidt [rbx]", bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
284 { "o32 lidt [rbx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
285 { "o32 o64 lidt [rbx]", bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
286};
287
288/** LGDT test workers. */
289static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
290{
291 { "lgdt [bx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
292 { "lgdt [ss:bx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
293 { "o32 lgdt [bx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
294 { "o32 lgdt [ss:bx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
295 { "lgdt [ebx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
296 { "lgdt [ss:ebx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
297 { "o16 lgdt [ebx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
298 { "o16 lgdt [ss:ebx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
299 { "lgdt [rbx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
300 { "o64 lgdt [rbx]", bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
301 { "o32 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
302 { "o32 o64 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
303};
304
305
306
307#if 0
308/** Table containing invalid CS selector types. */
309static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
310{
311 { X86_SEL_TYPE_RO, 1 },
312 { X86_SEL_TYPE_RO_ACC, 1 },
313 { X86_SEL_TYPE_RW, 1 },
314 { X86_SEL_TYPE_RW_ACC, 1 },
315 { X86_SEL_TYPE_RO_DOWN, 1 },
316 { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
317 { X86_SEL_TYPE_RW_DOWN, 1 },
318 { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
319 { 0, 0 },
320 { 1, 0 },
321 { 2, 0 },
322 { 3, 0 },
323 { 4, 0 },
324 { 5, 0 },
325 { 6, 0 },
326 { 7, 0 },
327 { 8, 0 },
328 { 9, 0 },
329 { 10, 0 },
330 { 11, 0 },
331 { 12, 0 },
332 { 13, 0 },
333 { 14, 0 },
334 { 15, 0 },
335};
336
337/** Table containing invalid SS selector types. */
338static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
339{
340 { X86_SEL_TYPE_EO, 1 },
341 { X86_SEL_TYPE_EO_ACC, 1 },
342 { X86_SEL_TYPE_ER, 1 },
343 { X86_SEL_TYPE_ER_ACC, 1 },
344 { X86_SEL_TYPE_EO_CONF, 1 },
345 { X86_SEL_TYPE_EO_CONF_ACC, 1 },
346 { X86_SEL_TYPE_ER_CONF, 1 },
347 { X86_SEL_TYPE_ER_CONF_ACC, 1 },
348 { 0, 0 },
349 { 1, 0 },
350 { 2, 0 },
351 { 3, 0 },
352 { 4, 0 },
353 { 5, 0 },
354 { 6, 0 },
355 { 7, 0 },
356 { 8, 0 },
357 { 9, 0 },
358 { 10, 0 },
359 { 11, 0 },
360 { 12, 0 },
361 { 13, 0 },
362 { 14, 0 },
363 { 15, 0 },
364};
365#endif
366
367
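/** Common test code snippets, 16-bit code variants. */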
368static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
369{
370 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, MYOP_LD, 2, 2 },
371 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, MYOP_ST, 2, 2 },
372 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, MYOP_LD_ST, 2, 2 },
373 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2, 2 },
374 { bs3CpuBasic2_div_ds_bx__ud2_c16, MYOP_LD_DIV, 2, 2 },
375 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
376 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
377 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
378 { bs3CpuBasic2_fxsave_ds_bx__ud2_c16, MYOP_ST | MYOP_AC_GP, 512, 16 },
379};
380
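/** Common test code snippets, 32-bit code variants. */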
381static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
382{
383 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, MYOP_LD, 4, 4 },
384 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, MYOP_ST, 4, 4 },
385 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, MYOP_LD_ST, 4, 4 },
386 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4, 4 },
387 { bs3CpuBasic2_div_ds_bx__ud2_c32, MYOP_LD_DIV, 4, 4 },
388 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
389 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
390 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
391 { bs3CpuBasic2_fxsave_ds_bx__ud2_c32, MYOP_ST | MYOP_AC_GP, 512, 16 },
392};
393
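/** Common test code snippets, 64-bit code variants. */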
394static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
395{
396 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, MYOP_LD, 8, 8 },
397 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, MYOP_ST, 8, 8 },
398 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, MYOP_LD_ST, 8, 8 },
399 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8, 8 },
400 { bs3CpuBasic2_div_ds_bx__ud2_c64, MYOP_LD_DIV, 8, 8 },
401 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
402 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
403 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
404 { bs3CpuBasic2_fxsave_ds_bx__ud2_c64, MYOP_ST | MYOP_AC_GP, 512, 16 },
405};
406
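/** Maps each code mode to its table of common test code snippets. */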
407static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
408{
409 { BS3_MODE_CODE_16, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
410 { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
411 { BS3_MODE_CODE_32, RT_ELEMENTS(g_aCmn32), g_aCmn32 },
412 { BS3_MODE_CODE_64, RT_ELEMENTS(g_aCmn64), g_aCmn64 },
413};
414
415
416/**
417 * Sets globals according to the mode.
418 *
419 * @param bTestMode The test mode.
420 */
421static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
422{
423 g_bTestMode = bTestMode;
424 g_pszTestMode = Bs3GetModeName(bTestMode);
425 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
426 g_usBs3TestStep = 0;
427}
428
429
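/** Returns the current 32-bit ESP from 16-bit code (Watcom-style inline
 * assembly): the .386 directive enables 386 registers, SP supplies the low
 * word in AX and the high word of ESP is returned in DX. */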
430uint32_t ASMGetESP(void);
431#pragma aux ASMGetESP = \
432 ".386" \
433 "mov ax, sp" \
434 "mov edx, esp" \
435 "shr edx, 16" \
436 value [ax dx] \
437 modify exact [ax dx];
438
439
440/**
441 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
442 * and g_pszTestMode.
443 */
444static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
445{
446 va_list va;
447
448 char szTmp[168];
449 va_start(va, pszFormat);
450 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
451 va_end(va);
452
453 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
454}
455
456
457#if 0
458/**
459 * Compares trap stuff.
460 */
461static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
462{
463 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
464 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
465 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
466 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
467 if (Bs3TestSubErrorCount() != cErrorsBefore)
468 {
469 Bs3TrapPrintFrame(pTrapCtx);
470#if 1
471 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
472 Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
473 ASMHalt();
474#endif
475 }
476}
477#endif
478
479
480#if 0
481/**
482 * Compares trap stuff.
483 */
484static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
485 uint8_t bXcpt, uint16_t uHandlerCs)
486{
487 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
488 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
489 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
490 CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
491 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
492 if (Bs3TestSubErrorCount() != cErrorsBefore)
493 {
494 Bs3TrapPrintFrame(pTrapCtx);
495#if 1
496 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
497 Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
498 ASMHalt();
499#endif
500 }
501}
502#endif
503
504/**
505 * Compares a CPU trap.
506 */
507static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
508 uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
509{
510 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
511 uint32_t fExtraEfl;
512
513 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
514 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
515
516 if ( g_f16BitSys
517 || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
518 || ( !f486ResumeFlagHint
519 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
520 fExtraEfl = 0;
521 else
522 fExtraEfl = X86_EFL_RF;
523#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
524 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
525#endif
526 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
527 if (Bs3TestSubErrorCount() != cErrorsBefore)
528 {
529 Bs3TrapPrintFrame(pTrapCtx);
530#if 1
531 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
532 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
533 ASMHalt();
534#endif
535 }
536}
537
538
539/**
540 * Compares \#GP trap.
541 */
542static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
543{
544 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
545}
546
547#if 0
548/**
549 * Compares \#NP trap.
550 */
551static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
552{
553 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
554}
555#endif
556
557/**
558 * Compares \#SS trap.
559 */
560static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
561{
562 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
563}
564
565#if 0
566/**
567 * Compares \#TS trap.
568 */
569static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
570{
571 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
572}
573#endif
574
575/**
576 * Compares \#PF trap.
577 */
578static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
579 uint64_t uCr2Expected, uint8_t cbIpAdjust)
580{
581 uint64_t const uCr2Saved = pStartCtx->cr2.u;
582 pStartCtx->cr2.u = uCr2Expected;
583 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
584 pStartCtx->cr2.u = uCr2Saved;
585}
586
587/**
588 * Compares \#UD trap.
589 */
590static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
591{
592 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
593 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
594}
595
596/**
597 * Compares \#AC trap.
598 */
599static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
600{
601 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
602}
603
604/**
605 * Compares \#DB trap.
606 */
607static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
608{
609 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
610 uint32_t const fDr6 = Bs3RegGetDr6();
611 fDr6Expect |= X86_DR6_RA1_MASK;
612 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
613
614 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);
615
616 if (Bs3TestSubErrorCount() > cErrorsBefore)
617 {
618#if 0
619 Bs3TestPrintf("Halting\n");
620 ASMHalt();
621#endif
622 }
623}
624
625
626/**
627 * Checks that DR6 has the initial value, i.e. is unchanged when another exception
628 * was raised before a \#DB could occur.
629 */
630static void bs3CpuBasic2_CheckDr6InitVal(void)
631{
632 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
633 uint32_t const fDr6 = Bs3RegGetDr6();
634 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
635 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
636 if (Bs3TestSubErrorCount() > cErrorsBefore)
637 {
638 Bs3TestPrintf("Halting\n");
639 ASMHalt();
640 }
641}
642
643#if 0 /* convert me */
644static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
645 PX86DESC const paIdt, unsigned const cIdteShift)
646{
647 BS3TRAPFRAME TrapCtx;
648 BS3REGCTX Ctx80;
649 BS3REGCTX Ctx81;
650 BS3REGCTX Ctx82;
651 BS3REGCTX Ctx83;
652 BS3REGCTX CtxTmp;
653 BS3REGCTX CtxTmp2;
654 PBS3REGCTX apCtx8x[4];
655 unsigned iCtx;
656 unsigned iRing;
657 unsigned iDpl;
658 unsigned iRpl;
659 unsigned i, j, k;
660 uint32_t uExpected;
661 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
662# if TMPL_BITS == 16
663 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
664 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
665# else
666 bool const f286 = false;
667 bool const f386Plus = true;
668 int rc;
669 uint8_t *pbIdtCopyAlloc;
670 PX86DESC pIdtCopy;
671 const unsigned cbIdte = 1 << (3 + cIdteShift);
672 RTCCUINTXREG uCr0Saved = ASMGetCR0();
673 RTGDTR GdtrSaved;
674# endif
675 RTIDTR IdtrSaved;
676 RTIDTR Idtr;
677
678 ASMGetIDTR(&IdtrSaved);
679# if TMPL_BITS != 16
680 ASMGetGDTR(&GdtrSaved);
681# endif
682
683 /* make sure they're allocated */
684 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
685 Bs3MemZero(&Ctx80, sizeof(Ctx80));
686 Bs3MemZero(&Ctx81, sizeof(Ctx81));
687 Bs3MemZero(&Ctx82, sizeof(Ctx82));
688 Bs3MemZero(&Ctx83, sizeof(Ctx83));
689 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
690 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
691
692 /* Context array. */
693 apCtx8x[0] = &Ctx80;
694 apCtx8x[1] = &Ctx81;
695 apCtx8x[2] = &Ctx82;
696 apCtx8x[3] = &Ctx83;
697
698# if TMPL_BITS != 16
699 /* Allocate memory for playing around with the IDT. */
700 pbIdtCopyAlloc = NULL;
701 if (BS3_MODE_IS_PAGED(g_bTestMode))
702 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
703# endif
704
705 /*
706 * IDT entries 80 thru 83 are assigned DPLs according to their numbers.
707 * (We'll be using more, but this'll do for now.)
708 */
709 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
710 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
711 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
712 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
713
714 Bs3RegCtxSave(&Ctx80);
715 Ctx80.rsp.u -= 0x300;
716 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
717# if TMPL_BITS == 16
718 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
719# elif TMPL_BITS == 32
720 g_uBs3TrapEipHint = Ctx80.rip.u32;
721# endif
722 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
723 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
724 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
725 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
726 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
727 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
728
729 /*
730 * Check that all the above gates work from ring-0.
731 */
732 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
733 {
734 g_usBs3TestStep = iCtx;
735# if TMPL_BITS == 32
736 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
737# endif
738 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
739 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
740 }
741
742 /*
743 * Check that the gate DPL checks works.
744 */
745 g_usBs3TestStep = 100;
746 for (iRing = 0; iRing <= 3; iRing++)
747 {
748 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
749 {
750 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
751 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
752# if TMPL_BITS == 32
753 g_uBs3TrapEipHint = CtxTmp.rip.u32;
754# endif
755 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
756 if (iCtx < iRing)
757 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
758 else
759 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
760 g_usBs3TestStep++;
761 }
762 }
763
764 /*
765 * Modify the gate CS value and run the handler at a different CPL.
766 * Throw RPL variations into the mix (completely ignored) together
767 * with gate presence.
768 * 1. CPL <= GATE.DPL
769 * 2. GATE.P
770 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
771 */
772 g_usBs3TestStep = 1000;
773 for (i = 0; i <= 3; i++)
774 {
775 for (iRing = 0; iRing <= 3; iRing++)
776 {
777 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
778 {
779# if TMPL_BITS == 32
780 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
781# endif
782 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
783 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
784
785 for (j = 0; j <= 3; j++)
786 {
787 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
788 for (k = 0; k < 2; k++)
789 {
790 g_usBs3TestStep++;
791 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
792 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
793 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
794 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
795 /*Bs3TrapPrintFrame(&TrapCtx);*/
796 if (iCtx < iRing)
797 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
798 else if (k == 0)
799 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
800 else if (i > iRing)
801 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
802 else
803 {
804 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
805 if (i <= iCtx && i <= iRing)
806 uExpectedCs |= i;
807 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
808 }
809 }
810 }
811
812 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
813 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
814 }
815 }
816 }
817 BS3_ASSERT(g_usBs3TestStep < 1600);
818
819 /*
820 * Various CS and SS related faults
821 *
822 * We temporarily reconfigure gates 80 and 83 with new CS selectors, the
823 * latter having a CS.DPL of 2 for testing ring transitions and SS loading
824 * without making it impossible to handle faults.
825 */
826 g_usBs3TestStep = 1600;
827 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
828 Bs3GdteTestPage00.Gen.u1Present = 0;
829 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
830 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
831
832 /* CS.PRESENT = 0 */
833 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
834 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
835 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
836 bs3CpuBasic2_FailedF("selector was accessed");
837 g_usBs3TestStep++;
838
839 /* Check that GATE.DPL is checked before CS.PRESENT. */
840 for (iRing = 1; iRing < 4; iRing++)
841 {
842 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
843 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
844 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
845 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
846 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
847 bs3CpuBasic2_FailedF("selector was accessed");
848 g_usBs3TestStep++;
849 }
850
851 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
852 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
853 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
854 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
855 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
856 bs3CpuBasic2_FailedF("CS selector was accessed");
857 g_usBs3TestStep++;
858 for (iDpl = 1; iDpl < 4; iDpl++)
859 {
860 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
861 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
862 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
863 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
864 bs3CpuBasic2_FailedF("CS selector was accessed");
865 g_usBs3TestStep++;
866 }
867
868 /* 1608: Check all the invalid CS selector types alone. */
869 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
870 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
871 {
872 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
873 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
874 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
875 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
876 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
877 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
878 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
879 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
880 g_usBs3TestStep++;
881
882 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
883 Bs3GdteTestPage00.Gen.u1Present = 0;
884 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
885 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
886 Bs3GdteTestPage00.Gen.u1Present = 1;
887 g_usBs3TestStep++;
888 }
889
890 /* Fix CS again. */
891 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
892
893 /* 1632: Test SS. */
894 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
895 {
896 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
897 uint16_t const uSavedSs2 = *puTssSs2;
898 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
899
900 /* Make the handler execute in ring-2. */
901 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
902 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
903 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
904
905 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
906 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
907 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
908 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
909 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
910 bs3CpuBasic2_FailedF("CS selector was not accessed");
911 g_usBs3TestStep++;
912
913 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
914 that we get #SS if the selector isn't present. */
915 i = 0; /* used for cycling thru invalid CS types */
916 for (k = 0; k < 10; k++)
917 {
918 /* k=0: present,
919 k=1: not-present,
920 k=2: present but very low limit,
921 k=3: not-present, low limit.
922 k=4: present, read-only.
923 k=5: not-present, read-only.
924 k=6: present, code-selector.
925 k=7: not-present, code-selector.
926 k=8: present, read-write / no access + system (=LDT).
927 k=9: not-present, read-write / no access + system (=LDT).
928 */
929 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
930 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
931 if (k >= 8)
932 {
933 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
934 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
935 }
936 else if (k >= 6)
937 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
938 else if (k >= 4)
939 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
940 else if (k >= 2)
941 {
942 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
943 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
944 Bs3GdteTestPage03.Gen.u1Granularity = 0;
945 }
946
947 for (iDpl = 0; iDpl < 4; iDpl++)
948 {
949 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
950
951 for (iRpl = 0; iRpl < 4; iRpl++)
952 {
953 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
954 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
955 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
956 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
957 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
958 if (iRpl != 2 || iRpl != iDpl || k >= 4)
959 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
960 else if (k != 0)
961 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
962 k == 2 /*f486ResumeFlagHint*/);
963 else
964 {
965 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
966 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
967 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
968 }
969 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
970 bs3CpuBasic2_FailedF("CS selector was not accessed");
971 if ( TrapCtx.bXcpt == 0x83
972 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
973 {
974 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
975 bs3CpuBasic2_FailedF("SS selector was not accessed");
976 }
977 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
978 bs3CpuBasic2_FailedF("SS selector was accessed");
979 g_usBs3TestStep++;
980
981 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
982 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
983 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
984 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
985 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
986 g_usBs3TestStep++;
987
988 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
989 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
990 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
991 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
992 g_usBs3TestStep++;
993
994 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
995 Bs3GdteTestPage02.Gen.u1Present = 0;
996 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
997 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
998 Bs3GdteTestPage02.Gen.u1Present = 1;
999 g_usBs3TestStep++;
1000
1001 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
1002 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
1003 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
1004 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1005 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1006 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
1007 Bs3GdteTestPage02.Gen.u1DescType = 1;
1008 g_usBs3TestStep++;
1009
1010 /* +5: Now, make the CS selector limit too small and check that it triggers after SS trouble.
1011 The 286 had a simpler approach to these GP(0). */
1012 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
1013 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
1014 Bs3GdteTestPage02.Gen.u1Granularity = 0;
1015 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1016 if (f286)
1017 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1018 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
1019 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1020 else if (k != 0)
1021 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1022 else
1023 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1024 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1025 g_usBs3TestStep++;
1026 }
1027 }
1028 }
1029
1030 /* Check all the invalid SS selector types alone. */
1031 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1032 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1033 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1034 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1035 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1036 g_usBs3TestStep++;
1037 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1038 {
1039 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1040 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1041 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1042 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1043 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1044 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1045 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1046 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1047 g_usBs3TestStep++;
1048 }
1049
1050 /*
1051 * Continue the SS experiments with an expand-down segment. We'll use
1052 * the same setup as we already have with gate 83h being DPL 3 and
1053 * having CS.DPL=2.
1054 *
1055 * Expand down segments are weird. The valid area is practically speaking
1056 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1057 * addresses from 0xffff thru 0x6001.
1058 *
1059 * So, with expand down segments we can more easily cut partially into the
1060 * pushing of the iret frame and trigger more interesting behavior than
1061 * with regular "expand up" segments where the whole pushing area is either
1062 * all fine or not fine.
1063 */
1064 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1065 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1066 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1067 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1068 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1069
1070 /* First test, limit = max --> no bytes accessible --> #GP */
1071 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1072 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1073
1074 /* Second test, limit = 0 --> all but byte zero accessible --> works */
1075 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1076 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1077 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1078 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1079
1080 /* Modify the gate handler to be a dummy that immediately does UD2
1081 and triggers #UD, then advance the limit down till we get the #UD. */
1082 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1083
1084 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1085 if (g_f16BitSys)
1086 {
1087 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1088 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1089 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1090 }
1091 else
1092 {
1093 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1094 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1095 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1096 }
1097 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1098 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1099 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1100 CtxTmp2.bCpl = 2;
1101
1102 /* test run. */
1103 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1104 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1105 g_usBs3TestStep++;
1106
1107 /* Real run. */
1108 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1109 while (i-- > 0)
1110 {
1111 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1112 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1113 if (i > 0)
1114 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1115 else
1116 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1117 g_usBs3TestStep++;
1118 }
1119
1120 /* Do a run where we do the same-ring kind of access. */
1121 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1122 if (g_f16BitSys)
1123 {
1124 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1125 i = 2*3 - 1;
1126 }
1127 else
1128 {
1129 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1130 i = 4*3 - 1;
1131 }
1132 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1133 CtxTmp2.ds = CtxTmp.ds;
1134 CtxTmp2.es = CtxTmp.es;
1135 CtxTmp2.fs = CtxTmp.fs;
1136 CtxTmp2.gs = CtxTmp.gs;
1137 while (i-- > 0)
1138 {
1139 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1140 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1141 if (i > 0)
1142 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1143 else
1144 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1145 g_usBs3TestStep++;
1146 }
1147
1148 *puTssSs2 = uSavedSs2;
1149 paIdt[0x83 << cIdteShift] = SavedGate83;
1150 }
1151 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1152 BS3_ASSERT(g_usBs3TestStep < 3000);
1153
1154 /*
1155 * Modify the gate CS value with a conforming segment.
1156 */
1157 g_usBs3TestStep = 3000;
1158 for (i = 0; i <= 3; i++) /* cs.dpl */
1159 {
1160 for (iRing = 0; iRing <= 3; iRing++)
1161 {
1162 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1163 {
1164 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1165 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1166# if TMPL_BITS == 32
1167 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1168# endif
1169
1170 for (j = 0; j <= 3; j++) /* rpl */
1171 {
1172 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1173 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1174 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1175 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1176 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1177 /*Bs3TrapPrintFrame(&TrapCtx);*/
1178 g_usBs3TestStep++;
1179 if (iCtx < iRing)
1180 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1181 else if (i > iRing)
1182 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1183 else
1184 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1185 }
1186 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1187 }
1188 }
1189 }
1190 BS3_ASSERT(g_usBs3TestStep < 3500);
1191
1192 /*
1193 * The gates must be 64-bit in long mode.
1194 */
1195 if (cIdteShift != 0)
1196 {
1197 g_usBs3TestStep = 3500;
1198 for (i = 0; i <= 3; i++)
1199 {
1200 for (iRing = 0; iRing <= 3; iRing++)
1201 {
1202 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1203 {
1204 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1205 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1206
1207 for (j = 0; j < 2; j++)
1208 {
1209 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1210 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1211 g_usBs3TestStep++;
1212 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1213 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1214 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1215 /*Bs3TrapPrintFrame(&TrapCtx);*/
1216 if (iCtx < iRing)
1217 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1218 else
1219 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1220 }
1221 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1222 }
1223 }
1224 }
1225 BS3_ASSERT(g_usBs3TestStep < 4000);
1226 }
1227
1228 /*
1229 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1230 */
1231 g_usBs3TestStep = 5000;
1232 i = (0x80 << (cIdteShift + 3)) - 1;
1233 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1234 k = (0x83 << (cIdteShift + 3)) - 1;
1235 for (; i <= k; i++, g_usBs3TestStep++)
1236 {
1237 Idtr = IdtrSaved;
1238 Idtr.cbIdt = i;
1239 ASMSetIDTR(&Idtr);
1240 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1241 if (i < j)
1242 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1243 else
1244 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1245 }
1246 ASMSetIDTR(&IdtrSaved);
1247 BS3_ASSERT(g_usBs3TestStep < 5100);
1248
1249# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1250
1251 /*
1252 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1253 * first page and 0x81 is on the second page. We then proceed to move
1254 * it down byte by byte to check that any inaccessible byte means #PF.
1255 *
1256 * Note! We must reload the alternative IDTR for each run as any kind of
1257 * printing to the screen (like error reporting) will cause a switch
1258 * to real mode and back, reloading the default IDTR.
1259 */
1260 g_usBs3TestStep = 5200;
1261 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1262 {
1263 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1264 for (j = 0; j < cbIdte; j++)
1265 {
1266 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1267 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1268
1269 Idtr.cbIdt = IdtrSaved.cbIdt;
1270 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1271
1272 ASMSetIDTR(&Idtr);
1273 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1274 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1275 g_usBs3TestStep++;
1276
1277 ASMSetIDTR(&Idtr);
1278 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1279 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1280 g_usBs3TestStep++;
1281
1282 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1283 if (RT_SUCCESS(rc))
1284 {
1285 ASMSetIDTR(&Idtr);
1286 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1287 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1288 g_usBs3TestStep++;
1289
1290 ASMSetIDTR(&Idtr);
1291 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1292 if (f486Plus)
1293 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1294 else
1295 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1296 g_usBs3TestStep++;
1297
1298 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1299
1300 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1301 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1302 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1303 if (RT_SUCCESS(rc))
1304 {
1305 ASMSetIDTR(&Idtr);
1306 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1307 if (f486Plus)
1308 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1309 else
1310 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1311 g_usBs3TestStep++;
1312
1313 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1314 }
1315 }
1316 else
1317 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1318
1319 ASMSetIDTR(&IdtrSaved);
1320 }
1321 }
1322
1323 /*
1324 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1325 */
1326 g_usBs3TestStep = 5300;
1327 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1328 {
1329 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1330 Idtr.cbIdt = IdtrSaved.cbIdt;
1331 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1332
1333 ASMSetIDTR(&Idtr);
1334 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1335 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1336 g_usBs3TestStep++;
1337
1338 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1339 if (RT_SUCCESS(rc))
1340 {
1341 ASMSetIDTR(&Idtr);
1342 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1343 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1344 g_usBs3TestStep++;
1345
1346 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1347 }
1348 ASMSetIDTR(&IdtrSaved);
1349 }
1350
1351 /*
1352 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1353 * with interrupt gates 80h and 83h, respectively.
1354 */
1355/** @todo Throw in SS.u1Accessed too. */
1356 g_usBs3TestStep = 5400;
1357 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1358 {
1359 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1360 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1361 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1362
1363 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1364 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1365 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1366
1367 /* Check that the CS.A bit is being set on a general basis and that
1368 the special CS values work with our generic handler code. */
1369 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1370 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1371 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1372 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1373 g_usBs3TestStep++;
1374
1375 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1376 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1377 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1378 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1379 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1380 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1381 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1382 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1383 g_usBs3TestStep++;
1384
1385 /*
1386 * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1387 * fault due to the RW bit being zero.
1388 * (On the 80486 and later we check both with and without the WP bit set.)
1389 */
1390 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1391 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1392
1393 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1394 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1395 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1396 if (RT_SUCCESS(rc))
1397 {
1398 /* ring-0 handler */
1399 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1400 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1401 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1402 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1403 g_usBs3TestStep++;
1404
1405 /* ring-3 handler */
1406 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1407 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1408 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1409 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1410 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1411 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1412 g_usBs3TestStep++;
1413
1414 /* clear WP and repeat the above. */
1415 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1416 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1417 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1418 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1419
1420 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1421 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1422 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1423 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1424 g_usBs3TestStep++;
1425
1426 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1427 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1428 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1429 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1430 g_usBs3TestStep++;
1431
1432 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1433 }
1434
1435 ASMSetCR0(uCr0Saved);
1436
1437 /*
1438 * While we're here, check that if the CS GDT entry lives on a non-present
1439 * page we do get a #PF with the right error code and CR2.
1440 */
1441 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1442 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1443 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1444 if (RT_SUCCESS(rc))
1445 {
1446 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1447 if (f486Plus)
1448 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1449 else
1450 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1451 g_usBs3TestStep++;
1452
1453 /* Do it from ring-3 to check the ErrCd, which, it turns out, doesn't set X86_TRAP_PF_US. */
1454 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1455 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1456 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1457
1458 if (f486Plus)
1459 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1460 else
1461 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1462 g_usBs3TestStep++;
1463
1464 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1465 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1466 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1467 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1468 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1469 }
1470
1471 /* restore */
1472 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1473 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1474 }
1475
1476# endif /* 32 || 64 */
1477
1478 /*
1479 * Check broad EFLAGS effects.
1480 */
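 /* A summary of what the uExpected mask below encodes (an interpretation of
    the checks, not a spec quote): the handler must see X86_EFL_CF/PF/AF/ZF/SF,
    DF, OF, IOPL, NT, VM, AC, VIF/VIP and ID arrive unchanged, while RF must
    arrive cleared; TF and IF are deliberately never set by this loop, so they
    aren't exercised here. */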
1481 g_usBs3TestStep = 5600;
1482 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1483 {
1484 for (iRing = 0; iRing < 4; iRing++)
1485 {
1486 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1487 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1488
1489 /* all set */
1490 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1491 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1492 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1493 if (f486Plus)
1494 CtxTmp.rflags.u32 |= X86_EFL_AC;
1495 if (f486Plus && !g_f16BitSys)
1496 CtxTmp.rflags.u32 |= X86_EFL_RF;
1497 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1498 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1499 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1500 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1501
1502 if (iCtx >= iRing)
1503 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1504 else
1505 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1506 uExpected = CtxTmp.rflags.u32
1507 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1508 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1509 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1510 if (TrapCtx.fHandlerRfl != uExpected)
1511 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1512 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1513 g_usBs3TestStep++;
1514
1515 /* all cleared */
1516 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1517 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1518 else
1519 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1520 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1521 if (iCtx >= iRing)
1522 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1523 else
1524 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1525 uExpected = CtxTmp.rflags.u32;
1526 if (TrapCtx.fHandlerRfl != uExpected)
1527 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1528 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1529 g_usBs3TestStep++;
1530 }
1531 }
1532
1533/** @todo CS.LIMIT / canonical(CS) */
1534
1535
1536 /*
1537 * Check invalid gate types.
1538 */
1539 g_usBs3TestStep = 32000;
1540 for (iRing = 0; iRing <= 3; iRing++)
1541 {
1542 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1543 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1544 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1545 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1546 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1547 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1548 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1549 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1550 /*286:*/ 12, 14, 15 };
1551 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1552 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1553 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1554
1555
1556 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1557 {
1558 unsigned iType;
1559
1560 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1561 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1562# if TMPL_BITS == 32
1563 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1564# endif
1565 for (iType = 0; iType < cInvTypes; iType++)
1566 {
1567 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1568 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1569 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1570
1571 for (i = 0; i < 4; i++)
1572 {
1573 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1574 {
1575 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1576 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1577 : s_auCSes[j] | i;
1578 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1579 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1580 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1581 g_usBs3TestStep++;
1582 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1583
1584 /* Mark it not-present to check that invalid type takes precedence. */
1585 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1586 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1587 g_usBs3TestStep++;
1588 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1589 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1590 }
1591 }
1592
1593 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1594 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1595 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1596 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1597 }
1598 }
1599 }
1600 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1601
1602
1603 /** @todo
1604 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1605 * - Quickly generate all faults.
1606 * - All the peculiarities of v8086 mode.
1607 */
1608
1609# if TMPL_BITS != 16
1610 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1611# endif
1612}
1613#endif /* convert me */
1614
1615
1616static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
1617 RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
1618{
1619 BS3TRAPFRAME TrapCtx;
1620 BS3REGCTX Ctx;
1621 BS3REGCTX CtxUdExpected;
1622 uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;
1623 uint8_t iRing;
1624 uint16_t iTest;
1625
1626 /* make sure they're allocated */
1627 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1628 Bs3MemZero(&Ctx, sizeof(Ctx));
1629 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1630
1631 /*
1632 * Test all relevant rings.
1633 *
1634 * The memory operand is ds:xBX, so point it to pbBuf.
1635 * The test snippets mostly use xAX as operand, with the div
1636 * one also using xDX, so make sure they make some sense.
1637 */
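 /* A rough sketch of the dispatch applied after each Bs3TrapSetJmpAndRestore
    call below, using the local names (a summary only, the exact conditions
    live in the loop):
        fExpectAc = fAm                              // CR0.AM was set by the caller
                 && fAc                              // EFLAGS.AC set in this round
                 && iRing == 3                       // alignment checks only at CPL 3
                 && (offMem & (cbAlign - 1)) != 0;   // operand actually misaligned
    Otherwise the snippet is expected to run to its trailing UD2, to raise #PF
    in the page-alias rounds (fPf), or #GP for the MYOP_AC_GP (FXSAVE) cases. */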
1638 Bs3RegCtxSaveEx(&Ctx, bMode, 512);
1639
1640 Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */
1641
1642 for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
1643 {
1644 uint32_t uEbx;
1645 uint8_t fAc;
1646
1647 if (!BS3_MODE_IS_RM_OR_V86(bMode))
1648 Bs3RegCtxConvertToRingX(&Ctx, iRing);
1649
1650 if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
1651 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
1652 else
1653 {
1654 /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
1655 Ctx.ds = BS3_FP_SEG(pbBuf);
1656 Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
1657 }
1658 uEbx = Ctx.rbx.u32;
1659
1660 Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
1661 ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
1662 Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */
1663
1664 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1665
1666 /*
1667 * AC flag loop.
1668 */
1669 for (fAc = 0; fAc < 2; fAc++)
1670 {
1671 if (fAc)
1672 Ctx.rflags.u32 |= X86_EFL_AC;
1673 else
1674 Ctx.rflags.u32 &= ~X86_EFL_AC;
1675
1676 /*
1677 * Loop over the test snippets.
1678 */
1679 for (iTest = 0; iTest < pCmn->cEntries; iTest++)
1680 {
1681 uint8_t const fOp = pCmn->paEntries[iTest].fOp;
1682 uint16_t const cbMem = pCmn->paEntries[iTest].cbMem;
1683 uint8_t const cbAlign = pCmn->paEntries[iTest].cbAlign;
1684 uint16_t const cbMax = cbCacheLine + cbMem;
1685 uint16_t offMem;
1686 uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
1687 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
1688 CtxUdExpected.rip = Ctx.rip;
1689 CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
1690 CtxUdExpected.cs = Ctx.cs;
1691 CtxUdExpected.rflags = Ctx.rflags;
1692 if (bMode == BS3_MODE_RM)
1693 CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
1694 CtxUdExpected.rdx = Ctx.rdx;
1695 CtxUdExpected.rax = Ctx.rax;
1696 if (fOp & MYOP_LD)
1697 {
1698 switch (cbMem)
1699 {
1700 case 2:
1701 CtxUdExpected.rax.u16 = 0x0101;
1702 break;
1703 case 4:
1704 CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
1705 break;
1706 case 8:
1707 CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
1708 break;
1709 }
1710 }
1711
1712 /*
1713 * Buffer misalignment loop.
1714 * Note! We must make sure to cross a cache line boundary here so that
1715 * the split-lock scenario is covered too. (The buffer is cache
1716 * line aligned.)
1717 */
1718 for (offMem = 0; offMem < cbMax; offMem++)
1719 {
1720 bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
1721 unsigned offBuf = cbMax + cbMem * 2;
1722 while (offBuf-- > 0)
1723 pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */
1724
1725 CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
1726 if (BS3_MODE_IS_16BIT_SYS(bMode))
1727 g_uBs3TrapEipHint = Ctx.rip.u32;
1728
1729 //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
1730 // iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());
1731
1732 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1733
1734 if ( (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
1735 && fMisaligned
1736 && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
1737 {
1738 if (fAc && bMode == BS3_MODE_RM)
1739 TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
1740 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1741 }
1742 else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
1743 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
1744 X86_TRAP_PF_P | X86_TRAP_PF_US
1745 | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
1746 uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
1747 pCmn->paEntries[iTest].offFaultInstr);
1748 else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
1749 {
1750 if (fOp & MYOP_EFL)
1751 {
1752 CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
1753 CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
1754 }
1755 if (fOp == MYOP_LD_DIV)
1756 {
1757 CtxUdExpected.rax = TrapCtx.Ctx.rax;
1758 CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
1759 }
1760 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1761 }
1762 else
1763 bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);
1764
1765 g_usBs3TestStep++;
1766 }
1767 }
1768 }
1769 }
1770}
1771
1772
1773/**
1774 * Entrypoint for \#AC tests.
1775 *
1776 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1777 * @param bMode The CPU mode we're testing.
1778 *
1779 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1780 * with control registers and such.
1781 */
1782BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1783{
1784 unsigned cbCacheLine = 128; /** @todo detect */
1785 uint8_t BS3_FAR *pbBufAlloc;
1786 uint8_t BS3_FAR *pbBuf;
1787 unsigned idxCmnModes;
1788 uint32_t fCr0;
1789
1790 /*
1791 * Skip if 386 or older.
1792 */
1793 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1794 {
1795 Bs3TestSkipped("#AC test requires 486 or later");
1796 return BS3TESTDOMODE_SKIPPED;
1797 }
1798
1799 bs3CpuBasic2_SetGlobals(bMode);
1800
1801 /* Get us a page-aligned buffer (and thus also a 64-byte aligned one). */
1802 pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
1803 if (!pbBufAlloc)
1804 return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
1805 if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
1806 pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
1807 BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
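 /* Worked example of the adjustment above (illustration only): if the
    allocation comes back at segment offset 0x1234, then
    X86_PAGE_SIZE - (0x1234 & X86_PAGE_OFFSET_MASK) = 0x1000 - 0x234 = 0xdcc,
    so pbBuf lands on the next page boundary at offset 0x2000. */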
1808 //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1809
1810 /* Find the g_aCmnModes entry. */
1811 idxCmnModes = 0;
1812 while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1813 idxCmnModes++;
1814 //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1815
1816 /* First round is w/o alignment checks enabled. */
1817 //Bs3TestPrintf("round 1\n");
1818 fCr0 = Bs3RegGetCr0();
1819 BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1820 Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1821#if 1
1822 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1823#endif
1824
1825 /* The second round is with alignment checks enabled. */
1826#if 1
1827 //Bs3TestPrintf("round 2\n");
1828 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1829 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1830#endif
1831
1832#if 1
1833 /* The third and fourth round access the buffer via a page alias that's not
1834 accessible from ring-3. The third round has ACs disabled and the fourth
1835 has them enabled. */
1836 if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
1837 {
1838 /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
1839 /** @todo the aliasing is not necessary any more... */
1840 int rc;
1841 RTCCUINTXREG uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
1842 uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
1843 rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
1844 X86_PTE_P | X86_PTE_RW);
1845 if (RT_SUCCESS(rc))
1846 {
1847 /* We 'misalign' the segment base here to make sure it's the final
1848 address that gets alignment checked and not just the operand value. */
1849 RTCCUINTXREG uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
1850 uint8_t BS3_FAR *pbBufAlias = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
1851 Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);
1852
1853 //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
1854 Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
1855 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
1856 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1857
1858 //Bs3TestPrintf("round 4\n");
1859 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1860 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
1861 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1862
1863 Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
1864 }
1865 else
1866 Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
1867 }
1868#endif
1869
1870 Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
1871 Bs3RegSetCr0(fCr0);
1872 return 0;
1873}
1874
1875
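/*
 * For reference (a hedged summary, not taken from this file alone): the memory
 * image that SIDT/SGDT stores and LIDT/LGDT loads is a 16-bit limit followed
 * by the table base:
 *
 *      bytes 0..1   limit
 *      bytes 2..5   base in 16-bit and 32-bit code (on the 286 the top base
 *                   byte reads back as 0xff, see the f286 checks below)
 *      bytes 2..9   base in 64-bit code
 *
 * which is where the cbIdtr = 2+4 / 2+8 values in the workers below come from.
 */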
1876/**
1877 * Executes one round of SIDT and SGDT tests using one assembly worker.
1878 *
1879 * This is written with driving everything from the 16-bit or 32-bit worker in
1880 * mind, i.e. not assuming the test bitcount is the same as the current.
1881 */
1882static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1883 uint8_t const *pbExpected)
1884{
1885 BS3TRAPFRAME TrapCtx;
1886 BS3REGCTX Ctx;
1887 BS3REGCTX CtxUdExpected;
1888 BS3REGCTX TmpCtx;
1889 uint8_t const cbBuf = 8*2; /* test buffer area */
1890 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1891 uint8_t BS3_FAR *pbBuf = abBuf;
1892 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1893 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1894 uint8_t bFiller;
1895 int off;
1896 int off2;
1897 unsigned cb;
1898 uint8_t BS3_FAR *pbTest;
1899
1900 /* make sure they're allocated */
1901 Bs3MemZero(&Ctx, sizeof(Ctx));
1902 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1903 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1904 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1905 Bs3MemZero(&abBuf, sizeof(abBuf));
1906
1907 /* Create a context, give this routine some more stack space, point the context
1908 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1909 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1910 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1911 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1912 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1913 g_uBs3TrapEipHint = Ctx.rip.u32;
1914 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1915 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1916
1917 /* For successful SIDT attempts, we'll stop at the UD2. */
1918 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1919 CtxUdExpected.rip.u += pWorker->cbInstr;
1920
1921 /*
1922 * Check that it works at all and that only the bytes we expect get written to.
1923 */
1924 /* First with zero buffer. */
1925 Bs3MemZero(abBuf, sizeof(abBuf));
1926 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1927 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1928 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1929 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1930 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1931 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1932 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1933 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1934 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1935 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1936 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1937 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1938 g_usBs3TestStep++;
1939
1940 /* Again with a buffer filled with a byte not occurring in the previous result. */
1941 bFiller = 0x55;
1942 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1943 bFiller++;
1944 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1945 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1946 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1947
1948 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1949 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1950 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1951 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1952 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1953 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1954 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1955 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1956 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1957 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1958 g_usBs3TestStep++;
1959
1960 /*
1961 * Slide the buffer along 8 bytes to cover misalignment.
1962 */
1963 for (off = 0; off < 8; off++)
1964 {
1965 pbBuf = &abBuf[off];
1966 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1967 CtxUdExpected.rbx.u = Ctx.rbx.u;
1968
1969 /* First with zero buffer. */
1970 Bs3MemZero(abBuf, sizeof(abBuf));
1971 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1972 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1973 if (off > 0 && !ASMMemIsZero(abBuf, off))
1974 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1975 cbIdtr, off, off + cbBuf, abBuf);
1976 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1977 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1978 cbIdtr, off, off + cbBuf, abBuf);
1979 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1980 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1981 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1982 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1983 g_usBs3TestStep++;
1984
1985 /* Again with a buffer filled with a byte not occurring in the previous result. */
1986 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1987 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1988 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1989 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1990 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1991 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1992 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1993 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1994 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1995 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1996 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1997 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1998 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1999 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
2000 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2001 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2002 g_usBs3TestStep++;
2003 }
2004 pbBuf = abBuf;
2005 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2006 CtxUdExpected.rbx.u = Ctx.rbx.u;
2007
2008 /*
2009 * Play with the selector limit if the target mode supports limit checking.
2010 * We use BS3_SEL_TEST_PAGE_00 for this.
2011 */
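 /* Sketch of the expand-up success criterion the checks below rely on (an
    interpretation of the code, not an authoritative rule): the store succeeds
    when its last byte is within the limit, i.e.
        off + cbIdtr - 1 <= cbLimit   <=>   off + cbIdtr <= cbLimit + 1
    otherwise we expect #GP(0), or #SS(0) when the operand is addressed via SS. */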
2012 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2013 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2014 {
2015 uint16_t cbLimit;
2016 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2017 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2018 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2019 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2020 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2021 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2022
2023 if (pWorker->fSs)
2024 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2025 else
2026 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2027
2028 /* Expand up (normal). */
2029 for (off = 0; off < 8; off++)
2030 {
2031 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2032 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2033 {
2034 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2035 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2036 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2037 if (off + cbIdtr <= cbLimit + 1)
2038 {
2039 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2040 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2041 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2042 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2043 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2044 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2045 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2046 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2047 }
2048 else
2049 {
2050 if (pWorker->fSs)
2051 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2052 else
2053 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2054 if (off + 2 <= cbLimit + 1)
2055 {
2056 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2057 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2058 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2059 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2060 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2061 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2062 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2063 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2064 }
2065 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2066 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2067 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2068 }
2069
2070 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2071 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2072 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2073 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2074 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2075 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2076
2077 g_usBs3TestStep++;
2078 }
2079 }
2080
2081 /* Expand down (weird). Inverted valid area compared to expand up,
2082 so a limit of zero gives us a valid range of 0001..0ffffh (instead of
2083 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2084 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2085 (because in a normal expand-up segment a limit of 0ffffh means all 64KB are
2086 accessible). */
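 /* Sketch of the expand-down criterion used below (again an interpretation,
    not a spec quote): valid offsets are those strictly above the limit, so
    with these small cbLimit values the store is expected to succeed when
    off > cbLimit and to #GP/#SS otherwise - the mirror image of the expand-up
    case above. */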
2087 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2088 for (off = 0; off < 8; off++)
2089 {
2090 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2091 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2092 {
2093 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2094 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2095 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2096
2097 if (off > cbLimit)
2098 {
2099 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2100 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2101 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2102 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2103 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2104 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2105 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2106 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2107 }
2108 else
2109 {
2110 if (pWorker->fSs)
2111 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2112 else
2113 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2114 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2115 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2116 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2117 }
2118
2119 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2120 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2121 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2122 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2123 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2124 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2125
2126 g_usBs3TestStep++;
2127 }
2128 }
2129
2130 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2131 CtxUdExpected.rbx.u = Ctx.rbx.u;
2132 CtxUdExpected.ss = Ctx.ss;
2133 CtxUdExpected.ds = Ctx.ds;
2134 }
2135
2136 /*
2137 * Play with the paging.
2138 */
2139 if ( BS3_MODE_IS_PAGED(bTestMode)
2140 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2141 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2142 {
2143 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2144
2145 /*
2146 * Slide the buffer towards the trailing guard page. We'll observe the
2147 * first word being written entirely separately from the 2nd dword/qword.
2148 */
2149 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2150 {
2151 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2152 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2153 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2154 if (off + cbIdtr <= X86_PAGE_SIZE)
2155 {
2156 CtxUdExpected.rbx = Ctx.rbx;
2157 CtxUdExpected.ss = Ctx.ss;
2158 CtxUdExpected.ds = Ctx.ds;
2159 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2160 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2161 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2162 }
2163 else
2164 {
2165 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2166 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2167 if ( off <= X86_PAGE_SIZE - 2
2168 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2169 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2170 pbExpected, &pbTest[off], off);
2171 if ( off < X86_PAGE_SIZE - 2
2172 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2173 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2174 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2175 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2176 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2177 }
2178 g_usBs3TestStep++;
2179 }
2180
2181 /*
2182 * Now, do it the other way around. It should look normal now since writing
2183 * the limit will #PF first and nothing should be written.
2184 */
2185 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2186 {
2187 Bs3MemSet(pbTest, bFiller, 48);
2188 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2189 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2190 if (off >= 0)
2191 {
2192 CtxUdExpected.rbx = Ctx.rbx;
2193 CtxUdExpected.ss = Ctx.ss;
2194 CtxUdExpected.ds = Ctx.ds;
2195 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2196 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2197 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2198 }
2199 else
2200 {
2201 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2202 uFlatTest + off, 0 /*cbIpAdjust*/);
2203 if ( -off < cbIdtr
2204 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2205 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2206 bFiller, cbIdtr + off, pbTest, off);
2207 }
2208 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2209 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2210 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2211 g_usBs3TestStep++;
2212 }
2213
2214 /*
2215 * Combine paging and segment limit and check ordering.
2216 * This is kind of interesting here since the instruction seems to
2217 * be doing two separate writes.
2218 */
2219 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2220 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2221 {
2222 uint16_t cbLimit;
2223
2224 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2225 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2226 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2227 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2228 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2229
2230 if (pWorker->fSs)
2231 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2232 else
2233 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2234
2235 /* Expand up (normal), approaching tail guard page. */
2236 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2237 {
2238 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2239 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2240 {
2241 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2242 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2243 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2244 if (off + cbIdtr <= cbLimit + 1)
2245 {
2246 /* No #GP, but maybe #PF. */
2247 if (off + cbIdtr <= X86_PAGE_SIZE)
2248 {
2249 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2250 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2251 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2252 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2253 }
2254 else
2255 {
2256 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2257 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2258 if ( off <= X86_PAGE_SIZE - 2
2259 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2260 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2261 pbExpected, &pbTest[off], off);
2262 cb = X86_PAGE_SIZE - off - 2;
2263 if ( off < X86_PAGE_SIZE - 2
2264 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2265 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2266 bFiller, cb, &pbTest[off + 2], off);
2267 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2268 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2269 }
2270 }
2271 else if (off + 2 <= cbLimit + 1)
2272 {
2273 /* Writing the [ig]dtr limit part stays within the segment limit, so it doesn't #GP, but it may #PF; if it doesn't, the base write is what triggers the #GP/#SS. */
2274 if (off <= X86_PAGE_SIZE - 2)
2275 {
2276 if (pWorker->fSs)
2277 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2278 else
2279 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2280 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2281 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2282 pbExpected, &pbTest[off], off);
2283 cb = X86_PAGE_SIZE - off - 2;
2284 if ( off < X86_PAGE_SIZE - 2
2285 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2286 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2287 bFiller, cb, &pbTest[off + 2], off);
2288 }
2289 else
2290 {
2291 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2292 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2293 if ( off < X86_PAGE_SIZE
2294 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2295 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2296 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2297 }
2298 }
2299 else
2300 {
2301 /* #GP/#SS on limit. */
2302 if (pWorker->fSs)
2303 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2304 else
2305 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2306 if ( off < X86_PAGE_SIZE
2307 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2308 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2309 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2310 }
2311
2312 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2313 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2314 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2315 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2316
2317 g_usBs3TestStep++;
2318
2319 /* Set DS to 0 and check that we get #GP(0). */
2320 if (!pWorker->fSs)
2321 {
2322 Ctx.ds = 0;
2323 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2324 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2325 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2326 g_usBs3TestStep++;
2327 }
2328 }
2329 }
2330
2331 /* Expand down. */
2332 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2333 uFlatTest -= X86_PAGE_SIZE;
2334
2335 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2336 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2337 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2338 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2339
2340 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2341 {
2342 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2343 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2344 {
2345 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2346 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2347 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2348 if (cbLimit < off && off >= X86_PAGE_SIZE)
2349 {
2350 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2351 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2352 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2353 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2354 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2355 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2356 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2357 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[off + cbIdtr]);
2358 }
2359 else
2360 {
2361 if (cbLimit < off && off < X86_PAGE_SIZE)
2362 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2363 uFlatTest + off, 0 /*cbIpAdjust*/);
2364 else if (pWorker->fSs)
2365 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2366 else
2367 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2368 cb = cbIdtr*2;
2369 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2370 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2371 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE]);
2372 }
2373 g_usBs3TestStep++;
2374 }
2375 }
2376
2377 pbTest += X86_PAGE_SIZE;
2378 uFlatTest += X86_PAGE_SIZE;
2379 }
2380
2381 Bs3MemGuardedTestPageFree(pbTest);
2382 }
2383
2384 /*
2385 * Check non-canonical 64-bit space.
2386 */
2387 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2388 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2389 {
2390 /* Make our references relative to the gap. */
2391 pbTest += g_cbBs3PagingOneCanonicalTrap;
2392
2393 /* Hit it from below. */
2394 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2395 {
2396 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2397 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2398 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2399 if (off + cbIdtr <= 0)
2400 {
2401 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2402 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2403 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2404 }
2405 else
2406 {
2407 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2408 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2409 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2410 off2 = off <= -2 ? 2 : 0;
2411 cb = cbIdtr - off2;
2412 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2413 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2414 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2415 }
2416 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2417 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off - 16]);
2418 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2419 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2420 }
2421
2422 /* Hit it from above. */
2423 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2424 {
2425 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2426 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2427 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2428 if (off >= 0)
2429 {
2430 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2431 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2432 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2433 }
2434 else
2435 {
2436 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2437 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2438 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2439 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2440 }
2441 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off - 16]);
2442 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2443 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2444 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2445 }
2446
2447 }
2448}
2449
2450
2451static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2452 uint8_t const *pbExpected)
2453{
2454 unsigned idx;
2455 unsigned bRing;
2456 unsigned iStep = 0;
2457
2458 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2459 test and don't want to bother with double faults. */
2460 for (bRing = 0; bRing <= 3; bRing++)
2461 {
2462 for (idx = 0; idx < cWorkers; idx++)
2463 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2464 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2465 {
2466 g_usBs3TestStep = iStep;
2467 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2468 iStep += 1000;
2469 }
2470 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2471 break;
2472 }
2473}
2474
2475
2476BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2477{
2478 union
2479 {
2480 RTIDTR Idtr;
2481 uint8_t ab[16];
2482 } Expected;
2483
2484 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2485 bs3CpuBasic2_SetGlobals(bMode);
2486
2487 /*
2488 * Pass to common worker which is only compiled once per mode.
2489 */
2490 Bs3MemZero(&Expected, sizeof(Expected));
2491 ASMGetIDTR(&Expected.Idtr);
2492 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2493
2494 /*
2495 * Re-initialize the IDT.
2496 */
2497 Bs3TrapReInit();
2498 return 0;
2499}
2500
2501
2502BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2503{
2504 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2505 uint64_t uNew = 0;
2506 union
2507 {
2508 RTGDTR Gdtr;
2509 uint8_t ab[16];
2510 } Expected;
2511
2512 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2513 bs3CpuBasic2_SetGlobals(bMode);
2514
2515 /*
2516 * If in paged mode, try to push the GDT way up.
2517 */
2518 Bs3MemZero(&Expected, sizeof(Expected));
2519 ASMGetGDTR(&Expected.Gdtr);
2520 if (BS3_MODE_IS_PAGED(bMode))
2521 {
2522/** @todo loading non-canonical base addresses. */
2523 int rc;
2524 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2525 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
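 /* E.g. (illustration only): a GDT at flat address 0x00012345 keeps its page
    offset, so the 32-bit alias becomes 0xc2d28000 | 0x345 = 0xc2d28345 and the
    aliased mapping covers exactly the same bytes within the page. */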
2526 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2527 if (RT_SUCCESS(rc))
2528 {
2529 Bs3Lgdt_Gdt.uAddr = uNew;
2530 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2531 ASMGetGDTR(&Expected.Gdtr);
2532 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2533 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2534 }
2535 }
2536
2537 /*
2538 * Pass to common worker which is only compiled once per mode.
2539 */
2540 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2541
2542 /*
2543 * Unalias the GDT.
2544 */
2545 if (uNew != 0)
2546 {
2547 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2548 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2549 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2550 }
2551
2552 /*
2553 * Re-initialize the IDT.
2554 */
2555 Bs3TrapReInit();
2556 return 0;
2557}
2558
2559
2560
2561/*
2562 * LIDT & LGDT
2563 */
2564
2565/**
2566 * Executes one round of LIDT and LGDT tests using one assembly worker.
2567 *
2568 * This is written with driving everything from the 16-bit or 32-bit worker in
2569 * mind, i.e. not assuming the test bitcount is the same as the current.
2570 */
2571static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2572 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2573{
2574 static const struct
2575 {
2576 bool fGP;
2577 uint16_t cbLimit;
2578 uint64_t u64Base;
2579 } s_aValues64[] =
2580 {
2581 { false, 0x0000, UINT64_C(0x0000000000000000) },
2582 { false, 0x0001, UINT64_C(0x0000000000000001) },
2583 { false, 0x0002, UINT64_C(0x0000000000000010) },
2584 { false, 0x0003, UINT64_C(0x0000000000000123) },
2585 { false, 0x0004, UINT64_C(0x0000000000001234) },
2586 { false, 0x0005, UINT64_C(0x0000000000012345) },
2587 { false, 0x0006, UINT64_C(0x0000000000123456) },
2588 { false, 0x0007, UINT64_C(0x0000000001234567) },
2589 { false, 0x0008, UINT64_C(0x0000000012345678) },
2590 { false, 0x0009, UINT64_C(0x0000000123456789) },
2591 { false, 0x000a, UINT64_C(0x000000123456789a) },
2592 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2593 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2594 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2595 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2596 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2597 { true, 0x0000, UINT64_C(0x0000800000000000) },
2598 { true, 0x0000, UINT64_C(0x0000800000000333) },
2599 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2600 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2601 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2602 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2603 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2604 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2605 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2606 { false, 0x5678, UINT64_C(0xffff800000000000) },
2607 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2608 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2609 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2610 };
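 /* The fGP column above marks base addresses outside the canonical ranges
    0..0x00007fffffffffff and 0xffff800000000000..0xffffffffffffffff (assuming
    48-bit virtual addressing); loading such a base in 64-bit code is expected
    to raise #GP(0), which is what the 64-bit loop below asserts. */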
2611 static const struct
2612 {
2613 uint16_t cbLimit;
2614 uint32_t u32Base;
2615 } s_aValues32[] =
2616 {
2617 { 0xdfdf, UINT32_C(0xefefefef) },
2618 { 0x0000, UINT32_C(0x00000000) },
2619 { 0x0001, UINT32_C(0x00000001) },
2620 { 0x0002, UINT32_C(0x00000012) },
2621 { 0x0003, UINT32_C(0x00000123) },
2622 { 0x0004, UINT32_C(0x00001234) },
2623 { 0x0005, UINT32_C(0x00012345) },
2624 { 0x0006, UINT32_C(0x00123456) },
2625 { 0x0007, UINT32_C(0x01234567) },
2626 { 0x0008, UINT32_C(0x12345678) },
2627 { 0x0009, UINT32_C(0x80204060) },
2628 { 0x000a, UINT32_C(0xddeeffaa) },
2629 { 0x000b, UINT32_C(0xfdecdbca) },
2630 { 0x000c, UINT32_C(0x6098456b) },
2631 { 0x000d, UINT32_C(0x98506099) },
2632 { 0x000e, UINT32_C(0x206950bc) },
2633 { 0x000f, UINT32_C(0x9740395d) },
2634 { 0x0334, UINT32_C(0x64a9455e) },
2635 { 0xb423, UINT32_C(0xd20b6eff) },
2636 { 0x4955, UINT32_C(0x85296d46) },
2637 { 0xffff, UINT32_C(0x07000039) },
2638 { 0xefe1, UINT32_C(0x0007fe00) },
2639 };
2640
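 /* Hedged note on the 16-bit operand-size case handled via cbBaseLoaded and
    bTop16BitBase below: with an effective 16-bit operand size only 24 bits of
    the base are loaded, so when stored back the 4th base byte is expected to
    read as 0x00 on 386+ and as 0xff on the 286 - which is all the checks here
    assume. */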
2641 BS3TRAPFRAME TrapCtx;
2642 BS3REGCTX Ctx;
2643 BS3REGCTX CtxUdExpected;
2644 BS3REGCTX TmpCtx;
2645 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2646 uint8_t abBufSave[32]; /* For saving the result after loading. */
2647 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2648 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2649 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2650 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2651 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2652 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2653 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2654 ? 3 : 4;
2655 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2656 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2657 uint8_t bFiller1; /* For filling abBufLoad. */
2658 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2659 int off;
2660 uint8_t BS3_FAR *pbTest;
2661 unsigned i;
2662
2663 /* make sure they're allocated */
2664 Bs3MemZero(&Ctx, sizeof(Ctx));
2665 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2666 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2667 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2668 Bs3MemZero(abBufSave, sizeof(abBufSave));
2669 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2670 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2671
2672 /*
2673 * Create a context, giving this routine some more stack space.
2674 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2675 * - Point DS/SS:xBX at abBufLoad.
2676 * - Point ES:xDI at abBufSave.
2677 * - Point ES:xSI at abBufRestore.
2678 */
2679 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2680 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2681 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2682 g_uBs3TrapEipHint = Ctx.rip.u32;
2683 Ctx.rflags.u16 &= ~X86_EFL_IF;
2684 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2685
2686 pbBufSave = abBufSave;
2687 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2688 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
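 /* Worked example (illustration only): if abBufSave starts at offset 0x103,
    then (0x103 + 2) & 7 = 5, we advance by 8 - 5 = 3 to 0x106, and the base
    field at +2 lands on the 8-byte boundary 0x108. */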
2689 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2690
2691 pbBufRestore = abBufRestore;
2692 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2693 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2694 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2695 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2696
2697 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2698 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2699
2700 /* For successful LIDT/LGDT attempts, we'll stop at the UD2. */
2701 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2702 CtxUdExpected.rip.u += pWorker->cbInstr;
2703
2704 /*
2705 * Check that it works at all.
2706 */
2707 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2708 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2709 Bs3MemZero(abBufSave, sizeof(abBufSave));
2710 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2711 if (bRing != 0)
2712 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2713 else
2714 {
2715 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2716 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2717 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2718 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2719 }
2720 g_usBs3TestStep++;
2721
2722 /* Determine two filler bytes that don't appear in the previous result or our expectations. */
2723 bFiller1 = ~0x55;
2724 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2725 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2726 || bFiller1 == 0xff)
2727 bFiller1++;
2728 bFiller2 = 0x33;
2729 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2730 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2731 || bFiller2 == 0xff
2732 || bFiller2 == bFiller1)
2733 bFiller2++;
2734 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2735 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2736
2737 /* Again with a buffer filled with a byte not occurring in the previous result. */
2738 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2739 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2740 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2741 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2742 if (bRing != 0)
2743 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2744 else
2745 {
2746 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2747 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2748 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2749 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2750 }
2751 g_usBs3TestStep++;
2752
2753 /*
2754 * Try loading a bunch of different limit+base values to check what happens,
2755 * especially what happens wrt the top part of the base in 16-bit mode.
2756 */
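/* Note! With a 16-bit operand size LIDT/LGDT should only take a 24-bit base
   (the high byte of the 6-byte operand being ignored), which is presumably
   why the 16/32-bit branch below copies just cbBaseLoaded base bytes and
   checks the saved top base byte against bTop16BitBase. */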
2757 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2758 {
2759 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2760 {
2761 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2762 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2763 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2764 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2765 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2766 if (bRing != 0 || s_aValues64[i].fGP)
2767 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2768 else
2769 {
2770 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2771 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2772 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2773 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2774 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2775 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2776 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2777 }
2778 g_usBs3TestStep++;
2779 }
2780 }
2781 else
2782 {
2783 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2784 {
2785 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2786 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2787 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2788 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2789 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2790 if (bRing != 0)
2791 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2792 else
2793 {
2794 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2795 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2796 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2797 || ( cbBaseLoaded != 4
2798 && pbBufSave[2+3] != bTop16BitBase)
2799 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2800 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2801 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2802 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2803 }
2804 g_usBs3TestStep++;
2805 }
2806 }
2807
2808 /*
2809 * Slide the buffer along 8 bytes to cover misalignment.
2810 */
2811 for (off = 0; off < 8; off++)
2812 {
2813 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2814 CtxUdExpected.rbx.u = Ctx.rbx.u;
2815
2816 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2817 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2818 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2819 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2820 if (bRing != 0)
2821 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2822 else
2823 {
2824 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2825 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2826 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2827 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2828 }
2829 g_usBs3TestStep++;
2830 }
2831 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2832 CtxUdExpected.rbx.u = Ctx.rbx.u;
2833
2834 /*
2835 * Play with the selector limit if the target mode supports limit checking.
2836 * We use BS3_SEL_TEST_PAGE_00 for this.
2837 */
2838 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2839 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2840 {
2841 uint16_t cbLimit;
2842 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2843 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2844 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2845 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2846 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2847 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2848
2849 if (pWorker->fSs)
2850 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2851 else
2852 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2853
2854 /* Expand up (normal). */
2855 for (off = 0; off < 8; off++)
2856 {
2857 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2858 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2859 {
2860 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2861
2862 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2863 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2864 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2865 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2866 if (bRing != 0)
2867 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2868 else if (off + cbIdtr <= cbLimit + 1)
2869 {
2870 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2871 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2872 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2873 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2874 }
2875 else if (pWorker->fSs)
2876 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2877 else
2878 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2879 g_usBs3TestStep++;
2880
2881 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2882 abBufLoad[off] = abBufLoad[off + 1] = 0;
2883 abBufLoad[off + 2] |= 1;
2884 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2885 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2886 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2887 if (bRing != 0)
2888 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2889 else if (off + cbIdtr <= cbLimit + 1)
2890 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2891 else if (pWorker->fSs)
2892 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2893 else
2894 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2895 }
2896 }
2897
2898 /* Expand down (weird). The valid area is inverted compared to expand up,
2899 so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
2900 a segment with one valid byte at 0000h).  A limit of 0fffeh means one
2901 valid byte at 0ffffh, and a limit of 0ffffh means none at all
2902 (whereas for a normal expand-up segment a limit of 0ffffh means all
2903 64KB are accessible). */
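/* Quick summary of the expand-down checks below, assuming byte granularity
   on the 16-bit data descriptor used here:
        limit 0x0000  ->  offsets 0x0001..0xffff are valid
        limit 0x0007  ->  offsets 0x0008..0xffff are valid
        limit 0xfffe  ->  only offset 0xffff is valid
        limit 0xffff  ->  no valid offsets at all
   So the "off > cbLimit" tests below accept the access only when the whole
   cbIdtr-byte read lies in the valid (upper) part of the segment. */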
2904 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2905 for (off = 0; off < 8; off++)
2906 {
2907 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2908 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2909 {
2910 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2911
2912 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2913 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2914 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2915 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2916 if (bRing != 0)
2917 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2918 else if (off > cbLimit)
2919 {
2920 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2921 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2922 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2923 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2924 }
2925 else if (pWorker->fSs)
2926 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2927 else
2928 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2929 g_usBs3TestStep++;
2930
2931 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2932 abBufLoad[off] = abBufLoad[off + 1] = 0;
2933 abBufLoad[off + 2] |= 3;
2934 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2935 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2936 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2937 if (bRing != 0)
2938 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2939 else if (off > cbLimit)
2940 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2941 else if (pWorker->fSs)
2942 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2943 else
2944 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2945 }
2946 }
2947
2948 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2949 CtxUdExpected.rbx.u = Ctx.rbx.u;
2950 CtxUdExpected.ss = Ctx.ss;
2951 CtxUdExpected.ds = Ctx.ds;
2952 }
2953
2954 /*
2955 * Play with the paging.
2956 */
2957 if ( BS3_MODE_IS_PAGED(bTestMode)
2958 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2959 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2960 {
2961 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2962
2963 /*
2964 * Slide the load buffer towards the trailing guard page.
2965 */
2966 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2967 CtxUdExpected.ss = Ctx.ss;
2968 CtxUdExpected.ds = Ctx.ds;
2969 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2970 {
2971 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2972 if (off < X86_PAGE_SIZE)
2973 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2974 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2975 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2976 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2977 if (bRing != 0)
2978 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2979 else if (off + cbIdtr <= X86_PAGE_SIZE)
2980 {
2981 CtxUdExpected.rbx = Ctx.rbx;
2982 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2983 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2984 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2985 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2986 }
2987 else
2988 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2989 g_usBs3TestStep++;
2990
2991 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2992 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2993 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2994 && ( off != X86_PAGE_SIZE - 2
2995 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2996 )
2997 {
2998 pbTest[off] = 0;
2999 if (off + 1 < X86_PAGE_SIZE)
3000 pbTest[off + 1] = 0;
3001 if (off + 2 < X86_PAGE_SIZE)
3002 pbTest[off + 2] |= 7;
3003 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3004 if (bRing != 0)
3005 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3006 else
3007 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3008 g_usBs3TestStep++;
3009 }
3010 }
3011
3012 /*
3013 * Now, do it the other way around. It should look normal now since reading
3014 * the limit will #PF first and nothing should be loaded.
3015 */
3016 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
3017 {
3018 Bs3MemSet(pbTest, bFiller1, 48);
3019 if (off >= 0)
3020 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3021 else if (off + cbIdtr > 0)
3022 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3023 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3024 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3025 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3026 if (bRing != 0)
3027 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3028 else if (off >= 0)
3029 {
3030 CtxUdExpected.rbx = Ctx.rbx;
3031 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3032 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3033 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3034 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3035 }
3036 else
3037 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3038 g_usBs3TestStep++;
3039
3040 /* Again with messed up base as well (triple fault if buggy). */
3041 if (off < 0 && off > -cbIdtr)
3042 {
3043 if (off + 2 >= 0)
3044 pbTest[off + 2] |= 15;
3045 pbTest[off + cbIdtr - 1] ^= 0xaa;
3046 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3047 if (bRing != 0)
3048 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3049 else
3050 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3051 g_usBs3TestStep++;
3052 }
3053 }
3054
3055 /*
3056 * Combine paging and segment limit and check ordering.
3057 * This is kind of interesting here since the instruction seems to
3058 * actually be doing two separate reads, just like its S[IG]DT counterpart.
3059 *
3060 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3061 * that's what f486Weirdness deals with.
3062 */
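/* An interpretation of the checks below: a word-sized limit read only crosses
   into the trailing guard page for off >= 0xfff, while the 486's dword limit
   read crosses two bytes earlier, hence the off >= 0xffd variant and the
   matching cbLimit adjustment. */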
3063 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3064 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3065 {
3066 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3067 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3068 uint16_t cbLimit;
3069
3070 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3071 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3072 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3073 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3074 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3075
3076 if (pWorker->fSs)
3077 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3078 else
3079 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3080
3081 /* Expand up (normal), approaching tail guard page. */
3082 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3083 {
3084 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3085 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3086 {
3087 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3088 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3089 if (off < X86_PAGE_SIZE)
3090 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3091 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3092 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3093 if (bRing != 0)
3094 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3095 else if (off + cbIdtr <= cbLimit + 1)
3096 {
3097 /* No #GP, but maybe #PF. */
3098 if (off + cbIdtr <= X86_PAGE_SIZE)
3099 {
3100 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3101 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3102 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3103 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3104 }
3105 else
3106 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3107 }
3108 /* No #GP/#SS on limit, but instead #PF? */
3109 else if ( !f486Weirdness
3110 ? off < cbLimit && off >= 0xfff
3111 : off + 2 < cbLimit && off >= 0xffd)
3112 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3113 /* #GP/#SS on limit or base. */
3114 else if (pWorker->fSs)
3115 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3116 else
3117 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3118
3119 g_usBs3TestStep++;
3120
3121 /* Set DS to 0 and check that we get #GP(0). */
3122 if (!pWorker->fSs)
3123 {
3124 Ctx.ds = 0;
3125 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3126 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3127 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3128 g_usBs3TestStep++;
3129 }
3130 }
3131 }
3132
3133 /* Expand down. */
3134 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3135 uFlatTest -= X86_PAGE_SIZE;
3136
3137 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3138 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3139 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3140 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3141
3142 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3143 {
3144 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3145 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3146 {
3147 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3148 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3149 if (off >= X86_PAGE_SIZE)
3150 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3151 else if (off > X86_PAGE_SIZE - cbIdtr)
3152 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3153 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3154 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3155 if (bRing != 0)
3156 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3157 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3158 {
3159 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3160 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3161 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3162 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3163 }
3164 else if (cbLimit < off && off < X86_PAGE_SIZE)
3165 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3166 else if (pWorker->fSs)
3167 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3168 else
3169 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3170 g_usBs3TestStep++;
3171 }
3172 }
3173
3174 pbTest += X86_PAGE_SIZE;
3175 uFlatTest += X86_PAGE_SIZE;
3176 }
3177
3178 Bs3MemGuardedTestPageFree(pbTest);
3179 }
3180
3181 /*
3182 * Check non-canonical 64-bit space.
3183 */
3184 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3185 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3186 {
3187 /* Make our references relative to the gap. */
3188 pbTest += g_cbBs3PagingOneCanonicalTrap;
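/* pbTest[0] should now correspond to 0x0000800000000000 when probing from
   below and to 0xffff800000000000 when probing from above; presumably the
   canonical trap setup aliases the pages on both sides of the gap around
   this point so the same buffer serves both loops. */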
3189
3190 /* Hit it from below. */
3191 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3192 {
3193 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3194 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3195 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3196 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3197 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3198 if (off + cbIdtr > 0 || bRing != 0)
3199 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3200 else
3201 {
3202 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3203 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3204 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3205 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3206 }
3207 }
3208
3209 /* Hit it from above. */
3210 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3211 {
3212 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3213 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3214 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3215 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3216 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3217 if (off < 0 || bRing != 0)
3218 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3219 else
3220 {
3221 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3222 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3223 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3224 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3225 }
3226 }
3227
3228 }
3229}
3230
3231
3232static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3233 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3234{
3235 unsigned idx;
3236 unsigned bRing;
3237 unsigned iStep = 0;
3238
3239 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3240 test and don't want to bother with double faults. */
3241 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3242 {
3243 for (idx = 0; idx < cWorkers; idx++)
3244 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3245 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3246 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3247 || ( bTestMode > BS3_MODE_PE16
3248 || ( bTestMode == BS3_MODE_PE16
3249 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3250 {
3251 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3252 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3253 g_usBs3TestStep = iStep;
3254 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3255 iStep += 1000;
3256 }
3257 if (BS3_MODE_IS_RM_SYS(bTestMode))
3258 break;
3259 }
3260}
3261
3262
3263BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3264{
3265 union
3266 {
3267 RTIDTR Idtr;
3268 uint8_t ab[32]; /* At least cbIdtr*2! */
3269 } Expected;
3270
3271 //if (bMode != BS3_MODE_LM64) return 0;
3272 bs3CpuBasic2_SetGlobals(bMode);
3273
3274 /*
3275 * Pass to common worker which is only compiled once per mode.
3276 */
3277 Bs3MemZero(&Expected, sizeof(Expected));
3278 ASMGetIDTR(&Expected.Idtr);
3279
3280 if (BS3_MODE_IS_RM_SYS(bMode))
3281 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3282 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3283 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3284 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3285 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3286 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3287 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3288 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3289 else
3290 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3291 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3292
3293 /*
3294 * Re-initialize the IDT.
3295 */
3296 Bs3TrapReInit();
3297 return 0;
3298}
3299
3300
3301BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3302{
3303 union
3304 {
3305 RTGDTR Gdtr;
3306 uint8_t ab[32]; /* At least cbIdtr*2! */
3307 } Expected;
3308
3309 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3310 bs3CpuBasic2_SetGlobals(bMode);
3311
3312 /*
3313 * Pass to common worker which is only compiled once per mode.
3314 */
3315 if (BS3_MODE_IS_RM_SYS(bMode))
3316 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3317 Bs3MemZero(&Expected, sizeof(Expected));
3318 ASMGetGDTR(&Expected.Gdtr);
3319
3320 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3321 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3322
3323 /*
3324 * Re-initialize the IDT.
3325 */
3326 Bs3TrapReInit();
3327 return 0;
3328}
3329
3330typedef union IRETBUF
3331{
3332 uint64_t au64[6]; /* max req is 5 */
3333 uint32_t au32[12]; /* max req is 9 */
3334 uint16_t au16[24]; /* max req is 5 */
3335 uint8_t ab[48];
3336} IRETBUF;
3337typedef IRETBUF BS3_FAR *PIRETBUF;
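/* The 9-dword worst case above is presumably the return to v8086 mode, which
   pops EIP, CS, EFLAGS, ESP, SS, ES, DS, FS and GS. */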
3338
3339
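/**
 * Fills in an IRET frame at pIretBuf for the given operand size: five 16-bit
 * words when cbPop is 2, five qwords when cbPop is 8, and otherwise five
 * dwords with CS and SS in the low 16 bits of their slots.  The SS:SP pair is
 * always written, even when the CPU would only pop it on a privilege change.
 */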
3340static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3341 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3342{
3343 if (cbPop == 2)
3344 {
3345 pIretBuf->au16[0] = (uint16_t)uPC;
3346 pIretBuf->au16[1] = uCS;
3347 pIretBuf->au16[2] = (uint16_t)fEfl;
3348 pIretBuf->au16[3] = (uint16_t)uSP;
3349 pIretBuf->au16[4] = uSS;
3350 }
3351 else if (cbPop != 8)
3352 {
3353 pIretBuf->au32[0] = (uint32_t)uPC;
3354 pIretBuf->au16[1*2] = uCS;
3355 pIretBuf->au32[2] = (uint32_t)fEfl;
3356 pIretBuf->au32[3] = (uint32_t)uSP;
3357 pIretBuf->au16[4*2] = uSS;
3358 }
3359 else
3360 {
3361 pIretBuf->au64[0] = uPC;
3362 pIretBuf->au16[1*4] = uCS;
3363 pIretBuf->au64[2] = fEfl;
3364 pIretBuf->au64[3] = uSP;
3365 pIretBuf->au16[4*4] = uSS;
3366 }
3367}
3368
3369
3370static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3371 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3372{
3373 BS3TRAPFRAME TrapCtx;
3374 BS3REGCTX Ctx;
3375 BS3REGCTX CtxUdExpected;
3376 BS3REGCTX TmpCtx;
3377 BS3REGCTX TmpCtxExpected;
3378 uint8_t abLowUd[8];
3379 uint8_t abLowIret[8];
3380 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3381 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3382 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3383 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3384 int iRingDst;
3385 int iRingSrc;
3386 uint16_t uDplSs;
3387 uint16_t uRplCs;
3388 uint16_t uRplSs;
3389// int i;
3390 uint8_t BS3_FAR *pbTest;
3391
3392 NOREF(abLowUd);
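/* Helpers for poking individual IRET frame slots directly in the raw buffer
   at the current operand size (cbPop granularity). */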
3393#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3394 do { *(uint16_t BS3_FAR *)&pIretBuf->ab[(a_idx) * cbPop] = (a_uValue); } while (0)
3395#define IRETBUF_SET_REG(a_idx, a_uValue) \
3396 do { uint8_t BS3_FAR *pbTmp = &pIretBuf->ab[(a_idx) * cbPop]; \
3397 if (cbPop == 2) *(uint16_t BS3_FAR *)pbTmp = (uint16_t)(a_uValue); \
3398 else if (cbPop != 8) *(uint32_t BS3_FAR *)pbTmp = (uint32_t)(a_uValue); \
3399 else *(uint64_t BS3_FAR *)pbTmp = (a_uValue); \
3400 } while (0)
3401
3402 /* make sure they're allocated */
3403 Bs3MemZero(&Ctx, sizeof(Ctx));
3404 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3405 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3406 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3407 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3408
3409 /*
3410 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3411 * copies of both iret and ud in the first 64KB of memory. The stack is
3412 * below 64KB, so we'll just copy the instructions onto the stack.
3413 */
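/* (The 16-bit frame only holds a 16-bit IP, so whatever we return to must be
   reachable with a zero-based 16-bit offset - hence the low-memory copies.) */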
3414 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3415 Bs3MemCpy(abLowIret, pfnIret, 4);
3416
3417 /*
3418 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3419 * - Point the context at our iret instruction.
3420 * - Point SS:xSP at pIretBuf.
3421 */
3422 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3423 if (!fUseLowCode)
3424 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3425 else
3426 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3427 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3428 g_uBs3TrapEipHint = Ctx.rip.u32;
3429 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3430
3431 /*
3432 * The first success (UD) context keeps the same code bit-count as the iret.
3433 */
3434 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3435 if (!fUseLowCode)
3436 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3437 else
3438 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3439 CtxUdExpected.rsp.u += cbSameCplFrame;
3440
3441 /*
3442 * Check that it works at all.
3443 */
3444 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3445 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3446
3447 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3448 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3449 g_usBs3TestStep++;
3450
3451 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3452 {
3453 /* Selectors are modified when switching rings, so we need to know
3454 what we're dealing with there. */
3455 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3456 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3457 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3458 if (Ctx.fs || Ctx.gs)
3459 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3460
3461 /*
3462 * Test returning to outer rings if protected mode.
3463 */
3464 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3465 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3466 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3467 {
3468 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3469 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3470 TmpCtx.es = TmpCtxExpected.es;
3471 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3472 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3473 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3474 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3475 g_usBs3TestStep++;
3476 }
3477
3478 /*
3479 * Check CS.RPL and SS.RPL.
3480 */
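/* Rough summary of what the nested loops below expect (as understood here):
   the return CS.RPL must equal the destination CPL and IRET may not return
   to a more privileged ring, otherwise #GP(CS); and whenever a new stack is
   popped, SS.RPL and SS.DPL must both equal the new CPL, otherwise #GP(SS). */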
3481 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3482 {
3483 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3484 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3485 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3486 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3487 {
3488 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3489 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3490 TmpCtx.es = TmpCtxExpected.es;
3491 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3492 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3493 {
3494 uint16_t const uSrcEs = TmpCtx.es;
3495 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3496 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3497
3498 /* CS.RPL */
3499 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3500 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3501 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3502 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3503 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3504 else
3505 {
3506 if (iRingDst < iRingSrc)
3507 TmpCtx.es = 0;
3508 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3509 TmpCtx.es = uSrcEs;
3510 }
3511 g_usBs3TestStep++;
3512
3513 /* SS.RPL */
3514 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3515 {
3516 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3517 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3518 {
3519 /* SS.DPL (iRingDst == CS.DPL) */
3520 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3521 {
3522 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3523 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3524 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3525 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3526
3527 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3528 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3529 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3530 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3531 {
3532 if (iRingDst < iRingSrc)
3533 TmpCtx.es = 0;
3534 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3535 }
3536 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3537 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3538 else
3539 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3540 TmpCtx.es = uSrcEs;
3541 g_usBs3TestStep++;
3542 }
3543 }
3544
3545 TmpCtxExpected.ss = uSavedDstSs;
3546 }
3547 }
3548 }
3549 }
3550 }
3551
3552 /*
3553 * Special 64-bit checks.
3554 */
3555 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3556 {
3557 /* The VM flag is completely ignored. */
3558 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3559 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3560 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3561 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3562 g_usBs3TestStep++;
3563
3564 /* The NT flag can be loaded just fine. */
3565 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3566 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3567 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3568 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3569 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3570 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3571 g_usBs3TestStep++;
3572
3573 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3574 Ctx.rflags.u32 |= X86_EFL_NT;
3575 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3576 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3577 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3578 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3579 g_usBs3TestStep++;
3580
3581 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3582 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3583 if (pbTest != NULL)
3584 {
3585 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3586 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3587 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3588 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3589 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3590 g_usBs3TestStep++;
3591
3592 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3593 Bs3MemGuardedTestPageFree(pbTest);
3594 }
3595 Ctx.rflags.u32 &= ~X86_EFL_NT;
3596 }
3597}
3598
3599
3600BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3601{
3602 struct
3603 {
3604 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3605 IRETBUF IRetBuf;
3606 uint8_t abGuard[32];
3607 } uBuf;
3608 size_t cbUnused;
3609
3610 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3611 bs3CpuBasic2_SetGlobals(bMode);
3612
3613 /*
3614 * Primary instruction form.
3615 */
3616 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3617 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3618 if (BS3_MODE_IS_16BIT_CODE(bMode))
3619 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3620 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3621 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3622 else
3623 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3624
3625 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3626 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3627 - (uintptr_t)uBuf.abExtraStack;
3628 if (cbUnused < 2048)
3629 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3630
3631 /*
3632 * Secondary variation: opsize prefixed.
3633 */
3634 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3635 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3636 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3637 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3638 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3639 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3640 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3641 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3642 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3643 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3644 - (uintptr_t)uBuf.abExtraStack;
3645 if (cbUnused < 2048)
3646 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3647
3648 /*
3649 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3650 */
3651 if (BS3_MODE_IS_64BIT_CODE(bMode))
3652 {
3653 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3654 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3655 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3656 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3657 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3658 - (uintptr_t)uBuf.abExtraStack;
3659 if (cbUnused < 2048)
3660 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3661 }
3662
3663 return 0;
3664}
3665
3666
3667
3668/*********************************************************************************************************************************
3669* Non-far JMP & CALL Tests *
3670*********************************************************************************************************************************/
3671#define PROTO_ALL(a_Template) \
3672 FNBS3FAR a_Template ## _c16, \
3673 a_Template ## _c32, \
3674 a_Template ## _c64
3675PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
3676PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
3677PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
3678PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
3679PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
3680PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
3681PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
3682FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64;
3683PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
3684PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
3685PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
3686PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
3687PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
3688FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64;
3689
3690PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
3691PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
3692PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
3693PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
3694PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
3695PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
3696FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64;
3697PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
3698PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
3699PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
3700PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
3701FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64;
3702PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
3703PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
3704#undef PROTO_ALL
3705
3706FNBS3FAR bs3CpuBasic2_jmptext16_start;
3707
3708FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
3709FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
3710FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
3711FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
3712FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
3713FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
3714FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;
3715
3716FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
3717FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
3718FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
3719FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
3720FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
3721FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
3722FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3723
3724
3725
3726/**
3727 * Entrypoint for non-far JMP & CALL tests.
3728 *
3729 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3730 * @param bMode The CPU mode we're testing.
3731 *
3732 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3733 * with control registers and such.
3734 */
3735BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3736{
3737 BS3TRAPFRAME TrapCtx;
3738 BS3REGCTX Ctx;
3739 BS3REGCTX CtxExpected;
3740 unsigned iTest;
3741
3742 /* make sure they're allocated */
3743 Bs3MemZero(&Ctx, sizeof(Ctx));
3744 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3745 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3746
3747 bs3CpuBasic2_SetGlobals(bMode);
3748
3749 /*
3750 * Create a context.
3751 */
3752 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3753 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3754
3755 /*
3756 * 16-bit tests.
3757 *
3758 * When the opsize is 16-bit, relative jumps will do 16-bit calculations and
3759 * only modify IP. This means that it is not possible to trigger a segment
3760 * limit #GP(0) when the limit is set to 0xffff.
3761 */
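/* A tiny worked example of the wrap cases below: a rel8 jump of +0x10 sitting
   near the top of the segment, where IP would otherwise become 0x10007, just
   ends up at IP 0x0007 - the 16-bit arithmetic wraps rather than running past
   the 0xffff limit and faulting. */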
3762 if (BS3_MODE_IS_16BIT_CODE(bMode))
3763 {
3764 static struct
3765 {
3766 int8_t iWrap;
3767 bool fOpSizePfx;
3768 int8_t iGprIndirect;
3769 bool fCall;
3770 FPFNBS3FAR pfnTest;
3771 }
3772 const s_aTests[] =
3773 {
3774 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3775 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3776 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3777 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3778 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3779 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3780 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3781 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3782 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3783 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3784 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3785 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3786 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3787 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3788 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3789 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3790 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3791 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3792 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3793 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3794 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3795 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3796
3797 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3798 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3799 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3800 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3801
3802 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3803 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3804 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3805 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3806 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3807 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3808 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3809 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3810 };
3811
3812 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3813 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3814
3815 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3816 {
3817 uint64_t uGprSaved;
3818 if (s_aTests[iTest].iWrap == 0)
3819 {
3820 uint8_t const BS3_FAR *fpbCode;
3821 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3822 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
3823 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3824 }
3825 else
3826 {
3827 if (BS3_MODE_IS_RM_OR_V86(bMode))
3828 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3829 else
3830 Ctx.cs = BS3_SEL_SPARE_03;
3831 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3832 if (s_aTests[iTest].fOpSizePfx)
3833 CtxExpected.rip.u = Ctx.rip.u;
3834 else if (s_aTests[iTest].iWrap < 0)
3835 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3836 else
3837 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3838 }
3839 CtxExpected.cs = Ctx.cs;
3840 if (s_aTests[iTest].iGprIndirect >= 0)
3841 {
3842 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3843 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3844 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3845 }
3846 CtxExpected.rsp.u = Ctx.rsp.u;
3847 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3848 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3849 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3850
3851 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3852 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3853 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3854 else
3855 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3856 g_usBs3TestStep++;
3857
3858 /* Again single stepping: */
3859 //Bs3TestPrintf("stepping...\n");
3860 Bs3RegSetDr6(0);
3861 Ctx.rflags.u16 |= X86_EFL_TF;
3862 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3863 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3864 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3865 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3866 else
3867 {
3868 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3869 bs3CpuBasic2_CheckDr6InitVal();
3870 }
3871 Ctx.rflags.u16 &= ~X86_EFL_TF;
3872 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3873 g_usBs3TestStep++;
3874
3875 if (s_aTests[iTest].iGprIndirect >= 0)
3876 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3877 }
3878
3879 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3880 and run the backward wrapping tests. */
3881 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3882 {
3883 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3884 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3885 CtxExpected.rsp.u = Ctx.rsp.u;
3886 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3887 if (s_aTests[iTest].iWrap < 0)
3888 {
3889 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3890 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3891 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3892 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3893 g_usBs3TestStep++;
3894 }
3895
3896 /* Do another round where we put the limit in the middle of the UD2
3897 instruction we're jumping to: */
3898 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3899 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3900 if (s_aTests[iTest].iWrap < 0)
3901 {
3902 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3903 if (s_aTests[iTest].fOpSizePfx)
3904 CtxExpected.rip.u = Ctx.rip.u;
3905 else
3906 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3907 CtxExpected.rsp.u = Ctx.rsp.u;
3908 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3909 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3910 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3911 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3912 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3913 g_usBs3TestStep++;
3914 }
3915 }
3916
3917 }
3918 /*
3919 * 32-bit & 64-bit tests.
3920 *
3921 * When the opsize prefix is applied here, IP is updated and bits 63:16
3922 * cleared. However, in 64-bit mode Intel ignores the opsize prefix, whereas
3923 * AMD honours it and it works like you would expect.
3924 */
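/* For example (as understood here): in 32-bit code an o16 jmp aimed at flat
   0x00012345 really ends up at EIP 0x00002345, while in 64-bit code Intel
   CPUs treat the o16 form exactly like the unprefixed one and AMD CPUs
   truncate RIP to 16 bits. */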
3925 else
3926 {
3927 static struct
3928 {
3929 uint8_t cBits;
3930 bool fOpSizePfx;
3931 bool fIgnPfx;
3932 int8_t iGprIndirect;
3933 bool fCall;
3934 FPFNBS3FAR pfnTest;
3935 }
3936 const s_aTests[] =
3937 {
3938 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3939 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3940 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3941 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3942 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3943 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3944 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3945 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3946 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3947 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3948 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3949 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3950 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3951 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3952 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3953 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3954 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3955 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3956 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3957 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3958 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3959 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3960 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3961 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3962 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3963 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3964 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3965 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3966 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3967 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3968 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3969 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3970 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3971 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3972 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3973 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3974 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3975 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3976 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3977 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3978 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3979 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3980 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3981 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3982 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3983 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
3984 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3985 /* 64bit/AMD: Use the _c32 tests. */
3986 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3987 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3988 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3989 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3990 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3991 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3992 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3993 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3994 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
3995 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
3996 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
3997 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
3998 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
3999 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
4000 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
4001 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
4002 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
4003 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
4004 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
4005 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
4006 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
4007 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
4008 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
4009 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
4010 };
4011 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
4012 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4013 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4014
4015 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
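/* The idea, as read from the code below: with the opsize prefix the target IP
   gets truncated to 16 bits, so execution should continue at the same offset
   in the low 64KB rather than at the original code.  The original UD2s are
   therefore swapped for ICEBPs and fresh UD2s are planted at the matching
   low-memory offsets, so hitting UD2 proves the truncation happened. */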
4016 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
4017 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
4018 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4019 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4020 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4021 Bs3TestFailedF("Opsize overriden jumps are out of place: %#x LB %#x\n", offLow, cbLow);
4022 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4023 if (!fIgnPfx)
4024 {
4025 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4026 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4027 {
4028 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4029 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4030 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4031 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4032 pbCode16[offUd + 1] = 0xf1;
4033 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4034 pbLow[offUd + 1] = 0x0b;
4035 }
4036 }
4037
4038 /* Run the tests. */
4039 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4040 {
4041 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4042 {
4043 uint64_t uGprSaved;
4044 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4045 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4046 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4047 if (s_aTests[iTest].iGprIndirect >= 0)
4048 {
4049 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4050 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4051 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4052 }
4053 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4054 CtxExpected.rip.u &= UINT16_MAX;
4055 CtxExpected.rsp.u = Ctx.rsp.u;
4056 if (s_aTests[iTest].fCall)
4057 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4058 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4059
4060 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4061
4062 if (BS3_MODE_IS_16BIT_SYS(bMode))
4063 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4064 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4065
4066 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4067 g_usBs3TestStep++;
4068
4069 /* Again single stepping: */
4070 //Bs3TestPrintf("stepping...\n");
4071 Bs3RegSetDr6(0);
4072 Ctx.rflags.u16 |= X86_EFL_TF;
4073 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4074 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4075 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4076 Ctx.rflags.u16 &= ~X86_EFL_TF;
4077 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4078 g_usBs3TestStep++;
4079
4080 if (s_aTests[iTest].iGprIndirect >= 0)
4081 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4082 }
4083 }
4084
4085 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4086 }
4087
4088 return 0;
4089}
4090
4091
4092/*********************************************************************************************************************************
4093* FAR JMP & FAR CALL Tests *
4094*********************************************************************************************************************************/
4095#define PROTO_ALL(a_Template) \
4096 FNBS3FAR a_Template ## _c16, \
4097 a_Template ## _c32, \
4098 a_Template ## _c64
4099PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_begin);
4100
4101FNBS3FAR bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
4102PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
4103PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
4104PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
4105PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
4106PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
4107PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
4108PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);
4109
4110FNBS3FAR bs3CpuBasic2_callf_ptr_rm__ud2_c16;
4111PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
4112PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
4113PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
4114PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
4115PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
4116PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
4117PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);
4118
4119FNBS3FAR bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
4120PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
4121PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
4122PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
4123PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
4124PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
4125PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
4126PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
4127PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);
4128
4129FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
4130FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
4131FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
4132FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
4133FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
4134FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
4135FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
4136FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;
4137
4138FNBS3FAR bs3CpuBasic2_callf_mem_rm__ud2_c16;
4139PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
4140PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
4141PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
4142PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
4143PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
4144PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
4145PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
4146PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);
4147
4148FNBS3FAR bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
4149FNBS3FAR bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
4150FNBS3FAR bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
4151FNBS3FAR bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
4152FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
4153FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
4154FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
4155FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;
4156
4157PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_end);
4158#undef PROTO_ALL
4159
4160
4161
4162/**
4163 * Entrypoint for FAR JMP & FAR CALL tests.
4164 *
4165 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4166 * @param bMode The CPU mode we're testing.
4167 *
4168 * @note When testing v8086 code, we'll be running in v8086 mode. So, be careful
4169 *       with control registers and such.
4170 */
4171BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4172{
4173 BS3TRAPFRAME TrapCtx;
4174 BS3REGCTX Ctx;
4175 BS3REGCTX CtxExpected;
4176 unsigned iTest;
4177
4178 /* make sure they're allocated */
4179 Bs3MemZero(&Ctx, sizeof(Ctx));
4180 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4181 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4182
4183 bs3CpuBasic2_SetGlobals(bMode);
4184
4185 /*
4186 * Create a context.
4187 */
4188 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4189 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4190
4191 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4192 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4193
4194 /*
4195 * Set up spare selectors.
4196 */
4197 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4198 Bs3GdteSpare00.Gen.u1Long = 1;
4199
4200 /*
4201 * 16-bit tests.
4202 */
4203 if (BS3_MODE_IS_16BIT_CODE(bMode))
4204 {
4205 static struct
4206 {
4207 bool fRmOrV86;
4208 bool fCall;
4209 uint16_t uDstSel;
4210 uint8_t uDstBits;
4211 bool fOpSizePfx;
4212 FPFNBS3FAR pfnTest;
4213 }
4214 const s_aTests[] =
4215 {
4216 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4217 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4218 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4219 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4220 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4221 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4222 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4223 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4224
4225 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4226 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4227 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4228 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4229 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4230 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4231 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4232 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4233
4234 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4235 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4236 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4237 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4238 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4239 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4240 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4241 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4242 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4243
4244 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4245 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4246 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4247 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4248 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4249 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4250 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4251 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4252 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4253 };
4254 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4255
4256 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4257 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4258 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
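        /* (The signed byte just before each test stub's entry point - read via
           pbCode16[offFn - 1] here and fpbCode[-1] below - encodes the distance
           from the entry point to its ud2; that is how offUd and the expected
           RIPs are derived.  The salc planted in front of the low ud2 lets the
           test tell 64-bit from 16-bit execution, see the rax/rip adjustments
           in the test loop below.) */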
4259 if (BS3_MODE_IS_64BIT_SYS(bMode))
4260 {
4261 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c16);
4262 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c16) - offLow;
4263 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4264 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4265 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4266                Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4267 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4268 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4269 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4270 {
4271 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4272 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4273 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4274 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4275 pbLow[offUd] = 0x0f;
4276 pbLow[offUd + 1] = 0x0b;
4277 }
4278 }
4279
4280 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4281 if (s_aTests[iTest].fRmOrV86 == fRmOrV86)
4282 {
4283 uint64_t const uSavedRsp = Ctx.rsp.u;
4284 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4285 uint8_t const BS3_FAR *fpbCode;
4286
4287 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4288 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4289 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4290 if ( s_aTests[iTest].uDstBits == 32
4291 || ( s_aTests[iTest].uDstBits == 64
4292 && !BS3_MODE_IS_16BIT_SYS(bMode)
4293 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4294 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4295 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode))
4296 CtxExpected.rip.u &= UINT16_MAX;
4297 CtxExpected.cs = s_aTests[iTest].uDstSel;
4298 if (fGp)
4299 {
4300 CtxExpected.rip.u = Ctx.rip.u;
4301 CtxExpected.cs = Ctx.cs;
4302 }
4303 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4304 CtxExpected.rsp.u = Ctx.rsp.u;
4305 if (s_aTests[iTest].fCall && !fGp)
4306 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
4307 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4308 {
4309 if (BS3_MODE_IS_64BIT_SYS(bMode))
4310 CtxExpected.rip.u -= 1;
4311 else
4312 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4313 }
4314 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4315 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4316 if (!fGp)
4317 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4318 else
4319 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4320 Ctx.rsp.u = uSavedRsp;
4321 g_usBs3TestStep++;
4322
4323 /* Again single stepping: */
4324 //Bs3TestPrintf("stepping...\n");
4325 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4326 Ctx.rflags.u16 |= X86_EFL_TF;
4327 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4328 CtxExpected.rax.u = Ctx.rax.u;
4329 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4330 CtxExpected.rip.u -= 1;
4331 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4332 if (!fGp)
4333 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4334 else
4335 {
4336 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4337 bs3CpuBasic2_CheckDr6InitVal();
4338 }
4339 Ctx.rflags.u16 &= ~X86_EFL_TF;
4340 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4341 Ctx.rsp.u = uSavedRsp;
4342 g_usBs3TestStep++;
4343 }
4344 }
4345 /*
4346 * 32-bit tests.
4347 */
4348 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4349 {
4350 static struct
4351 {
4352 bool fCall;
4353 uint16_t uDstSel;
4354 uint8_t uDstBits;
4355 bool fOpSizePfx;
4356 FPFNBS3FAR pfnTest;
4357 }
4358 const s_aTests[] =
4359 {
4360 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4361 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4362 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4363 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4364 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4365 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4366 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4367
4368 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4369 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4370 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4371 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4372 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4373 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4374 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4375
4376 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4377 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4378 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4379 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4380 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4381 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4382 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4383 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4384
4385 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4386 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4387 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4388 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4389 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4390 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4391 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4392 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4393 };
4394
4395 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4396 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4397 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4398 if (BS3_MODE_IS_64BIT_SYS(bMode))
4399 {
4400 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c32);
4401 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c32) - offLow;
4402 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4403 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4404 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4405                Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4406 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4407 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4408 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4409 {
4410 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4411 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4412 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4413 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4414 pbLow[offUd] = 0x0f;
4415 pbLow[offUd + 1] = 0x0b;
4416 }
4417 }
4418 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4419 {
4420 uint64_t const uSavedRsp = Ctx.rsp.u;
4421 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4422 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4423
4424 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4425 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4426 if ( s_aTests[iTest].uDstBits == 16
4427 || ( s_aTests[iTest].uDstBits == 64
4428 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4429 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4430 CtxExpected.rip.u &= UINT16_MAX;
4431 CtxExpected.cs = s_aTests[iTest].uDstSel;
4432 if (fGp)
4433 {
4434 CtxExpected.rip.u = Ctx.rip.u;
4435 CtxExpected.cs = Ctx.cs;
4436 }
4437 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4438 CtxExpected.rsp.u = Ctx.rsp.u;
4439 if (s_aTests[iTest].fCall && !fGp)
4440 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4441 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4442 {
4443 if (BS3_MODE_IS_64BIT_SYS(bMode))
4444 CtxExpected.rip.u -= 1;
4445 else
4446 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4447 }
4448 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4449 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4450 if (!fGp)
4451 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4452 else
4453 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4454 Ctx.rsp.u = uSavedRsp;
4455 g_usBs3TestStep++;
4456
4457 /* Again single stepping: */
4458 //Bs3TestPrintf("stepping...\n");
4459 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4460 Ctx.rflags.u16 |= X86_EFL_TF;
4461 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4462 CtxExpected.rax.u = Ctx.rax.u;
4463 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4464 CtxExpected.rip.u -= 1;
4465 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4466 if (!fGp)
4467 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4468 else
4469 {
4470 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4471 bs3CpuBasic2_CheckDr6InitVal();
4472 }
4473 Ctx.rflags.u16 &= ~X86_EFL_TF;
4474 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4475 Ctx.rsp.u = uSavedRsp;
4476 g_usBs3TestStep++;
4477 }
4478 }
4479 /*
4480 * 64-bit tests.
4481 */
4482 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4483 {
4484 static struct
4485 {
4486 bool fInvalid;
4487 bool fCall;
4488 uint16_t uDstSel;
4489 uint8_t uDstBits;
4490 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4491 int8_t fFix64OpSize;
4492 FPFNBS3FAR pfnTest;
4493 }
4494 const s_aTests[] =
4495 {
4496 /* invalid opcodes: */
4497 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4498 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4499 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4500 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4501 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4502 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4503 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4504
4505 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4506 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4507 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4508 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4509 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4510 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4511 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4512
4513 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4514 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4515 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4516 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4517 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4518 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4519 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4520 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4521
4522 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4523 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4524 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4525 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4526 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4527 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4528 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4529 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4530
4531 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4532 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4533 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4534 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4535 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4536 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4537 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4538 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4539
4540 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4541 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4542 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4543 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4544 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4545 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4546 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4547 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4548 };
4549 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4550 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4551
4552 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4553 {
4554 uint64_t const uSavedRsp = Ctx.rsp.u;
4555 bool const fUd = s_aTests[iTest].fInvalid;
4556 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4557 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4558
4559 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4560 continue;
4561
4562 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4563 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4564 CtxExpected.cs = s_aTests[iTest].uDstSel;
4565 if (s_aTests[iTest].uDstBits == 16)
4566 CtxExpected.rip.u &= UINT16_MAX;
4567 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4568 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4569
4570 if (fGp || fUd)
4571 {
4572 CtxExpected.rip.u = Ctx.rip.u;
4573 CtxExpected.cs = Ctx.cs;
4574 }
4575 CtxExpected.rsp.u = Ctx.rsp.u;
4576 if (s_aTests[iTest].fCall && !fGp && !fUd)
4577 {
4578 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4579 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
4580 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4581 }
4582 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4583 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4584 if (!fGp || fUd)
4585 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4586 else
4587 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4588 Ctx.rsp.u = uSavedRsp;
4589 g_usBs3TestStep++;
4590
4591 /* Again single stepping: */
4592 //Bs3TestPrintf("stepping...\n");
4593 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4594 Ctx.rflags.u16 |= X86_EFL_TF;
4595 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4596 CtxExpected.rax.u = Ctx.rax.u;
4597 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4598 if (fUd)
4599 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4600 else if (!fGp)
4601 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4602 else
4603 {
4604 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4605 bs3CpuBasic2_CheckDr6InitVal();
4606 }
4607 Ctx.rflags.u16 &= ~X86_EFL_TF;
4608 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4609 Ctx.rsp.u = uSavedRsp;
4610 g_usBs3TestStep++;
4611 }
4612 }
4613 else
4614 Bs3TestFailed("wtf?");
4615
4616 return 0;
4617}
4618
4619
4620/*********************************************************************************************************************************
4621* Near RET *
4622*********************************************************************************************************************************/
4623#define PROTO_ALL(a_Template) \
4624 FNBS3FAR a_Template ## _c16, \
4625 a_Template ## _c32, \
4626 a_Template ## _c64
4627PROTO_ALL(bs3CpuBasic2_retn_opsize_begin);
4628PROTO_ALL(bs3CpuBasic2_retn__ud2);
4629PROTO_ALL(bs3CpuBasic2_retn_opsize__ud2);
4630PROTO_ALL(bs3CpuBasic2_retn_i24__ud2);
4631PROTO_ALL(bs3CpuBasic2_retn_i24_opsize__ud2);
4632PROTO_ALL(bs3CpuBasic2_retn_i0__ud2);
4633PROTO_ALL(bs3CpuBasic2_retn_i0_opsize__ud2);
4634FNBS3FAR bs3CpuBasic2_retn_rexw__ud2_c64;
4635FNBS3FAR bs3CpuBasic2_retn_i24_rexw__ud2_c64;
4636FNBS3FAR bs3CpuBasic2_retn_opsize_rexw__ud2_c64;
4637FNBS3FAR bs3CpuBasic2_retn_rexw_opsize__ud2_c64;
4638FNBS3FAR bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64;
4639FNBS3FAR bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64;
4640PROTO_ALL(bs3CpuBasic2_retn_opsize_end);
4641#undef PROTO_ALL
4642
4643
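/**
 * Prepares the stack for a near return test.
 *
 * Fills the dwords around the stack pointer with 0xffffffff markers and then
 * stores the expected return address (pCtxExpected->rip) at the top of the
 * stack using the given address width.
 *
 * @param   StkPtr          Pointer to the current stack location (ss:rsp).
 * @param   pCtxExpected    The expected result context; provides the return
 *                          address to put on the stack.
 * @param   cbAddr          The return address width: 2, 4 or 8 bytes.
 */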
4644static void bs3CpuBasic2_retn_PrepStack(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4645{
4646 StkPtr.pu32[3] = UINT32_MAX;
4647 StkPtr.pu32[2] = UINT32_MAX;
4648 StkPtr.pu32[1] = UINT32_MAX;
4649 StkPtr.pu32[0] = UINT32_MAX;
4650 StkPtr.pu32[-1] = UINT32_MAX;
4651 StkPtr.pu32[-2] = UINT32_MAX;
4652 StkPtr.pu32[-3] = UINT32_MAX;
4653 StkPtr.pu32[-4] = UINT32_MAX;
4654 if (cbAddr == 2)
4655 StkPtr.pu16[0] = pCtxExpected->rip.u16;
4656 else if (cbAddr == 4)
4657 StkPtr.pu32[0] = pCtxExpected->rip.u32;
4658 else
4659 StkPtr.pu64[0] = pCtxExpected->rip.u64;
4660}
4661
4662
4663/**
4664 * Entrypoint for NEAR RET tests.
4665 *
4666 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4667 * @param bMode The CPU mode we're testing.
4668 */
4669BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_near_ret)(uint8_t bMode)
4670{
4671 BS3TRAPFRAME TrapCtx;
4672 BS3REGCTX Ctx;
4673 BS3REGCTX CtxExpected;
4674 unsigned iTest;
4675 BS3PTRUNION StkPtr;
4676
4677 /* make sure they're allocated */
4678 Bs3MemZero(&Ctx, sizeof(Ctx));
4679 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4680 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4681
4682 bs3CpuBasic2_SetGlobals(bMode);
4683
4684 /*
4685 * Create a context.
4686 *
4687     * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
4688 */
4689 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4690 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
4691 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4692
4693 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
4694 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
4695
4696 /*
4697 * 16-bit tests.
4698 */
4699 if (BS3_MODE_IS_16BIT_CODE(bMode))
4700 {
4701 static struct
4702 {
4703 bool fOpSizePfx;
4704 uint16_t cbImm;
4705 FPFNBS3FAR pfnTest;
4706 }
4707 const s_aTests[] =
4708 {
4709 { false, 0, bs3CpuBasic2_retn__ud2_c16, },
4710 { true, 0, bs3CpuBasic2_retn_opsize__ud2_c16, },
4711 { false, 24, bs3CpuBasic2_retn_i24__ud2_c16, },
4712 { true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c16, },
4713 { false, 0, bs3CpuBasic2_retn_i0__ud2_c16, },
4714 { true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c16, },
4715 };
4716
4717 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4718 {
4719 uint8_t const BS3_FAR *fpbCode;
4720
4721 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4722 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4723 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4724 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4725 CtxExpected.cs = Ctx.cs;
4726 if (!s_aTests[iTest].fOpSizePfx)
4727 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4728 else
4729 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4730 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4731 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4732 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4733 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4734 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4735 g_usBs3TestStep++;
4736
4737 /* Again single stepping: */
4738 //Bs3TestPrintf("stepping...\n");
4739 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4740 Ctx.rflags.u16 |= X86_EFL_TF;
4741 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4742 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4743 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4744 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4745 Ctx.rflags.u16 &= ~X86_EFL_TF;
4746 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4747 g_usBs3TestStep++;
4748 }
4749 }
4750 /*
4751 * 32-bit tests.
4752 */
4753 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4754 {
4755 static struct
4756 {
4757 uint8_t cBits;
4758 bool fOpSizePfx;
4759 uint16_t cbImm;
4760 FPFNBS3FAR pfnTest;
4761 }
4762 const s_aTests[] =
4763 {
4764 { 32, false, 0, bs3CpuBasic2_retn__ud2_c32, },
4765 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c32, },
4766 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c32, },
4767 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c32, },
4768 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c32, },
4769 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c32, },
4770 };
4771
4772 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4773 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c32);
4774 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c32) - offLow;
4775 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4776 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4777 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4778            Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4779 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4780 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4781 if (s_aTests[iTest].fOpSizePfx)
4782 {
4783 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4784 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4785 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4786 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4787 pbCode16[offUd + 1] = 0xf1;
4788 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4789 pbLow[offUd + 1] = 0x0b;
4790 }
4791
4792 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4793 {
4794 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4795
4796 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4797 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4798 CtxExpected.cs = Ctx.cs;
4799 if (!s_aTests[iTest].fOpSizePfx)
4800 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4801 else
4802 {
4803 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4804 CtxExpected.rip.u &= UINT16_MAX;
4805 }
4806 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4807 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4808 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4809 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4810 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4811 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4812 g_usBs3TestStep++;
4813
4814 /* Again single stepping: */
4815 //Bs3TestPrintf("stepping...\n");
4816 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4817 Ctx.rflags.u16 |= X86_EFL_TF;
4818 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4819 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4820 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4821 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4822 Ctx.rflags.u16 &= ~X86_EFL_TF;
4823 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4824 g_usBs3TestStep++;
4825 }
4826 }
4827 /*
4828 * 64-bit tests.
4829 */
4830 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4831 {
4832 static struct
4833 {
4834 uint8_t cBits;
4835 bool fOpSizePfx;
4836 uint16_t cbImm;
4837 FPFNBS3FAR pfnTest;
4838 }
4839 const s_aTests[] =
4840 {
4841 { 32, false, 0, bs3CpuBasic2_retn__ud2_c64, },
4842 { 32, false, 0, bs3CpuBasic2_retn_rexw__ud2_c64, },
4843 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c64, },
4844 { 32, false, 0, bs3CpuBasic2_retn_opsize_rexw__ud2_c64, },
4845 { 32, true, 0, bs3CpuBasic2_retn_rexw_opsize__ud2_c64, },
4846 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c64, },
4847 { 32, false, 24, bs3CpuBasic2_retn_i24_rexw__ud2_c64, },
4848 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c64, },
4849 { 32, false, 24, bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64, },
4850 { 32, true, 24, bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64, },
4851 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c64, },
4852 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c64, },
4853 };
4854 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4855 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4856
4857 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed
4858 tests, unless we're on intel where the opsize prefix is ignored. Here we
4859 just fill low memory with int3's so we can detect non-intel behaviour. */
4860 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c64);
4861 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c64) - offLow;
4862 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4863 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4864 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4865            Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4866 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4867 if (!fFix64OpSize)
4868 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4869 if (s_aTests[iTest].fOpSizePfx)
4870 {
4871 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4872 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4873 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4874 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4875 pbCode16[offUd + 1] = 0xf1;
4876 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4877 pbLow[offUd + 1] = 0x0b;
4878 }
4879
4880 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4881 {
4882 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4883
4884 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4885 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4886 CtxExpected.cs = Ctx.cs;
4887 if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
4888 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 8;
4889 else
4890 {
4891 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4892 CtxExpected.rip.u &= UINT16_MAX;
4893 }
4894 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4895 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4896 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4897 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4898 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4899 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4900 g_usBs3TestStep++;
4901
4902 /* Again single stepping: */
4903 //Bs3TestPrintf("stepping...\n");
4904 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4905 Ctx.rflags.u16 |= X86_EFL_TF;
4906 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4907 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4908 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4909 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4910 Ctx.rflags.u16 &= ~X86_EFL_TF;
4911 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4912 g_usBs3TestStep++;
4913 }
4914 }
4915 else
4916 Bs3TestFailed("wtf?");
4917
4918 return 0;
4919}
4920
4921
4922/*********************************************************************************************************************************
4923* Far RET *
4924*********************************************************************************************************************************/
4925#define PROTO_ALL(a_Template) \
4926 FNBS3FAR a_Template ## _c16, \
4927 a_Template ## _c32, \
4928 a_Template ## _c64
4929PROTO_ALL(bs3CpuBasic2_retf);
4930PROTO_ALL(bs3CpuBasic2_retf_opsize);
4931FNBS3FAR bs3CpuBasic2_retf_rexw_c64;
4932FNBS3FAR bs3CpuBasic2_retf_rexw_opsize_c64;
4933FNBS3FAR bs3CpuBasic2_retf_opsize_rexw_c64;
4934PROTO_ALL(bs3CpuBasic2_retf_i32);
4935PROTO_ALL(bs3CpuBasic2_retf_i32_opsize);
4936FNBS3FAR bs3CpuBasic2_retf_i32_rexw_c64;
4937FNBS3FAR bs3CpuBasic2_retf_i32_rexw_opsize_c64;
4938FNBS3FAR bs3CpuBasic2_retf_i32_opsize_rexw_c64;
4939#undef PROTO_ALL
4940
4941
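/**
 * Prepares the stack for a far return test.
 *
 * Fills 96 bytes starting 16 bytes below the stack pointer with 0xff markers,
 * then stores the return IP and CS and, when fWithStack is set, the outer SP
 * and SS located cbImm bytes further up (matching what a retf with an
 * immediate operand pops), all using stack items of cbStkItem bytes.
 *
 * @param   StkPtr      Pointer to the current stack location (ss:rsp).
 * @param   cbStkItem   The stack item size: 2, 4 or 8 bytes.
 * @param   uRetCs      The return CS value.
 * @param   uRetRip     The return IP value.
 * @param   fWithStack  Whether to also set up the outer SS:SP pair (used for
 *                      inter-privilege returns).
 * @param   cbImm       The number of immediate bytes popped by the retf
 *                      between the two pairs.
 * @param   uRetSs      The outer SS value (only used when fWithStack is true).
 * @param   uRetRsp     The outer SP value (only used when fWithStack is true).
 */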
4942static void bs3CpuBasic2_retf_PrepStack(BS3PTRUNION StkPtr, uint8_t cbStkItem, RTSEL uRetCs, uint64_t uRetRip,
4943 bool fWithStack, uint16_t cbImm, RTSEL uRetSs, uint64_t uRetRsp)
4944{
4945 Bs3MemSet(&StkPtr.pu32[-4], 0xff, 96);
4946 if (cbStkItem == 2)
4947 {
4948 StkPtr.pu16[0] = (uint16_t)uRetRip;
4949 StkPtr.pu16[1] = uRetCs;
4950 if (fWithStack)
4951 {
4952 StkPtr.pb += cbImm;
4953 StkPtr.pu16[2] = (uint16_t)uRetRsp;
4954 StkPtr.pu16[3] = uRetSs;
4955 }
4956 }
4957 else if (cbStkItem == 4)
4958 {
4959 StkPtr.pu32[0] = (uint32_t)uRetRip;
4960 StkPtr.pu16[2] = uRetCs;
4961 if (fWithStack)
4962 {
4963 StkPtr.pb += cbImm;
4964 StkPtr.pu32[2] = (uint32_t)uRetRsp;
4965 StkPtr.pu16[6] = uRetSs;
4966 }
4967 }
4968 else
4969 {
4970 StkPtr.pu64[0] = uRetRip;
4971 StkPtr.pu16[4] = uRetCs;
4972 if (fWithStack)
4973 {
4974 StkPtr.pb += cbImm;
4975 StkPtr.pu64[2] = uRetRsp;
4976 StkPtr.pu16[12] = uRetSs;
4977 }
4978 }
4979}
4980
4981
4982/**
4983 * Entrypoint for FAR RET tests.
4984 *
4985 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4986 * @param bMode The CPU mode we're testing.
4987 */
4988BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_ret)(uint8_t bMode)
4989{
4990 BS3TRAPFRAME TrapCtx;
4991 BS3REGCTX Ctx;
4992 BS3REGCTX CtxExpected;
4993 unsigned iTest;
4994 unsigned iSubTest;
4995 BS3PTRUNION StkPtr;
4996#define LOW_UD_ADDR 0x0609
4997 uint8_t BS3_FAR * const pbLowUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_UD_ADDR);
4998#define LOW_SALC_UD_ADDR 0x0611
4999 uint8_t BS3_FAR * const pbLowSalcUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SALC_UD_ADDR);
5000#define BS3TEXT16_ADDR_HI (BS3_ADDR_BS3TEXT16 >> 16)
5001
5002 /* make sure they're allocated */
5003 Bs3MemZero(&Ctx, sizeof(Ctx));
5004 Bs3MemZero(&CtxExpected, sizeof(Ctx));
5005 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
5006
5007 bs3CpuBasic2_SetGlobals(bMode);
5008
5009 //if (!BS3_MODE_IS_64BIT_SYS(bMode) && bMode != BS3_MODE_PP32_16) return 0xff;
5010 //if (bMode != BS3_MODE_PE32) return 0xff;
5011
5012 /*
5013     * When dealing with retf using a 16-bit effective operand size to return to
5014     * 32-bit or 64-bit code, we're restricted to a 16-bit address. So, we plant a
5015     * UD instruction below 64KB that we can target with flat 32/64 code segments.
5016 * (Putting it on the stack would be possible too, but we'd have to create
5017 * the sub-test tables dynamically, which isn't necessary.)
5018 */
5019 Bs3MemSet(&pbLowUd[-9], 0xcc, 32);
5020 Bs3MemSet(&pbLowSalcUd[-9], 0xcc, 32);
5021 pbLowUd[0] = 0x0f; /* ud2 */
5022 pbLowUd[1] = 0x0b;
5023
5024 /* A variation to detect whether we're in 64-bit or 16-bit mode when
5025 executing the code. */
5026 pbLowSalcUd[0] = 0xd6; /* salc */
5027 pbLowSalcUd[1] = 0x0f; /* ud2 */
5028 pbLowSalcUd[2] = 0x0b;
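    /* (salc is an invalid opcode in 64-bit mode, so when this is reached by
       64-bit code the #UD is raised on the salc byte itself; in 16-bit/32-bit
       code salc executes first, setting AL to 0xff or 0x00 from CF, and the
       following ud2 traps.) */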
5029
5030 /*
5031 * Use separate stacks for all relevant CPU exceptions so we can put
5032 * garbage in unused RSP bits w/o needing to care about where a long mode
5033 * handler will end up when accessing the whole RSP. (Not an issue with
5034 * 16-bit and 32-bit protected mode kernels, as here the weird SS based
5035 * stack pointer handling is in effect and the exception handler code
5036 * will just continue using the same SS and same portion of RSP.)
5037 *
5038 * See r154660.
5039 */
5040 if (BS3_MODE_IS_64BIT_SYS(bMode))
5041 Bs3Trap64InitEx(true);
5042
5043 /*
5044 * Create some call gates and whatnot for the UD2 code using the spare selectors.
5045 */
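    /* (One spare descriptor of each gate/system type 0..15 is set up, all
       pointing at bs3CpuBasic2_ud2.) */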
5046 if (BS3_MODE_IS_64BIT_SYS(bMode))
5047 for (iTest = 0; iTest < 16; iTest++)
5048 Bs3SelSetupGate64(&Bs3GdteSpare00 + iTest * 2, iTest /*bType*/, 3 /*bDpl*/,
5049 BS3_SEL_R0_CS64, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16);
5050 else
5051 {
5052 for (iTest = 0; iTest < 16; iTest++)
5053 {
5054 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest, iTest /*bType*/, 3 /*bDpl*/,
5055 BS3_SEL_R0_CS16, BS3_FP_OFF(bs3CpuBasic2_ud2), 0);
5056 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest + 16, iTest /*bType*/, 3 /*bDpl*/,
5057 BS3_SEL_R0_CS32, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16, 0);
5058 }
5059 }
5060
5061 /*
5062 * Create a context.
5063 *
5064     * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
5065 */
5066 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
5067 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
5068 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
5069
5070 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
5071 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
5072
5073 /*
5074 * 16-bit tests.
5075 */
5076 if (BS3_MODE_IS_16BIT_CODE(bMode))
5077 {
5078 static struct
5079 {
5080 bool fOpSizePfx;
5081 uint16_t cbImm;
5082 FPFNBS3FAR pfnTest;
5083 } const s_aTests[] =
5084 {
5085 { false, 0, bs3CpuBasic2_retf_c16, },
5086 { true, 0, bs3CpuBasic2_retf_opsize_c16, },
5087 { false, 32, bs3CpuBasic2_retf_i32_c16, },
5088 { true, 32, bs3CpuBasic2_retf_i32_opsize_c16, },
5089 };
5090
5091 static struct
5092 {
5093 bool fRmOrV86;
5094 bool fInterPriv;
5095 int8_t iXcpt;
5096 RTSEL uStartSs;
5097 uint8_t cDstBits;
5098 RTSEL uDstCs;
5099 union /* must use a union here as the compiler won't compile if uint16_t and will mess up fixups for uint32_t. */
5100 {
5101 uint32_t offDst;
5102 struct
5103 {
5104 NPVOID pv;
5105 uint16_t uHigh;
5106 } s;
5107 };
5108 RTSEL uDstSs;
5109 uint16_t uErrCd;
5110 } const s_aSubTests[] =
5111 { /* rm/v86, PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5112 { true, false, -1, 0, 16, BS3_SEL_TEXT16, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, 0, 0 },
5113 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_TEXT16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5114 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5115 { false, false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5116 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5117 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5118 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5119 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5120 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5121 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5122 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5123 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5124 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5125 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5126 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5127 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5128 /* conforming stuff */
5129 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5130 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5131 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5132 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5133 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5134 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS16_CNF },
5135 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R1_CS16_CNF },
5136 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5137 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5138 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5139 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5140 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5141 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5142 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5143 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5144 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5145 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5146 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5147 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5148 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5149 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5150 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5151 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5152 /* returning to 32-bit code: */
5153 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5154 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
5155 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5156 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5157 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5158 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5159 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5160 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5161 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5162 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5163 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5164 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5165 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5166 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5167 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5168 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5169 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5170 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5171 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5172 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5173 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5174 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5175 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5176 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5177 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5178 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5179 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5180 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5181 /* returning to 32-bit conforming code: */
5182 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5183 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5184 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5185 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5186 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5187 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5188 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5189 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5190 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS32_CNF },
5191 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5192 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5193 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5194 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5195 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5196 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5197 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5198 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS32_CNF },
5199 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS32_CNF },
5200 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5201 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5202 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS32_CNF },
5203 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS32_CNF },
5204 { false, true, 42, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS32_CNF },
5205 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5206 /* returning to 64-bit code or 16-bit when not in long mode: */
5207 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5208 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5209 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5210 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5211 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5212 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5213 { false, false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5214 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5215 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5216 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5217 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5218 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5219 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5220 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5221 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5222 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5223 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5224 /* returning to 64-bit code (or 16-bit code when not in long mode), conforming code variant: */
5225 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5226 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5227 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5228 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5229
5230 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5231 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5232 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5233 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5234 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5235 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5236 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5237
5238 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5239 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5240 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5241 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5242
5243 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5244 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5245 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5246 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5247
5248 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5249 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5250 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_TSS32_DF | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_TSS32_DF },
5251 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_00 },
5252 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_01 },
5253 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_02 },
5254 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_03 },
5255 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_04 },
5256 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_05 },
5257 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_06 },
5258 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_07 },
5259 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_08 },
5260 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_09 },
5261 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0a },
5262 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0b },
5263 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0c },
5264 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0d },
5265 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0e },
5266 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0f },
5267 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_10 },
5268 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_11 },
5269 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_12 },
5270 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_13 },
5271 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_14 },
5272 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_15 },
5273 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_16 },
5274 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_17 },
5275 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_18 },
5276 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_19 },
5277 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1a },
5278 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1b },
5279 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1c },
5280 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1d },
5281 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1e },
5282 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1f },
5283 };
5284
5285 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
5286
5287 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5288 {
5289 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5290
5291 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5292 {
5293 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
5294 if ( s_aSubTests[iSubTest].fRmOrV86 == fRmOrV86
5295 && (s_aSubTests[iSubTest].offDst <= UINT16_MAX || s_aTests[iTest].fOpSizePfx))
5296 {
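                /* Frame size: a same-privilege far return pops CS:IP (2 stack items),
                   an inter-privilege one also pops SS:SP (4 items).  In this 16-bit
                   code variant an item is 2 bytes, or 4 with the operand size prefix. */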
5297 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5298 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 4 : 2;
5299 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5300 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5301 uint64_t uDstRspExpect, uDstRspPush;
5302
5303 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5304 if (Ctx.ss != BS3_SEL_R0_SS32)
5305 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5306 else
5307 Ctx.rsp.u32 &= UINT16_MAX;
5308 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5309 if (s_aSubTests[iSubTest].fInterPriv)
5310 {
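                    /* For 32-bit frames the high half of the pushed (E)SP is junk
                       (0xacdc0000); it should only survive into the expected value
                       when the CPU loads the full ESP, i.e. for 32-bit destination
                       stacks and on 64-bit systems. */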
5311 if (s_aTests[iTest].fOpSizePfx)
5312 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5313 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5314 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5315 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5316 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5317 {
5318 if (s_aTests[iTest].fOpSizePfx)
5319 uDstRspExpect = uDstRspPush;
5320 else
5321 uDstRspExpect &= UINT16_MAX;
5322 }
5323 }
5324
5325 CtxExpected.bCpl = Ctx.bCpl;
5326 CtxExpected.cs = Ctx.cs;
5327 CtxExpected.ss = Ctx.ss;
5328 CtxExpected.ds = Ctx.ds;
5329 CtxExpected.es = Ctx.es;
5330 CtxExpected.fs = Ctx.fs;
5331 CtxExpected.gs = Ctx.gs;
5332 CtxExpected.rip.u = Ctx.rip.u;
5333 CtxExpected.rsp.u = Ctx.rsp.u;
5334 CtxExpected.rax.u = Ctx.rax.u;
5335 if (s_aSubTests[iSubTest].iXcpt < 0)
5336 {
5337 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5338 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5339 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5340 {
5341 CtxExpected.rip.u += 1;
5342 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5343 }
5344 CtxExpected.ss = uDstSs;
5345 CtxExpected.rsp.u = uDstRspExpect;
5346 if (s_aSubTests[iSubTest].fInterPriv)
5347 {
5348 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5349 unsigned cSels = 4;
5350 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
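                        /* A privilege-lowering far return nulls DS/ES/FS/GS selectors
                           whose DPL is below the new CPL (unless conforming code), so
                           mirror that in the expected context. */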
5351 while (cSels-- > 0)
5352 {
5353 uint16_t uSel = *puSel;
5354 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5355 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5356 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5357 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5358 *puSel = 0;
5359 puSel++;
5360 }
5361 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5362 }
5363 }
5364 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5365 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
5366 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]\n", Ctx.ss, Ctx.rsp.u, CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush);
5367 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5368 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5369 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5370 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5371 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5372 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5373 if (s_aSubTests[iSubTest].iXcpt < 0)
5374 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5375 else
5376 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5377 g_usBs3TestStep++;
5378
5379 /* Again single stepping: */
5380 //Bs3TestPrintf("stepping...\n");
5381 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5382 Ctx.rflags.u16 |= X86_EFL_TF;
5383 CtxExpected.rflags.u16 = Ctx.rflags.u16;
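                /* With TF set the #DB fires at the return destination before the SALC
                   byte executes, so undo the RIP/AL adjustments made above for the
                   not-in-long-mode case. */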
5384 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5385 {
5386 CtxExpected.rip.u -= 1;
5387 CtxExpected.rax.u = Ctx.rax.u;
5388 }
5389 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5390 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5391 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5392 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5393 if (s_aSubTests[iSubTest].iXcpt < 0)
5394 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5395 else
5396 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5397 Ctx.rflags.u16 &= ~X86_EFL_TF;
5398 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5399 g_usBs3TestStep++;
5400 }
5401 }
5402 }
5403 }
5404 /*
5405 * 32-bit tests.
5406 */
5407 else if (BS3_MODE_IS_32BIT_CODE(bMode))
5408 {
5409 static struct
5410 {
5411 bool fOpSizePfx;
5412 uint16_t cbImm;
5413 FPFNBS3FAR pfnTest;
5414 } const s_aTests[] =
5415 {
5416 { false, 0, bs3CpuBasic2_retf_c16, },
5417 { true, 0, bs3CpuBasic2_retf_opsize_c16, },
5418 { false, 32, bs3CpuBasic2_retf_i32_c16, },
5419 { true, 32, bs3CpuBasic2_retf_i32_opsize_c16, },
5420 };
5421
5422 static struct
5423 {
5424 bool fInterPriv;
5425 int8_t iXcpt;
5426 RTSEL uStartSs;
5427 uint8_t cDstBits;
5428 RTSEL uDstCs;
5429 union /* must use a union here: the compiler won't compile this with a uint16_t and messes up fixups with a uint32_t. */
5430 {
5431 uint32_t offDst;
5432 struct
5433 {
5434 NPVOID pv;
5435 uint16_t uHigh;
5436 } s;
5437 };
5438 RTSEL uDstSs;
5439 uint16_t uErrCd;
5440 } const s_aSubTests[] =
5441 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5442 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5443 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5444 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5445 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5446 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5447 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5448 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5449 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5450 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5451 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5452 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5453 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5454 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5455 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5456 /* same with 32-bit wide target addresses: */
5457 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5458 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5459 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5460 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5461 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5462 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5463 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5464 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5465 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5466 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5467 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5468 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5469 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5470 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5471 /* conforming stuff */
5472 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5473 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5474 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5475 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5476 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5477 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
5478 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS32_CNF },
5479 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5480 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5481 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5482 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5483 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5484 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5485 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5486 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5487 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5488 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5489 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5490 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5491 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5492 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5493 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5494 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5495 /* returning to 16-bit code: */
5496 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5497 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5498 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5499 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5500 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5501 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5502 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5503 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
5504 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5505 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5506 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5507 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5508 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5509 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5510 /* returning to 16-bit conforming code: */
5511 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5512 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5513 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5514 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5515 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5516 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5517 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5518 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5519 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
5520 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5521 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5522 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5523 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5524 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5525 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5526 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5527 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
5528 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
5529 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5530 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5531 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
5532 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
5533 { true, 42, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
5534 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5535 /* returning to 64-bit code (or 16-bit code when not in long mode): */
5536 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5537 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5538 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5539 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5540 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5541 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5542 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5543 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5544 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5545 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5546 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5547 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5548 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5549 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5550 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5551 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5552 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5553 /* returning to 64-bit code (or 16-bit code when not in long mode), conforming code variant: */
5554 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5555 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5556 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5557 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5558
5559 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5560 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5561 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5562 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5563 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5564 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5565 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5566
5567 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5568 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5569 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5570 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5571
5572 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5573 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5574 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5575 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5576
5577 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5578 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5579 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
5580 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_01 },
5581 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
5582 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_03 },
5583 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
5584 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_05 },
5585 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
5586 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_07 },
5587 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
5588 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_09 },
5589 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
5590 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0b },
5591 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
5592 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0d },
5593 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
5594 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0f },
5595 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
5596 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_11 },
5597 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
5598 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_13 },
5599 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
5600 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_15 },
5601 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
5602 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_17 },
5603 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
5604 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_19 },
5605 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
5606 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1b },
5607 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
5608 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1d },
5609 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
5610 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1f },
5611 };
5612
5613 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5614 {
5615 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5616 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n",
5617 // iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm, s_aTests[iTest].fOpSizePfx ? " o16" : "");
5618
5619 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5620 {
5621 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
5622 if (!s_aTests[iTest].fOpSizePfx || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
5623 {
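                    /* Same frame layout logic as the 16-bit loop above, except that in
                       32-bit code the default stack item is 4 bytes and the operand
                       size prefix (o16) shrinks it to 2. */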
5624 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5625 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 2 : 4;
5626 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5627 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5628 uint64_t uDstRspExpect, uDstRspPush;
5629 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
5630 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
5631
5632 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5633 if (Ctx.ss != BS3_SEL_R0_SS32)
5634 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5635 else
5636 Ctx.rsp.u32 &= UINT16_MAX;
5637 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5638 if (s_aSubTests[iSubTest].fInterPriv)
5639 {
5640 if (!s_aTests[iTest].fOpSizePfx)
5641 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5642 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5643 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5644 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5645 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5646 {
5647 if (!s_aTests[iTest].fOpSizePfx)
5648 uDstRspExpect = uDstRspPush;
5649 else
5650 uDstRspExpect &= UINT16_MAX;
5651 }
5652 }
5653
5654 CtxExpected.bCpl = Ctx.bCpl;
5655 CtxExpected.cs = Ctx.cs;
5656 CtxExpected.ss = Ctx.ss;
5657 CtxExpected.ds = Ctx.ds;
5658 CtxExpected.es = Ctx.es;
5659 CtxExpected.fs = Ctx.fs;
5660 CtxExpected.gs = Ctx.gs;
5661 CtxExpected.rip.u = Ctx.rip.u;
5662 CtxExpected.rsp.u = Ctx.rsp.u;
5663 CtxExpected.rax.u = Ctx.rax.u;
5664 if (s_aSubTests[iSubTest].iXcpt < 0)
5665 {
5666 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5667 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5668 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5669 {
5670 CtxExpected.rip.u += 1;
5671 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5672 }
5673 CtxExpected.ss = uDstSs;
5674 CtxExpected.rsp.u = uDstRspExpect;
5675 if (s_aSubTests[iSubTest].fInterPriv)
5676 {
5677 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5678 unsigned cSels = 4;
5679 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
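                            /* Null DS/ES/FS/GS selectors not valid at the new CPL, as
                               in the 16-bit loop above. */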
5680 while (cSels-- > 0)
5681 {
5682 uint16_t uSel = *puSel;
5683 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5684 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5685 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5686 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5687 *puSel = 0;
5688 puSel++;
5689 }
5690 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5691 }
5692 }
5693 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5694 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
5695 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
5696 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5697 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5698 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5699 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5700 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5701 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5702 if (s_aSubTests[iSubTest].iXcpt < 0)
5703 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5704 else
5705 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5706 g_usBs3TestStep++;
5707
5708 /* Again single stepping: */
5709 //Bs3TestPrintf("stepping...\n");
5710 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5711 Ctx.rflags.u16 |= X86_EFL_TF;
5712 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5713 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5714 {
5715 CtxExpected.rip.u -= 1;
5716 CtxExpected.rax.u = Ctx.rax.u;
5717 }
5718 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5719 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5720 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5721 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5722 if (s_aSubTests[iSubTest].iXcpt < 0)
5723 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5724 else
5725 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5726 Ctx.rflags.u16 &= ~X86_EFL_TF;
5727 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5728 g_usBs3TestStep++;
5729 }
5730 }
5731 }
5732 }
5733 /*
5734 * 64-bit tests.
5735 */
5736 else if (BS3_MODE_IS_64BIT_CODE(bMode))
5737 {
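        /* Currently no 64-bit retf test cases are driven from here. */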
5738 }
5739 else
5740 Bs3TestFailed("unexpected mode");
5741
5742 if (BS3_MODE_IS_64BIT_SYS(bMode))
5743 Bs3TrapReInit();
5744 return 0;
5745}
5746